summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorjhendersonHDF <jhenderson@hdfgroup.org>2023-05-03 21:58:37 (GMT)
committerGitHub <noreply@github.com>2023-05-03 21:58:37 (GMT)
commit43e7790f0e0b055433df089255b271c8291ca8ec (patch)
tree1d58d88179d4587d71df1378f747ef189881c5fd
parent75d64819b050bb30b2a2751d9ba55651f9a1af79 (diff)
parent3fbdd700cb68b4a7f6f9ac44eebcf677bb71fd0a (diff)
downloadhdf5-43e7790f0e0b055433df089255b271c8291ca8ec.zip
hdf5-43e7790f0e0b055433df089255b271c8291ca8ec.tar.gz
hdf5-43e7790f0e0b055433df089255b271c8291ca8ec.tar.bz2
Merge pull request #2887 from jhendersonHDF/1_14_develop_sync
Sync with develop
-rw-r--r--.github/workflows/codespell.yml2
-rw-r--r--CMakeLists.txt54
-rw-r--r--CONTRIBUTING.md1
-rw-r--r--config/cmake/cacheinit.cmake11
-rw-r--r--doxygen/dox/Overview.dox2
-rw-r--r--fortran/src/README1
-rw-r--r--hl/c++/test/Makefile.am2
-rw-r--r--release_docs/RELEASE.txt42
-rw-r--r--src/H5Ocache.c2
-rw-r--r--test/API/CMakeLists.txt319
-rw-r--r--test/API/H5_api_async_test.c2730
-rw-r--r--test/API/H5_api_async_test.h29
-rw-r--r--test/API/H5_api_attribute_test.c11027
-rw-r--r--test/API/H5_api_attribute_test.h203
-rw-r--r--test/API/H5_api_dataset_test.c11683
-rw-r--r--test/API/H5_api_dataset_test.h331
-rw-r--r--test/API/H5_api_datatype_test.c2693
-rw-r--r--test/API/H5_api_datatype_test.h79
-rw-r--r--test/API/H5_api_file_test.c2564
-rw-r--r--test/API/H5_api_file_test.h85
-rw-r--r--test/API/H5_api_group_test.c2394
-rw-r--r--test/API/H5_api_group_test.h65
-rw-r--r--test/API/H5_api_link_test.c27072
-rw-r--r--test/API/H5_api_link_test.h437
-rw-r--r--test/API/H5_api_misc_test.c1060
-rw-r--r--test/API/H5_api_misc_test.h52
-rw-r--r--test/API/H5_api_object_test.c7172
-rw-r--r--test/API/H5_api_object_test.h191
-rw-r--r--test/API/H5_api_test.c227
-rw-r--r--test/API/H5_api_test.h73
-rw-r--r--test/API/H5_api_test_config.h.in66
-rw-r--r--test/API/H5_api_test_util.c819
-rw-r--r--test/API/H5_api_test_util.h24
-rw-r--r--test/API/H5_api_tests_disabled.h46
-rw-r--r--test/API/README.md87
-rw-r--r--test/API/driver/CMakeLists.txt34
-rw-r--r--test/API/driver/h5_api_test_driver.cxx910
-rw-r--r--test/API/driver/h5_api_test_driver.hxx93
-rw-r--r--test/API/tarray.c2250
-rw-r--r--test/API/tattr.c11929
-rw-r--r--test/API/tchecksum.c251
-rw-r--r--test/API/tconfig.c199
-rw-r--r--test/API/tcoords.c724
-rw-r--r--test/API/testhdf5.c729
-rw-r--r--test/API/testhdf5.h349
-rw-r--r--test/API/tfile.c8381
-rw-r--r--test/API/tgenprop.c2201
-rw-r--r--test/API/th5o.c1889
-rw-r--r--test/API/th5s.c3538
-rw-r--r--test/API/tid.c1413
-rw-r--r--test/API/titerate.c1263
-rw-r--r--test/API/tmisc.c6349
-rw-r--r--test/API/trefer.c3641
-rw-r--r--test/API/tselect.c16314
-rw-r--r--test/API/ttime.c231
-rw-r--r--test/API/tunicode.c867
-rw-r--r--test/API/tvlstr.c1013
-rw-r--r--test/API/tvltypes.c3268
-rw-r--r--test/CMakeLists.txt4
-rw-r--r--test/h5test.c7
-rw-r--r--test/h5test.h72
-rw-r--r--test/vol.c62
-rw-r--r--testpar/API/CMakeLists.txt294
-rw-r--r--testpar/API/H5_api_async_test_parallel.c3668
-rw-r--r--testpar/API/H5_api_async_test_parallel.h29
-rw-r--r--testpar/API/H5_api_attribute_test_parallel.c47
-rw-r--r--testpar/API/H5_api_attribute_test_parallel.h20
-rw-r--r--testpar/API/H5_api_dataset_test_parallel.c8149
-rw-r--r--testpar/API/H5_api_dataset_test_parallel.h20
-rw-r--r--testpar/API/H5_api_datatype_test_parallel.c47
-rw-r--r--testpar/API/H5_api_datatype_test_parallel.h20
-rw-r--r--testpar/API/H5_api_file_test_parallel.c367
-rw-r--r--testpar/API/H5_api_file_test_parallel.h20
-rw-r--r--testpar/API/H5_api_group_test_parallel.c47
-rw-r--r--testpar/API/H5_api_group_test_parallel.h20
-rw-r--r--testpar/API/H5_api_link_test_parallel.c47
-rw-r--r--testpar/API/H5_api_link_test_parallel.h20
-rw-r--r--testpar/API/H5_api_misc_test_parallel.c47
-rw-r--r--testpar/API/H5_api_misc_test_parallel.h20
-rw-r--r--testpar/API/H5_api_object_test_parallel.c47
-rw-r--r--testpar/API/H5_api_object_test_parallel.h20
-rw-r--r--testpar/API/H5_api_test_parallel.c338
-rw-r--r--testpar/API/H5_api_test_parallel.h188
-rw-r--r--testpar/API/t_bigio.c1942
-rw-r--r--testpar/API/t_chunk_alloc.c512
-rw-r--r--testpar/API/t_coll_chunk.c1417
-rw-r--r--testpar/API/t_coll_md_read.c654
-rw-r--r--testpar/API/t_dset.c4335
-rw-r--r--testpar/API/t_file.c1032
-rw-r--r--testpar/API/t_file_image.c371
-rw-r--r--testpar/API/t_filter_read.c564
-rw-r--r--testpar/API/t_mdset.c2814
-rw-r--r--testpar/API/t_ph5basic.c192
-rw-r--r--testpar/API/t_prop.c646
-rw-r--r--testpar/API/t_pshutdown.c150
-rw-r--r--testpar/API/t_shapesame.c4516
-rw-r--r--testpar/API/t_span_tree.c2622
-rw-r--r--testpar/API/testphdf5.c1007
-rw-r--r--testpar/API/testphdf5.h343
-rw-r--r--testpar/CMakeLists.txt4
-rw-r--r--testpar/t_subfiling_vfd.c9
101 files changed, 176174 insertions, 57 deletions
diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml
index a4edb0f..ddf1038 100644
--- a/.github/workflows/codespell.yml
+++ b/.github/workflows/codespell.yml
@@ -11,5 +11,5 @@ jobs:
- uses: actions/checkout@v3
- uses: codespell-project/actions-codespell@master
with:
- skip: ./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE,./config/sanitizer/sanitizers.cmake,./tools/test/h5repack/testfiles/*.dat
+ skip: ./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE,./config/sanitizer/sanitizers.cmake,./tools/test/h5repack/testfiles/*.dat,./test/API/driver
ignore_words_list: isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,minnum,ake,gord,numer,ro,oce,msdos
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 9da39fa..62bbd00 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -208,20 +208,23 @@ set (HDF5_HL_F90_C_LIBSH_TARGET "${HDF5_HL_F90_C_LIB_CORENAME}-shared")
#-----------------------------------------------------------------------------
# Define some CMake variables for use later in the project
#-----------------------------------------------------------------------------
-set (HDF_CONFIG_DIR ${HDF5_SOURCE_DIR}/config)
-set (HDF_RESOURCES_DIR ${HDF5_SOURCE_DIR}/config/cmake)
-set (HDF5_SRC_DIR ${HDF5_SOURCE_DIR}/src)
-set (HDF5_TEST_SRC_DIR ${HDF5_SOURCE_DIR}/test)
-set (HDF5_CPP_SRC_DIR ${HDF5_SOURCE_DIR}/c++)
-set (HDF5_CPP_TST_DIR ${HDF5_SOURCE_DIR}/c++/test)
-set (HDF5_HL_SRC_DIR ${HDF5_SOURCE_DIR}/hl)
-set (HDF5_HL_CPP_SRC_DIR ${HDF5_SOURCE_DIR}/hl/c++)
-set (HDF5_HL_TOOLS_DIR ${HDF5_SOURCE_DIR}/hl/tools)
-set (HDF5_TOOLS_DIR ${HDF5_SOURCE_DIR}/tools)
-set (HDF5_TOOLS_SRC_DIR ${HDF5_SOURCE_DIR}/tools/src)
-set (HDF5_PERFORM_SRC_DIR ${HDF5_SOURCE_DIR}/tools/src/perform)
-set (HDF5_UTILS_DIR ${HDF5_SOURCE_DIR}/utils)
-set (HDF5_F90_SRC_DIR ${HDF5_SOURCE_DIR}/fortran)
+set (HDF_CONFIG_DIR ${HDF5_SOURCE_DIR}/config)
+set (HDF_RESOURCES_DIR ${HDF5_SOURCE_DIR}/config/cmake)
+set (HDF5_SRC_DIR ${HDF5_SOURCE_DIR}/src)
+set (HDF5_TEST_SRC_DIR ${HDF5_SOURCE_DIR}/test)
+set (HDF5_TEST_PAR_DIR ${HDF5_SOURCE_DIR}/testpar)
+set (HDF5_TEST_API_SRC_DIR ${HDF5_SOURCE_DIR}/test/API)
+set (HDF5_TEST_API_PAR_SRC_DIR ${HDF5_SOURCE_DIR}/testpar/API)
+set (HDF5_CPP_SRC_DIR ${HDF5_SOURCE_DIR}/c++)
+set (HDF5_CPP_TST_DIR ${HDF5_SOURCE_DIR}/c++/test)
+set (HDF5_HL_SRC_DIR ${HDF5_SOURCE_DIR}/hl)
+set (HDF5_HL_CPP_SRC_DIR ${HDF5_SOURCE_DIR}/hl/c++)
+set (HDF5_HL_TOOLS_DIR ${HDF5_SOURCE_DIR}/hl/tools)
+set (HDF5_TOOLS_DIR ${HDF5_SOURCE_DIR}/tools)
+set (HDF5_TOOLS_SRC_DIR ${HDF5_SOURCE_DIR}/tools/src)
+set (HDF5_PERFORM_SRC_DIR ${HDF5_SOURCE_DIR}/tools/src/perform)
+set (HDF5_UTILS_DIR ${HDF5_SOURCE_DIR}/utils)
+set (HDF5_F90_SRC_DIR ${HDF5_SOURCE_DIR}/fortran)
set (HDF5_JAVA_JNI_SRC_DIR ${HDF5_SOURCE_DIR}/java/src/jni)
set (HDF5_JAVA_HDF5_SRC_DIR ${HDF5_SOURCE_DIR}/java/src/hdf)
set (HDF5_JAVA_TEST_SRC_DIR ${HDF5_SOURCE_DIR}/java/test)
@@ -947,6 +950,25 @@ if (BUILD_TESTING)
math (EXPR CTEST_LONG_TIMEOUT "${DART_TESTING_TIMEOUT} * 2")
math (EXPR CTEST_VERY_LONG_TIMEOUT "${DART_TESTING_TIMEOUT} * 3")
+ option (HDF5_TEST_API "Execute HDF5 API tests" OFF)
+ mark_as_advanced (HDF5_TEST_API)
+ if (HDF5_TEST_API)
+ option (HDF5_TEST_API_INSTALL "Install HDF5 API tests" OFF)
+ mark_as_advanced (HDF5_TEST_API_INSTALL)
+
+ # Enable HDF5 Async API tests
+ option (HDF5_TEST_API_ENABLE_ASYNC "Enable HDF5 Async API tests" OFF)
+ mark_as_advanced (HDF5_TEST_API_ENABLE_ASYNC)
+
+ # Build and use HDF5 test driver program for API tests
+ option (HDF5_TEST_API_ENABLE_DRIVER "Enable HDF5 API test driver program" OFF)
+ mark_as_advanced (HDF5_TEST_API_ENABLE_DRIVER)
+ if (HDF5_TEST_API_ENABLE_DRIVER)
+ set (HDF5_TEST_API_SERVER "" CACHE STRING "Server executable for running API tests")
+ mark_as_advanced (HDF5_TEST_API_SERVER)
+ endif ()
+ endif ()
+
option (HDF5_TEST_VFD "Execute tests with different VFDs" OFF)
mark_as_advanced (HDF5_TEST_VFD)
if (HDF5_TEST_VFD)
@@ -1003,11 +1025,11 @@ if (BUILD_TESTING)
mark_as_advanced (HDF5_TEST_JAVA)
if (NOT HDF5_EXTERNALLY_CONFIGURED)
- if (EXISTS "${HDF5_SOURCE_DIR}/test" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/test")
+ if (EXISTS "${HDF5_TEST_SRC_DIR}" AND IS_DIRECTORY "${HDF5_TEST_SRC_DIR}")
add_subdirectory (test)
endif ()
if (H5_HAVE_PARALLEL)
- if (EXISTS "${HDF5_SOURCE_DIR}/testpar" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/testpar")
+ if (EXISTS "${HDF5_TEST_PAR_DIR}" AND IS_DIRECTORY "${HDF5_TEST_PAR_DIR}")
add_subdirectory (testpar)
endif ()
endif ()
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 687e981..a16e845 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -125,7 +125,6 @@ Please make sure that you check the items applicable to your pull request:
* [ ] Does the new feature require a change to an existing API? See "API Compatibility Macros" document (https://portal.hdfgroup.org/display/HDF5/API+Compatibility+Macros)
* Documentation
* [ ] Was the change described in the release_docs/RELEASE.txt file?
- * [ ] Was MANIFEST updated if new files had been added to the source?
* [ ] Was the new function documented in the corresponding public header file using [Doxygen](https://docs.hdfgroup.org/hdf5/develop/_r_m_t.html)?
* [ ] Was new functionality documented for the HDF5 community (the level of documentation depends on the feature; ask us what would be appropriate)
* Testing
diff --git a/config/cmake/cacheinit.cmake b/config/cmake/cacheinit.cmake
index 4460891..376b28f 100644
--- a/config/cmake/cacheinit.cmake
+++ b/config/cmake/cacheinit.cmake
@@ -49,6 +49,10 @@ set (HDF5_MINGW_STATIC_GCC_LIBS ON CACHE BOOL "Statically link libgcc/libstdc++"
set (HDF5_ALLOW_EXTERNAL_SUPPORT "TGZ" CACHE STRING "Allow External Library Building (NO GIT TGZ)" FORCE)
set_property (CACHE HDF5_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO GIT TGZ)
+########################
+# compression options
+########################
+
set (ZLIB_PACKAGE_NAME "zlib" CACHE STRING "Name of ZLIB package" FORCE)
set (ZLIB_TGZ_NAME "ZLib.tar.gz" CACHE STRING "Use HDF5_ZLib from compressed file" FORCE)
set (ZLIB_TGZ_ORIGPATH "https://github.com/madler/zlib/releases/download/v1.2.13" CACHE STRING "Use ZLIB from original location" FORCE)
@@ -62,6 +66,13 @@ set (LIBAEC_TGZ_ORIGNAME "libaec-v1.0.6.tar.gz" CACHE STRING "Use LIBAEC from or
set (LIBAEC_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for LIBAEC FetchContent" FORCE)
########################
+# API test options
+########################
+set (KWSYS_TGZ_ORIGPATH "https://gitlab.kitware.com/utils/kwsys/-/archive/master/kwsys-master.tar.gz" CACHE STRING "Use KWSYS from original location" FORCE)
+set (KWSYS_TGZ_ORIGNAME "kwsys-master.tar.gz" CACHE STRING "Use KWSYS from original compressed file" FORCE)
+set (KWSYS_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for KWSYS FetchContent" FORCE)
+
+########################
# filter plugin options
########################
diff --git a/doxygen/dox/Overview.dox b/doxygen/dox/Overview.dox
index 70974ec..f909d8d 100644
--- a/doxygen/dox/Overview.dox
+++ b/doxygen/dox/Overview.dox
@@ -23,8 +23,8 @@ documents cover a mix of tasks, concepts, and reference, to help a specific
\par Versions
Version-specific documentation (see the version in the title area) can be found
here:
- - <a href="https://docs.hdfgroup.org/hdf5/v1_14/index.html">HDF5 1.14.x (this site)</a>
- <a href="https://docs.hdfgroup.org/hdf5/develop/index.html">HDF5 <code>develop</code> branch</a>
+ - <a href="https://docs.hdfgroup.org/hdf5/v1_14/index.html">HDF5 1.14.x (this site)</a>
- <a href="https://docs.hdfgroup.org/hdf5/v1_12/index.html">HDF5 1.12.x</a>
- <a href="https://docs.hdfgroup.org/hdf5/v1_10/index.html">HDF5 1.10.x</a>
- <a href="https://docs.hdfgroup.org/hdf5/v1_8/index.html">HDF5 1.8.x</a>
diff --git a/fortran/src/README b/fortran/src/README
index f9316b5..f73a59a 100644
--- a/fortran/src/README
+++ b/fortran/src/README
@@ -130,5 +130,4 @@ Procedure for adding a new file to the repository
Add the name of the file to the:
(1) Makefile.am located in the same directory as the newfile
(2) CMakeLists.txt located in the same directory as the newfile
- (3) MANIFEST located in the top level directory
diff --git a/hl/c++/test/Makefile.am b/hl/c++/test/Makefile.am
index 251d56a..73f1463 100644
--- a/hl/c++/test/Makefile.am
+++ b/hl/c++/test/Makefile.am
@@ -26,7 +26,7 @@ TEST_PROG=ptableTest
check_PROGRAMS=$(TEST_PROG)
# The tests depend on the hdf5, hdf5 C++, and hdf5_hl libraries
-LDADD=$(LIBH5CPP_HL) $(LIBH5_HL) $(LIBH5CPP) $(LIBHDF5)
+LDADD=$(LIBH5CPP_HL) $(LIBH5_HL) $(LIBH5TEST) $(LIBH5CPP) $(LIBHDF5)
ptableTest_SOURCES=ptableTest.cpp
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 01e1235..ec82272 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -47,6 +47,48 @@ New Features
Configuration:
-------------
+ - Added new CMake options for building and running HDF5 API tests
+ (Experimental)
+
+ HDF5 API tests are an experimental feature, primarily targeted
+ toward HDF5 VOL connector authors, that is currently being developed.
+ These tests exercise the HDF5 API and are being integrated back
+ into the HDF5 library from the HDF5 VOL tests repository
+ (https://github.com/HDFGroup/vol-tests). To support this feature,
+ the following new options have been added to CMake:
+
+ * HDF5_TEST_API: ON/OFF (Default: OFF)
+
+ Controls whether the HDF5 API tests will be built. These tests
+ will only be run during testing of HDF5 if the HDF5_TEST_SERIAL
+ (for serial tests) and HDF5_TEST_PARALLEL (for parallel tests)
+ options are enabled.
+
+ * HDF5_TEST_API_INSTALL: ON/OFF (Default: OFF)
+
+ Controls whether the HDF5 API test executables will be installed
+ on the system alongside the HDF5 library. This option is currently
+ not functional.
+
+ * HDF5_TEST_API_ENABLE_ASYNC: ON/OFF (Default: OFF)
+
+ Controls whether the HDF5 Async API tests will be built. These
+ tests will only be run if the VOL connector used supports Async
+ operations.
+
+ * HDF5_TEST_API_ENABLE_DRIVER: ON/OFF (Default: OFF)
+
+ Controls whether to build the HDF5 API test driver program. This
+ test driver program is useful for VOL connectors that use a
+ client/server model where the server needs to be up and running
+ before the VOL connector can function. This option is currently
+ not functional.
+
+ * HDF5_TEST_API_SERVER: String (Default: "")
+
+ Used to specify a path to the server executable that the test
+ driver program should execute.
+
- Added support for CMake presets file.
CMake supports two main files, CMakePresets.json and CMakeUserPresets.json,
diff --git a/src/H5Ocache.c b/src/H5Ocache.c
index 72261fa..66b092a 100644
--- a/src/H5Ocache.c
+++ b/src/H5Ocache.c
@@ -534,7 +534,7 @@ H5O__cache_notify(H5AC_notify_action_t action, void *_thing)
for (u = 0; u < oh->nmesgs; u++)
if (oh->mesg[u].chunkno == 0)
oh->mesg[u].dirty = FALSE;
-#ifdef H5O_DEBUG
+#ifndef NDEBUG
/* Reset the number of messages dirtied by decoding */
oh->ndecode_dirtied = 0;
#endif
diff --git a/test/API/CMakeLists.txt b/test/API/CMakeLists.txt
new file mode 100644
index 0000000..dd2bca2
--- /dev/null
+++ b/test/API/CMakeLists.txt
@@ -0,0 +1,319 @@
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the COPYING file, which can be found at the root of the source code
+# distribution tree, or in https://www.hdfgroup.org/licenses.
+# If you do not have access to either file, you may request a copy from
+# help@hdfgroup.org.
+#
+
+
+cmake_minimum_required (VERSION 3.18)
+project (HDF5_TEST_API C)
+
+#------------------------------------------------------------------------------
+# Setup testing configuration file
+#------------------------------------------------------------------------------
+if (HDF5_TEST_API_ENABLE_ASYNC)
+ set (H5_API_TEST_HAVE_ASYNC 1)
+endif ()
+
+configure_file(
+ ${CMAKE_CURRENT_SOURCE_DIR}/H5_api_test_config.h.in
+ ${HDF5_TEST_BINARY_DIR}/H5_api_test_config.h
+)
+
+#------------------------------------------------------------------------------
+# Compile kwsys library and setup TestDriver
+#------------------------------------------------------------------------------
+if (HDF5_TEST_API_ENABLE_DRIVER)
+ add_subdirectory (driver)
+endif ()
+
+#------------------------------------------------------------------------------
+# Define for API tests
+#------------------------------------------------------------------------------
+
+set (HDF5_API_TESTS
+ attribute
+ dataset
+ datatype
+ file
+ group
+ link
+ misc
+ object
+)
+
+if (HDF5_TEST_API_ENABLE_ASYNC)
+ set (HDF5_API_TESTS
+ ${HDF5_API_TESTS}
+ async
+ )
+endif ()
+
+# Ported HDF5 tests
+set (HDF5_API_TESTS_EXTRA
+ testhdf5
+)
+
+# List of files generated by the HDF5 API tests which
+# should be cleaned up in case the test failed to remove
+# them
+set (HDF5_API_TESTS_FILES
+ H5_api_test.h5
+ H5_api_async_test.h5
+ H5_api_async_test_0.h5
+ H5_api_async_test_1.h5
+ H5_api_async_test_2.h5
+ H5_api_async_test_3.h5
+ H5_api_async_test_4.h5
+ test_file.h5
+ invalid_params_file.h5
+ excl_flag_file.h5
+ overlapping_file.h5
+ file_permission.h5
+ flush_file.h5
+ property_list_test_file1.h5
+ property_list_test_file2.h5
+ intent_test_file.h5
+ file_obj_count1.h5
+ file_obj_count2.h5
+ file_mount.h5
+ file_name_retrieval.h5
+ filespace_info.h5
+ test_file_id.h5
+ test_close_degree.h5
+ test_free_sections.h5
+ file_size.h5
+ file_info.h5
+ double_group_open.h5
+ ext_link_file.h5
+ ext_link_file_2.h5
+ ext_link_file_3.h5
+ ext_link_file_4.h5
+ ext_link_file_ping_pong_1.h5
+ ext_link_file_ping_pong_2.h5
+ ext_link_invalid_params_file.h5
+ object_copy_test_file.h5
+)
+
+#-----------------------------------------------------------------------------
+# Build the main API test executable
+#-----------------------------------------------------------------------------
+foreach (api_test ${HDF5_API_TESTS})
+ set (HDF5_API_TEST_SRCS
+ ${HDF5_API_TEST_SRCS}
+ ${CMAKE_CURRENT_SOURCE_DIR}/H5_api_${api_test}_test.c
+ )
+endforeach ()
+
+set (HDF5_API_TEST_SRCS
+ ${HDF5_API_TEST_SRCS}
+ ${CMAKE_CURRENT_SOURCE_DIR}/H5_api_test.c
+ ${HDF5_TEST_API_SRC_DIR}/H5_api_test_util.c
+)
+
+add_executable (h5_api_test ${HDF5_API_TEST_SRCS})
+target_include_directories (
+ h5_api_test
+ PRIVATE
+ "${HDF5_SRC_INCLUDE_DIRS}"
+ "${HDF5_TEST_SRC_DIR}"
+ "${HDF5_TEST_API_SRC_DIR}"
+ "${HDF5_SRC_BINARY_DIR}"
+ "${HDF5_TEST_BINARY_DIR}"
+)
+target_compile_options (
+ h5_api_test
+ PRIVATE
+ "${HDF5_CMAKE_C_FLAGS}"
+)
+target_compile_definitions (
+ h5_api_test
+ PRIVATE
+ $<$<CONFIG:Developer>:${HDF5_DEVELOPER_DEFS}>
+)
+if (NOT BUILD_SHARED_LIBS)
+ TARGET_C_PROPERTIES (h5_api_test STATIC)
+ target_link_libraries (
+ h5_api_test
+ PRIVATE
+ ${HDF5_TEST_LIB_TARGET}
+ )
+else ()
+ TARGET_C_PROPERTIES (h5_api_test SHARED)
+ target_link_libraries (
+ h5_api_test
+ PRIVATE
+ ${HDF5_TEST_LIBSH_TARGET}
+ )
+endif ()
+set_target_properties (
+ h5_api_test
+ PROPERTIES
+ FOLDER test/API
+)
+# Add Target to clang-format
+if (HDF5_ENABLE_FORMATTERS)
+ clang_format (HDF5_TEST_h5_api_test_FORMAT h5_api_test)
+endif ()
+
+#-----------------------------------------------------------------------------
+# Build the ported HDF5 test executables
+#-----------------------------------------------------------------------------
+foreach (api_test_extra ${HDF5_API_TESTS_EXTRA})
+ unset (HDF5_API_TEST_EXTRA_SRCS)
+
+ set (HDF5_API_TEST_EXTRA_SRCS
+ ${HDF5_API_TEST_EXTRA_SRCS}
+ ${CMAKE_CURRENT_SOURCE_DIR}/${api_test_extra}.c
+ )
+
+ if (${api_test_extra} STREQUAL "testhdf5")
+ set (HDF5_API_TEST_EXTRA_SRCS
+ ${HDF5_API_TEST_EXTRA_SRCS}
+ ${CMAKE_CURRENT_SOURCE_DIR}/tarray.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/tattr.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/tchecksum.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/tconfig.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/tcoords.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/tfile.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/tgenprop.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/th5o.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/th5s.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/tid.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/titerate.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/tmisc.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/trefer.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/tselect.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/ttime.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/tunicode.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/tvlstr.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/tvltypes.c
+ )
+ endif ()
+
+ add_executable (h5_api_test_${api_test_extra} ${HDF5_API_TEST_EXTRA_SRCS})
+ target_include_directories (
+ h5_api_test_${api_test_extra}
+ PRIVATE
+ "${HDF5_SRC_INCLUDE_DIRS}"
+ "${HDF5_TEST_SRC_DIR}"
+ "${HDF5_TEST_API_SRC_DIR}"
+ "${HDF5_SRC_BINARY_DIR}"
+ "${HDF5_TEST_BINARY_DIR}"
+ )
+ target_compile_options (
+ h5_api_test_${api_test_extra}
+ PRIVATE
+ "${HDF5_CMAKE_C_FLAGS}"
+ )
+ target_compile_definitions (
+ h5_api_test_${api_test_extra}
+ PRIVATE
+ $<$<CONFIG:Developer>:${HDF5_DEVELOPER_DEFS}>
+ )
+ if (NOT BUILD_SHARED_LIBS)
+ TARGET_C_PROPERTIES (h5_api_test_${api_test_extra} STATIC)
+ target_link_libraries (h5_api_test_${api_test_extra} PRIVATE ${HDF5_TEST_LIB_TARGET})
+ else ()
+ TARGET_C_PROPERTIES (h5_api_test_${api_test_extra} SHARED)
+ target_link_libraries (h5_api_test_${api_test_extra} PRIVATE ${HDF5_TEST_LIBSH_TARGET})
+ endif ()
+ set_target_properties (
+ h5_api_test_${api_test_extra}
+ PROPERTIES
+ FOLDER test/API
+ )
+ # Add Target to clang-format
+ if (HDF5_ENABLE_FORMATTERS)
+ clang_format (HDF5_TEST_h5_api_test_${api_test_extra}_FORMAT h5_api_test_${api_test_extra})
+ endif ()
+endforeach ()
+
+#-----------------------------------------------------------------------------
+# Add tests if HDF5 serial testing is enabled
+#-----------------------------------------------------------------------------
+if (HDF5_TEST_SERIAL)
+ if (HDF5_TEST_API_ENABLE_DRIVER)
+ if ("${HDF5_TEST_API_SERVER}" STREQUAL "")
+ message (FATAL_ERROR "Please set HDF5_TEST_API_SERVER to point to a server executable for the test driver program.")
+ endif ()
+
+ # Driver options
+ if (HDF5_TEST_API_SERVER_ALLOW_ERRORS)
+ set (HDF5_TEST_API_DRIVER_EXTRA_FLAGS --allow-server-errors)
+ endif ()
+ if (HDF5_TEST_API_CLIENT_HELPER)
+ set (HDF5_TEST_API_DRIVER_EXTRA_FLAGS ${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
+ --client-helper ${HDF5_TEST_API_CLIENT_HELPER}
+ )
+ endif ()
+ if (HDF5_TEST_API_CLIENT_INIT)
+ set (HDF5_TEST_API_DRIVER_EXTRA_FLAGS ${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
+ --client-init ${HDF5_TEST_API_CLIENT_INIT}
+ )
+ endif ()
+
+ set(last_api_test "")
+ foreach (api_test ${HDF5_API_TESTS})
+ add_test (
+ NAME "h5_api_test_${api_test}"
+ COMMAND $<TARGET_FILE:h5_api_test_driver>
+ --server ${HDF5_TEST_API_SERVER}
+ --client $<TARGET_FILE:h5_api_test> "${api_test}"
+ --serial
+ ${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
+ )
+
+ set_tests_properties("h5_api_test_${api_test}" PROPERTIES DEPENDS "${last_api_test}")
+
+ set(last_api_test "h5_api_test_${api_test}")
+ endforeach ()
+
+ foreach (hdf5_test ${HDF5_API_TESTS_EXTRA})
+ add_test (
+ NAME "h5_api_test_${hdf5_test}"
+ COMMAND $<TARGET_FILE:h5_api_test_driver>
+ --server ${HDF5_TEST_API_SERVER}
+ --client $<TARGET_FILE:h5_api_test_${hdf5_test}>
+ --serial
+ ${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
+ )
+ endforeach ()
+
+ # Hook external tests to same test suite
+ foreach (ext_api_test ${HDF5_API_EXT_SERIAL_TESTS})
+ add_test (
+ NAME "h5_api_ext_test_${ext_api_test}"
+ COMMAND $<TARGET_FILE:h5_api_test_driver>
+ --server ${HDF5_TEST_API_SERVER}
+ --client $<TARGET_FILE:${ext_api_test}>
+ --serial
+ ${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
+ )
+ endforeach ()
+ else ()
+ set(last_api_test "")
+ foreach (api_test ${HDF5_API_TESTS})
+ add_test (
+ NAME "h5_api_test_${api_test}"
+ COMMAND $<TARGET_FILE:h5_api_test> "${api_test}"
+ )
+
+ set_tests_properties("h5_api_test_${api_test}" PROPERTIES DEPENDS "${last_api_test}")
+
+ set(last_api_test "h5_api_test_${api_test}")
+ endforeach ()
+
+ foreach (hdf5_test ${HDF5_API_TESTS_EXTRA})
+ add_test (
+ NAME "h5_api_test_${hdf5_test}"
+ COMMAND $<TARGET_FILE:h5_api_test_${hdf5_test}>
+ )
+ endforeach ()
+ endif ()
+endif ()
diff --git a/test/API/H5_api_async_test.c b/test/API/H5_api_async_test.c
new file mode 100644
index 0000000..b5208ba
--- /dev/null
+++ b/test/API/H5_api_async_test.c
@@ -0,0 +1,2730 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_async_test.h"
+
+#ifdef H5ESpublic_H
+
+static int test_one_dataset_io(void);
+static int test_multi_dataset_io(void);
+static int test_multi_file_dataset_io(void);
+static int test_multi_file_grp_dset_io(void);
+static int test_set_extent(void);
+static int test_attribute_exists(void);
+static int test_attribute_io(void);
+static int test_attribute_io_tconv(void);
+static int test_attribute_io_compound(void);
+static int test_group(void);
+static int test_link(void);
+static int test_ocopy_orefresh(void);
+static int test_file_reopen(void);
+
+/*
+ * The array of async tests to be performed.
+ */
+static int (*async_tests[])(void) = {
+ test_one_dataset_io,
+ test_multi_dataset_io,
+ test_multi_file_dataset_io,
+ test_multi_file_grp_dset_io,
+ test_set_extent,
+ test_attribute_exists,
+ test_attribute_io,
+ test_attribute_io_tconv,
+ test_attribute_io_compound,
+ test_group,
+ test_link,
+ test_ocopy_orefresh,
+ test_file_reopen,
+};
+
+/* Highest "printf" file created (starting at 0) */
+int max_printf_file = -1;
+
+/*
+ * Create file and dataset, write to dataset
+ */
+static int
+test_one_dataset_io(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t es_id = H5I_INVALID_HID;
+ hsize_t dims[2] = {6, 10};
+ size_t num_in_progress;
+ hbool_t op_failed;
+ int wbuf[6][10];
+ int rbuf[6][10];
+ int i, j;
+
+ TESTING_MULTIPART("single dataset I/O");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, dataset, or flush aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ /* Create dataspace */
+ if ((space_id = H5Screate_simple(2, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ /* Create event stack */
+ if ((es_id = H5EScreate()) < 0)
+ TEST_ERROR;
+
+ /* Create file asynchronously */
+ if ((file_id = H5Fcreate_async(ASYNC_API_TEST_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Create the dataset asynchronously */
+ if ((dset_id = H5Dcreate_async(file_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT, es_id)) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(single_dset_eswait)
+ {
+ TESTING_2("synchronization using H5ESwait()");
+
+ /* Initialize wbuf */
+ for (i = 0; i < 6; i++)
+ for (j = 0; j < 10; j++)
+ wbuf[i][j] = 10 * i + j;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf, es_id) < 0)
+ PART_TEST_ERROR(single_dset_eswait);
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(single_dset_eswait);
+ if (op_failed)
+ PART_TEST_ERROR(single_dset_eswait);
+
+ /* Read the dataset asynchronously */
+ if (H5Dread_async(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf, es_id) < 0)
+ PART_TEST_ERROR(single_dset_eswait);
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(single_dset_eswait);
+ if (op_failed)
+ PART_TEST_ERROR(single_dset_eswait);
+
+ /* Verify the read data */
+ for (i = 0; i < 6; i++)
+ for (j = 0; j < 10; j++)
+ if (wbuf[i][j] != rbuf[i][j]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(single_dset_eswait);
+ } /* end if */
+
+ PASSED();
+ }
+ PART_END(single_dset_eswait);
+
+ PART_BEGIN(single_dset_dclose)
+ {
+ TESTING_2("synchronization using H5Dclose()");
+
+ /* Update wbuf */
+ for (i = 0; i < 6; i++)
+ for (j = 0; j < 10; j++)
+ wbuf[i][j] += 6 * 10;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf, es_id) < 0)
+ PART_TEST_ERROR(single_dset_dclose);
+
+ /* Close the dataset synchronously */
+ if (H5Dclose(dset_id) < 0)
+ PART_TEST_ERROR(single_dset_dclose);
+
+ /* Re-open the dataset asynchronously */
+ if ((dset_id = H5Dopen_async(file_id, "dset", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(single_dset_dclose);
+
+ /* Read the dataset asynchronously */
+ if (H5Dread_async(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf, es_id) < 0)
+ PART_TEST_ERROR(single_dset_dclose);
+
+ /* Close the dataset synchronously */
+ if (H5Dclose(dset_id) < 0)
+ PART_TEST_ERROR(single_dset_dclose);
+
+ /* Verify the read data */
+ for (i = 0; i < 6; i++)
+ for (j = 0; j < 10; j++)
+ if (wbuf[i][j] != rbuf[i][j]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(single_dset_dclose);
+ } /* end if */
+
+ /* Re-open the dataset asynchronously */
+ if ((dset_id = H5Dopen_async(file_id, "dset", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(single_dset_dclose);
+
+ PASSED();
+ }
+ PART_END(single_dset_dclose);
+
+ PART_BEGIN(single_dset_dflush)
+ {
+ TESTING_2("synchronization using H5Oflush_async()");
+
+ /* Update wbuf */
+ for (i = 0; i < 6; i++)
+ for (j = 0; j < 10; j++)
+ wbuf[i][j] += 6 * 10;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf, es_id) < 0)
+ PART_TEST_ERROR(single_dset_dflush);
+
+ /* Flush the dataset asynchronously. This will effectively work as a
+ * barrier, guaranteeing the read takes place after the write. */
+ if (H5Oflush_async(dset_id, es_id) < 0)
+ PART_TEST_ERROR(single_dset_dflush);
+
+ /* Read the dataset asynchronously */
+ if (H5Dread_async(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf, es_id) < 0)
+ PART_TEST_ERROR(single_dset_dflush);
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(single_dset_dflush);
+ if (op_failed)
+ PART_TEST_ERROR(single_dset_dflush);
+
+ /* Verify the read data */
+ for (i = 0; i < 6; i++)
+ for (j = 0; j < 10; j++)
+ if (wbuf[i][j] != rbuf[i][j]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(single_dset_dflush);
+ } /* end if */
+
+ PASSED();
+ }
+ PART_END(single_dset_dflush);
+
+ PART_BEGIN(single_dset_fclose)
+ {
+ TESTING_2("synchronization using H5Fclose()");
+
+ /* Update wbuf */
+ for (i = 0; i < 6; i++)
+ for (j = 0; j < 10; j++)
+ wbuf[i][j] += 6 * 10;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf, es_id) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id, es_id) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Close the file synchronously */
+ if (H5Fclose(file_id) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Reopen the file asynchronously. */
+ if ((file_id = H5Fopen_async(ASYNC_API_TEST_FILE, H5F_ACC_RDONLY, H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Re-open the dataset asynchronously */
+ if ((dset_id = H5Dopen_async(file_id, "dset", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Read the dataset asynchronously */
+ if (H5Dread_async(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf, es_id) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id, es_id) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Close the file synchronously */
+ if (H5Fclose(file_id) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Verify the read data */
+ for (i = 0; i < 6; i++)
+ for (j = 0; j < 10; j++)
+ if (wbuf[i][j] != rbuf[i][j]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(single_dset_fclose);
+ } /* end if */
+
+ PASSED();
+ }
+ PART_END(single_dset_fclose);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ TEST_ERROR;
+ if (op_failed)
+ TEST_ERROR;
+
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5ESclose(es_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(space_id);
+ H5Dclose(dset_id);
+ H5Fclose(file_id);
+ H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+ H5ESclose(es_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+} /* end test_one_dataset_io() */
+
+/*
+ * Create a file containing multiple datasets, write to them asynchronously
+ * and read the data back, verifying it round-trips.  Exercises both the
+ * "keep datasets open" and "close datasets between I/O" patterns.
+ *
+ * Return: 0 on success, 1 on failure
+ */
+static int
+test_multi_dataset_io(void)
+{
+    hid_t   file_id    = H5I_INVALID_HID;
+    hid_t   dset_id[5] = {H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID};
+    hid_t   space_id   = H5I_INVALID_HID;
+    hid_t   es_id      = H5I_INVALID_HID;
+    hsize_t dims[2]    = {6, 10};
+    size_t  num_in_progress;
+    hbool_t op_failed;
+    char    dset_name[32];
+    int     wbuf[5][6][10];
+    int     rbuf[5][6][10];
+    int     i, j, k;
+
+    TESTING_MULTIPART("multi dataset I/O");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, dataset, or flush aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    /* Create dataspace */
+    if ((space_id = H5Screate_simple(2, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Create file asynchronously */
+    if ((file_id = H5Fcreate_async(ASYNC_API_TEST_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(multi_dset_open)
+        {
+            TESTING_2("keeping datasets open");
+
+            /* Loop over datasets */
+            for (i = 0; i < 5; i++) {
+                /* Set dataset name (bounded write in case the format ever
+                 * grows beyond the buffer) */
+                snprintf(dset_name, sizeof(dset_name), "dset%d", i);
+
+                /* Create the dataset asynchronously */
+                if ((dset_id[i] = H5Dcreate_async(file_id, dset_name, H5T_NATIVE_INT, space_id, H5P_DEFAULT,
+                                                  H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_dset_open);
+
+                /* Initialize wbuf.  Must use a new slice of wbuf for each dset
+                 * since we can't overwrite the buffers until I/O is done. */
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        wbuf[i][j][k] = 6 * 10 * i + 10 * j + k;
+
+                /* Write the dataset asynchronously */
+                if (H5Dwrite_async(dset_id[i], H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf[i],
+                                   es_id) < 0)
+                    PART_TEST_ERROR(multi_dset_open);
+            } /* end for */
+
+            /* Flush the file asynchronously.  This will effectively work as a
+             * barrier, guaranteeing the read takes place after the write. */
+            if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+                PART_TEST_ERROR(multi_dset_open);
+
+            /* Loop over datasets */
+            for (i = 0; i < 5; i++) {
+                /* Read the dataset asynchronously */
+                if (H5Dread_async(dset_id[i], H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf[i],
+                                  es_id) < 0)
+                    PART_TEST_ERROR(multi_dset_open);
+            } /* end for */
+
+            /* Wait for the event stack to complete */
+            if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+                PART_TEST_ERROR(multi_dset_open);
+            if (op_failed)
+                PART_TEST_ERROR(multi_dset_open);
+
+            /* Verify the read data */
+            for (i = 0; i < 5; i++)
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        if (wbuf[i][j][k] != rbuf[i][j][k]) {
+                            H5_FAILED();
+                            HDprintf("    data verification failed\n");
+                            PART_ERROR(multi_dset_open);
+                        } /* end if */
+
+            /* Close the datasets */
+            for (i = 0; i < 5; i++)
+                if (H5Dclose(dset_id[i]) < 0)
+                    PART_TEST_ERROR(multi_dset_open);
+
+            PASSED();
+        }
+        PART_END(multi_dset_open);
+
+        PART_BEGIN(multi_dset_close)
+        {
+            TESTING_2("closing datasets between I/O");
+
+            /* Loop over datasets.  Only one dataset is open at any moment in
+             * this part, so dset_id[0] is deliberately reused each time. */
+            for (i = 0; i < 5; i++) {
+                /* Set dataset name */
+                snprintf(dset_name, sizeof(dset_name), "dset%d", i);
+
+                /* Open the dataset asynchronously */
+                if ((dset_id[0] = H5Dopen_async(file_id, dset_name, H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_dset_close);
+
+                /* Update wbuf */
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        wbuf[i][j][k] += 5 * 6 * 10;
+
+                /* Write the dataset asynchronously */
+                if (H5Dwrite_async(dset_id[0], H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf[i],
+                                   es_id) < 0)
+                    PART_TEST_ERROR(multi_dset_close);
+
+                /* Close the dataset asynchronously */
+                if (H5Dclose_async(dset_id[0], es_id) < 0)
+                    PART_TEST_ERROR(multi_dset_close);
+            } /* end for */
+
+            /* Flush the file asynchronously.  This will effectively work as a
+             * barrier, guaranteeing the read takes place after the write. */
+            if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+                PART_TEST_ERROR(multi_dset_close);
+
+            /* Loop over datasets */
+            for (i = 0; i < 5; i++) {
+                /* Set dataset name */
+                snprintf(dset_name, sizeof(dset_name), "dset%d", i);
+
+                /* Open the dataset asynchronously */
+                if ((dset_id[0] = H5Dopen_async(file_id, dset_name, H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_dset_close);
+
+                /* Read the dataset asynchronously */
+                if (H5Dread_async(dset_id[0], H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf[i],
+                                  es_id) < 0)
+                    PART_TEST_ERROR(multi_dset_close);
+
+                /* Close the dataset asynchronously */
+                if (H5Dclose_async(dset_id[0], es_id) < 0)
+                    PART_TEST_ERROR(multi_dset_close);
+            } /* end for */
+
+            /* Wait for the event stack to complete */
+            if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+                PART_TEST_ERROR(multi_dset_close);
+            if (op_failed)
+                PART_TEST_ERROR(multi_dset_close);
+
+            /* Verify the read data */
+            for (i = 0; i < 5; i++)
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        if (wbuf[i][j][k] != rbuf[i][j][k]) {
+                            H5_FAILED();
+                            HDprintf("    data verification failed\n");
+                            PART_ERROR(multi_dset_close);
+                        } /* end if */
+
+            PASSED();
+        }
+        PART_END(multi_dset_close);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        for (i = 0; i < 5; i++)
+            H5Dclose(dset_id[i]);
+        H5Fclose(file_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* end test_multi_dataset_io() */
+
+/*
+ * Create multiple files, each with a single dataset, write to them and read
+ * from them.  Exercises keeping files/datasets open, closing datasets
+ * between I/O, and closing files between I/O.
+ *
+ * Return: 0 on success, 1 on failure
+ */
+static int
+test_multi_file_dataset_io(void)
+{
+    hid_t   file_id[5] = {H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID};
+    hid_t   dset_id[5] = {H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID};
+    hid_t   space_id   = H5I_INVALID_HID;
+    hid_t   es_id      = H5I_INVALID_HID;
+    hsize_t dims[2]    = {6, 10};
+    size_t  num_in_progress;
+    hbool_t op_failed;
+    char    file_name[32];
+    int     wbuf[5][6][10];
+    int     rbuf[5][6][10];
+    int     i, j, k;
+
+    TESTING_MULTIPART("multi file dataset I/O");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, dataset, or flush aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    /* Create dataspace */
+    if ((space_id = H5Screate_simple(2, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(multi_file_dset_open)
+        {
+            TESTING_2("keeping files and datasets open");
+
+            /* Loop over files */
+            for (i = 0; i < 5; i++) {
+                /* Set file name (bounded write in case the format ever grows
+                 * beyond the buffer) */
+                snprintf(file_name, sizeof(file_name), ASYNC_API_TEST_FILE_PRINTF, i);
+
+                /* Create file asynchronously */
+                if ((file_id[i] =
+                         H5Fcreate_async(file_name, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_dset_open);
+                /* Track the highest-numbered file created; presumably used by
+                 * the test-suite cleanup to remove them -- confirm in caller */
+                if (i > max_printf_file)
+                    max_printf_file = i;
+
+                /* Create the dataset asynchronously */
+                if ((dset_id[i] = H5Dcreate_async(file_id[i], "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT,
+                                                  H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_dset_open);
+
+                /* Initialize wbuf.  Must use a new slice of wbuf for each dset
+                 * since we can't overwrite the buffers until I/O is done. */
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        wbuf[i][j][k] = 6 * 10 * i + 10 * j + k;
+
+                /* Write the dataset asynchronously */
+                if (H5Dwrite_async(dset_id[i], H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf[i],
+                                   es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_open);
+            } /* end for */
+
+            /* Loop over files */
+            for (i = 0; i < 5; i++) {
+                /* Flush the dataset asynchronously.  This will effectively work as a
+                 * barrier, guaranteeing the read takes place after the write. */
+                if (H5Oflush_async(dset_id[i], es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_open);
+
+                /* Read the dataset asynchronously */
+                if (H5Dread_async(dset_id[i], H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf[i],
+                                  es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_open);
+            } /* end for */
+
+            /* Wait for the event stack to complete */
+            if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+                PART_TEST_ERROR(multi_file_dset_open);
+            if (op_failed)
+                PART_TEST_ERROR(multi_file_dset_open);
+
+            /* Verify the read data */
+            for (i = 0; i < 5; i++)
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        if (wbuf[i][j][k] != rbuf[i][j][k]) {
+                            H5_FAILED();
+                            HDprintf("    data verification failed\n");
+                            PART_ERROR(multi_file_dset_open);
+                        } /* end if */
+
+            /* Close the datasets */
+            for (i = 0; i < 5; i++)
+                if (H5Dclose(dset_id[i]) < 0)
+                    PART_TEST_ERROR(multi_file_dset_open);
+
+            PASSED();
+        }
+        PART_END(multi_file_dset_open);
+
+        PART_BEGIN(multi_file_dset_dclose)
+        {
+            TESTING_2("closing datasets between I/O");
+
+            /* Loop over files.  Only one dataset is open at a time in this
+             * part, so dset_id[0] is deliberately reused each iteration. */
+            for (i = 0; i < 5; i++) {
+                /* Open the dataset asynchronously */
+                if ((dset_id[0] = H5Dopen_async(file_id[i], "dset", H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_dset_dclose);
+
+                /* Update wbuf */
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        wbuf[i][j][k] += 5 * 6 * 10;
+
+                /* Write the dataset asynchronously */
+                if (H5Dwrite_async(dset_id[0], H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf[i],
+                                   es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_dclose);
+
+                /* Close the dataset asynchronously */
+                if (H5Dclose_async(dset_id[0], es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_dclose);
+            } /* end for */
+
+            /* Loop over files */
+            for (i = 0; i < 5; i++) {
+                /* Flush the file asynchronously.  This will effectively work as a
+                 * barrier, guaranteeing the read takes place after the write.
+                 * (Previously jumped to the multi_file_dset_open label on
+                 * failure, which was a copy-paste error.) */
+                if (H5Fflush_async(file_id[i], H5F_SCOPE_LOCAL, es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_dclose);
+
+                /* Open the dataset asynchronously */
+                if ((dset_id[0] = H5Dopen_async(file_id[i], "dset", H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_dset_dclose);
+
+                /* Read the dataset asynchronously */
+                if (H5Dread_async(dset_id[0], H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf[i],
+                                  es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_dclose);
+
+                /* Close the dataset asynchronously */
+                if (H5Dclose_async(dset_id[0], es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_dclose);
+            } /* end for */
+
+            /* Wait for the event stack to complete */
+            if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+                PART_TEST_ERROR(multi_file_dset_dclose);
+            if (op_failed)
+                PART_TEST_ERROR(multi_file_dset_dclose);
+
+            /* Verify the read data */
+            for (i = 0; i < 5; i++)
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        if (wbuf[i][j][k] != rbuf[i][j][k]) {
+                            H5_FAILED();
+                            HDprintf("    data verification failed\n");
+                            PART_ERROR(multi_file_dset_dclose);
+                        } /* end if */
+
+            /* Close the files */
+            for (i = 0; i < 5; i++)
+                if (H5Fclose(file_id[i]) < 0)
+                    PART_TEST_ERROR(multi_file_dset_dclose);
+
+            PASSED();
+        }
+        PART_END(multi_file_dset_dclose);
+
+        PART_BEGIN(multi_file_dset_fclose)
+        {
+            TESTING_2("closing files between I/O");
+
+            /* Loop over files.  Only one file and one dataset are open at a
+             * time here, so file_id[0]/dset_id[0] are reused each iteration. */
+            for (i = 0; i < 5; i++) {
+                /* Set file name */
+                snprintf(file_name, sizeof(file_name), ASYNC_API_TEST_FILE_PRINTF, i);
+
+                /* Open the file asynchronously */
+                if ((file_id[0] = H5Fopen_async(file_name, H5F_ACC_RDWR, H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_dset_fclose);
+
+                /* Open the dataset asynchronously */
+                if ((dset_id[0] = H5Dopen_async(file_id[0], "dset", H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_dset_fclose);
+
+                /* Update wbuf */
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        wbuf[i][j][k] += 5 * 6 * 10;
+
+                /* Write the dataset asynchronously */
+                if (H5Dwrite_async(dset_id[0], H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf[i],
+                                   es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_fclose);
+
+                /* Close the dataset asynchronously */
+                if (H5Dclose_async(dset_id[0], es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_fclose);
+
+                /* Close the file asynchronously */
+                if (H5Fclose_async(file_id[0], es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_fclose);
+            } /* end for */
+
+            /* Wait for the event stack to complete */
+            if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+                PART_TEST_ERROR(multi_file_dset_fclose);
+            if (op_failed)
+                PART_TEST_ERROR(multi_file_dset_fclose);
+
+            /* Loop over files */
+            for (i = 0; i < 5; i++) {
+                /* Set file name */
+                snprintf(file_name, sizeof(file_name), ASYNC_API_TEST_FILE_PRINTF, i);
+
+                /* Open the file asynchronously */
+                if ((file_id[0] = H5Fopen_async(file_name, H5F_ACC_RDONLY, H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_dset_fclose);
+
+                /* Open the dataset asynchronously */
+                if ((dset_id[0] = H5Dopen_async(file_id[0], "dset", H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_dset_fclose);
+
+                /* Read the dataset asynchronously */
+                if (H5Dread_async(dset_id[0], H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf[i],
+                                  es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_fclose);
+
+                /* Close the dataset asynchronously */
+                if (H5Dclose_async(dset_id[0], es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_fclose);
+
+                /* Close the file asynchronously */
+                if (H5Fclose_async(file_id[0], es_id) < 0)
+                    PART_TEST_ERROR(multi_file_dset_fclose);
+            } /* end for */
+
+            /* Wait for the event stack to complete */
+            if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+                PART_TEST_ERROR(multi_file_dset_fclose);
+            if (op_failed)
+                PART_TEST_ERROR(multi_file_dset_fclose);
+
+            /* Verify the read data */
+            for (i = 0; i < 5; i++)
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        if (wbuf[i][j][k] != rbuf[i][j][k]) {
+                            H5_FAILED();
+                            HDprintf("    data verification failed\n");
+                            PART_ERROR(multi_file_dset_fclose);
+                        } /* end if */
+
+            PASSED();
+        }
+        PART_END(multi_file_dset_fclose);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        for (i = 0; i < 5; i++) {
+            H5Dclose(dset_id[i]);
+            H5Fclose(file_id[i]);
+        } /* end for */
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* end test_multi_file_dataset_io() */
+
+/*
+ * Create multiple files, each with a single group and dataset, write to them
+ * and read from them.  Runs the same sequence twice: once letting operations
+ * queue freely, and once "kicking" the event stack with zero-timeout
+ * H5ESwait() calls between iterations to force incremental progress.
+ *
+ * Return: 0 on success, 1 on failure
+ */
+static int
+test_multi_file_grp_dset_io(void)
+{
+    hid_t   file_id  = H5I_INVALID_HID;
+    hid_t   grp_id   = H5I_INVALID_HID;
+    hid_t   dset_id  = H5I_INVALID_HID;
+    hid_t   space_id = H5I_INVALID_HID;
+    hid_t   es_id    = H5I_INVALID_HID;
+    hsize_t dims[2]  = {6, 10};
+    size_t  num_in_progress;
+    hbool_t op_failed;
+    char    file_name[32];
+    int     wbuf[5][6][10];
+    int     rbuf[5][6][10];
+    int     i, j, k;
+
+    TESTING_MULTIPART("multi file dataset I/O with groups");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    /* Create dataspace */
+    if ((space_id = H5Screate_simple(2, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(multi_file_grp_dset_no_kick)
+        {
+            TESTING_2("without intermediate calls to H5ESwait()");
+
+            /* Loop over files */
+            for (i = 0; i < 5; i++) {
+                /* Set file name (bounded write in case the format ever grows
+                 * beyond the buffer) */
+                snprintf(file_name, sizeof(file_name), ASYNC_API_TEST_FILE_PRINTF, i);
+
+                /* Create file asynchronously */
+                if ((file_id = H5Fcreate_async(file_name, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+                    0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+                /* Track the highest-numbered file created; presumably used by
+                 * the test-suite cleanup to remove them -- confirm in caller */
+                if (i > max_printf_file)
+                    max_printf_file = i;
+
+                /* Create the group asynchronously */
+                if ((grp_id = H5Gcreate_async(file_id, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+                    0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+                /* Create the dataset asynchronously */
+                if ((dset_id = H5Dcreate_async(grp_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT,
+                                               H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+                /* Initialize wbuf.  Must use a new slice of wbuf for each dset
+                 * since we can't overwrite the buffers until I/O is done. */
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        wbuf[i][j][k] = 6 * 10 * i + 10 * j + k;
+
+                /* Write the dataset asynchronously */
+                if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf[i], es_id) <
+                    0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+                /* Close the dataset asynchronously */
+                if (H5Dclose_async(dset_id, es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+                /* Close the group asynchronously */
+                if (H5Gclose_async(grp_id, es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+                /* Close the file asynchronously */
+                if (H5Fclose_async(file_id, es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+            } /* end for */
+
+            /* Wait for the event stack to complete */
+            if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+                PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+            if (op_failed)
+                PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+            /* Loop over files */
+            for (i = 0; i < 5; i++) {
+                /* Set file name */
+                snprintf(file_name, sizeof(file_name), ASYNC_API_TEST_FILE_PRINTF, i);
+
+                /* Open the file asynchronously */
+                if ((file_id = H5Fopen_async(file_name, H5F_ACC_RDONLY, H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+                /* Open the group asynchronously */
+                if ((grp_id = H5Gopen_async(file_id, "grp", H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+                /* Open the dataset asynchronously */
+                if ((dset_id = H5Dopen_async(grp_id, "dset", H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+                /* Read the dataset asynchronously */
+                if (H5Dread_async(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf[i], es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+                /* Close the dataset asynchronously */
+                if (H5Dclose_async(dset_id, es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+                /* Close the group asynchronously */
+                if (H5Gclose_async(grp_id, es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+                /* Close the file asynchronously */
+                if (H5Fclose_async(file_id, es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+            } /* end for */
+
+            /* Wait for the event stack to complete */
+            if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+                PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+            if (op_failed)
+                PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+            /* Verify the read data */
+            for (i = 0; i < 5; i++)
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        if (wbuf[i][j][k] != rbuf[i][j][k]) {
+                            H5_FAILED();
+                            HDprintf("    data verification failed\n");
+                            PART_ERROR(multi_file_grp_dset_no_kick);
+                        } /* end if */
+
+            PASSED();
+        }
+        PART_END(multi_file_grp_dset_no_kick);
+
+        PART_BEGIN(multi_file_grp_dset_kick)
+        {
+            TESTING_2("with intermediate calls to H5ESwait() (0 timeout)");
+
+            /* Loop over files.  The files are re-created with H5F_ACC_TRUNC,
+             * overwriting those from the previous part. */
+            for (i = 0; i < 5; i++) {
+                /* Set file name */
+                snprintf(file_name, sizeof(file_name), ASYNC_API_TEST_FILE_PRINTF, i);
+
+                /* Create file asynchronously */
+                if ((file_id = H5Fcreate_async(file_name, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+                    0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+                if (i > max_printf_file)
+                    max_printf_file = i;
+
+                /* Create the group asynchronously */
+                if ((grp_id = H5Gcreate_async(file_id, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+                    0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+                /* Create the dataset asynchronously */
+                if ((dset_id = H5Dcreate_async(grp_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT,
+                                               H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+                /* Update wbuf so this part writes values distinct from the
+                 * previous part's */
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        wbuf[i][j][k] += 5 * 6 * 10;
+
+                /* Write the dataset asynchronously */
+                if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf[i], es_id) <
+                    0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+                /* Close the dataset asynchronously */
+                if (H5Dclose_async(dset_id, es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+                /* Close the group asynchronously */
+                if (H5Gclose_async(grp_id, es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+                /* Close the file asynchronously */
+                if (H5Fclose_async(file_id, es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+                /* Kick the event stack to make progress */
+                if (H5ESwait(es_id, 0, &num_in_progress, &op_failed) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+                if (op_failed)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+            } /* end for */
+
+            /* Wait for the event stack to complete */
+            if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+                PART_TEST_ERROR(multi_file_grp_dset_kick);
+            if (op_failed)
+                PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+            /* Loop over files */
+            for (i = 0; i < 5; i++) {
+                /* Set file name */
+                snprintf(file_name, sizeof(file_name), ASYNC_API_TEST_FILE_PRINTF, i);
+
+                /* Open the file asynchronously */
+                if ((file_id = H5Fopen_async(file_name, H5F_ACC_RDONLY, H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+                /* Open the group asynchronously */
+                if ((grp_id = H5Gopen_async(file_id, "grp", H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+                /* Open the dataset asynchronously */
+                if ((dset_id = H5Dopen_async(grp_id, "dset", H5P_DEFAULT, es_id)) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+                /* Read the dataset asynchronously */
+                if (H5Dread_async(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf[i], es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+                /* Close the dataset asynchronously */
+                if (H5Dclose_async(dset_id, es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+                /* Close the group asynchronously */
+                if (H5Gclose_async(grp_id, es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+                /* Close the file asynchronously */
+                if (H5Fclose_async(file_id, es_id) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+                /* Kick the event stack to make progress */
+                if (H5ESwait(es_id, 0, &num_in_progress, &op_failed) < 0)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+                if (op_failed)
+                    PART_TEST_ERROR(multi_file_grp_dset_kick);
+            } /* end for */
+
+            /* Wait for the event stack to complete */
+            if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+                PART_TEST_ERROR(multi_file_grp_dset_kick);
+            if (op_failed)
+                PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+            /* Verify the read data */
+            for (i = 0; i < 5; i++)
+                for (j = 0; j < 6; j++)
+                    for (k = 0; k < 10; k++)
+                        if (wbuf[i][j][k] != rbuf[i][j][k]) {
+                            H5_FAILED();
+                            HDprintf("    data verification failed\n");
+                            PART_ERROR(multi_file_grp_dset_kick);
+                        } /* end if */
+
+            PASSED();
+        }
+        PART_END(multi_file_grp_dset_kick);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Gclose(grp_id);
+        H5Dclose(dset_id);
+        H5Fclose(file_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* end test_multi_file_grp_dset_io() */
+
+/*
+ * Create file and dataset, write to dataset while growing it one row at a
+ * time with H5Dset_extent_async(), then read back and verify.  Also checks
+ * that dataspace IDs returned by H5Dget_space_async() ("future" IDs) report
+ * the correct extents without an intervening H5ESwait().
+ *
+ * Return: 0 on success, 1 on failure
+ */
+static int
+test_set_extent(void)
+{
+    hid_t   file_id = H5I_INVALID_HID;
+    hid_t   dset_id = H5I_INVALID_HID;
+    hid_t   fspace_id[6] = {H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID,
+                            H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID};
+    hid_t   fspace_out[6] = {H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID,
+                             H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID};
+    hid_t   mspace_id = H5I_INVALID_HID;
+    hid_t   dcpl_id = H5I_INVALID_HID;
+    hid_t   es_id = H5I_INVALID_HID;
+    hsize_t dims[2] = {1, 10};  /* initial extent: one 10-element row */
+    hsize_t mdims[2] = {7, 10}; /* maximum dims; allows growth to 7 rows */
+    hsize_t cdims[2] = {2, 3};  /* chunk dims (chunking required for extension) */
+    hsize_t start[2] = {0, 0};
+    hsize_t count[2] = {1, 10};
+    size_t  num_in_progress;
+    hbool_t op_failed;
+    htri_t  tri_ret;
+    int     wbuf[6][10];
+    int     rbuf[6][10];
+    int     i, j;
+
+    TESTING("H5Dset_extent() and H5Dget_space()");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, dataset, dataset more, or flush aren't supported with "
+                 "this connector\n");
+        return 0;
+    }
+
+    /* Create file dataspace */
+    if ((fspace_id[0] = H5Screate_simple(2, dims, mdims)) < 0)
+        TEST_ERROR;
+
+    /* Create memory dataspace (1-D, one 10-element row per write) */
+    if ((mspace_id = H5Screate_simple(1, &dims[1], NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create DCPL */
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    /* Set chunking */
+    if (H5Pset_chunk(dcpl_id, 2, cdims) < 0)
+        TEST_ERROR;
+
+    /* Initialize wbuf */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++)
+            wbuf[i][j] = 10 * i + j;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Create file asynchronously */
+    if ((file_id = H5Fcreate_async(ASYNC_API_TEST_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create the dataset asynchronously */
+    if ((dset_id = H5Dcreate_async(file_id, "dset", H5T_NATIVE_INT, fspace_id[0], H5P_DEFAULT, dcpl_id,
+                                   H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Extend the first dataset from 1 to 6, 1 at a time.  Each iteration
+     * keeps its own dataspace copy in fspace_id[i] so the extent at that
+     * step can be compared against fspace_out[i] later. */
+    for (i = 0; i < 6; i++) {
+        /* No need to extend on the first iteration */
+        if (i) {
+            /* Copy dataspace */
+            if ((fspace_id[i] = H5Scopy(fspace_id[i - 1])) < 0)
+                TEST_ERROR;
+
+            /* Extend dataspace */
+            dims[0] = (hsize_t)(i + 1);
+            if (H5Sset_extent_simple(fspace_id[i], 2, dims, mdims) < 0)
+                TEST_ERROR;
+
+            /* Extend dataset asynchronously */
+            if (H5Dset_extent_async(dset_id, dims, es_id) < 0)
+                TEST_ERROR;
+
+            /* Select hyperslab in file space to match new region (the
+             * newly-added row i) */
+            start[0] = (hsize_t)i;
+            if (H5Sselect_hyperslab(fspace_id[i], H5S_SELECT_SET, start, NULL, count, NULL) < 0)
+                TEST_ERROR;
+        } /* end if */
+
+        /* Get dataset dataspace */
+        if ((fspace_out[i] = H5Dget_space_async(dset_id, es_id)) < 0)
+            TEST_ERROR;
+
+        /* Write the dataset slice asynchronously */
+        if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, mspace_id, fspace_id[i], H5P_DEFAULT, wbuf[i], es_id) < 0)
+            TEST_ERROR;
+    } /* end for */
+
+    /* Flush the dataset asynchronously.  This will effectively work as a
+     * barrier, guaranteeing the read takes place after the write. */
+    if (H5Oflush_async(dset_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Read the entire dataset asynchronously */
+    if (H5Dread_async(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Verify extents are correct.  We do not need to wait because of the
+     * "future id" capability. */
+    for (i = 0; i < 6; i++) {
+        if ((tri_ret = H5Sextent_equal(fspace_id[i], fspace_out[i])) < 0)
+            TEST_ERROR;
+        if (!tri_ret)
+            FAIL_PUTS_ERROR("    dataspaces are not equal\n");
+        /* fspace_id[0] is kept open -- it is reused for the reopen check
+         * below and closed at the end */
+        if (i && H5Sclose(fspace_id[i]) < 0)
+            TEST_ERROR;
+        if (H5Sclose(fspace_out[i]) < 0)
+            TEST_ERROR;
+    } /* end for */
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++)
+            if (wbuf[i][j] != rbuf[i][j])
+                FAIL_PUTS_ERROR("    data verification failed\n");
+
+    /*
+     * Now try extending the dataset, closing it, reopening it, and getting the
+     * space.
+     */
+    /* Extend dataspace */
+    dims[0] = (hsize_t)7;
+    if (H5Sset_extent_simple(fspace_id[0], 2, dims, mdims) < 0)
+        TEST_ERROR;
+
+    /* Extend dataset asynchronously */
+    if (H5Dset_extent_async(dset_id, dims, es_id) < 0)
+        TEST_ERROR;
+
+    /* Close dataset asynchronously */
+    if (H5Dclose_async(dset_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Open dataset asynchronously */
+    if ((dset_id = H5Dopen_async(file_id, "dset", H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Get dataset dataspace asynchronously */
+    if ((fspace_out[0] = H5Dget_space_async(dset_id, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Verify the extents match */
+    if ((tri_ret = H5Sextent_equal(fspace_id[0], fspace_out[0])) < 0)
+        TEST_ERROR;
+    if (!tri_ret)
+        FAIL_PUTS_ERROR("    dataspaces are not equal\n");
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Close */
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id[0]) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_out[0]) < 0)
+        TEST_ERROR;
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(mspace_id);
+        for (i = 0; i < 6; i++) {
+            H5Sclose(fspace_id[i]);
+            H5Sclose(fspace_out[i]);
+        } /* end for */
+        H5Pclose(dcpl_id);
+        H5Dclose(dset_id);
+        H5Fclose(file_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* end test_set_extent() */
+
+/*
+ * Test H5Aexists()
+ */
+static int
+test_attribute_exists(void)
+{
+    hid_t   file_id  = H5I_INVALID_HID;
+    hid_t   dset_id  = H5I_INVALID_HID;
+    hid_t   attr_id  = H5I_INVALID_HID;
+    hid_t   space_id = H5I_INVALID_HID;
+    hid_t   es_id    = H5I_INVALID_HID;
+    hsize_t dims[2] = {6, 10};
+    hbool_t exists1; /* Existence result before the attribute is created (expect FALSE) */
+    hbool_t exists2; /* Existence result after the attribute is created (expect TRUE) */
+    size_t  num_in_progress;
+    hbool_t op_failed;
+
+    TESTING("H5Aexists()");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, dataset, dataset more, attribute, or flush aren't "
+                 "supported with this connector\n");
+        return 0;
+    }
+
+    /* Create dataspace */
+    if ((space_id = H5Screate_simple(2, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(ASYNC_API_TEST_FILE, H5F_ACC_RDWR, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create the dataset asynchronously */
+    if ((dset_id = H5Dcreate_async(file_id, "attr_exists_dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT,
+                                   H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Check if the attribute exists asynchronously (before creation - should not exist) */
+    if (H5Aexists_async(dset_id, "attr", &exists1, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the dataset asynchronously. This will effectively work as a
+     * barrier, guaranteeing the create takes place after the existence check
+     */
+    if (H5Oflush_async(dset_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Create the attribute asynchronously */
+    if ((attr_id =
+             H5Acreate_async(dset_id, "attr", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Flush the dataset asynchronously. This will effectively work as a
+     * barrier, guaranteeing the existence check takes place after the create.
+     */
+    if (H5Oflush_async(dset_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Check if the attribute exists asynchronously (after creation - should exist) */
+    if (H5Aexists_async(dset_id, "attr", &exists2, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Check if H5Aexists returned the correct values */
+    if (exists1)
+        FAIL_PUTS_ERROR("    H5Aexists returned TRUE for an attribute that should not exist");
+    if (!exists2)
+        FAIL_PUTS_ERROR("    H5Aexists returned FALSE for an attribute that should exist");
+
+    /* Close */
+    if (H5Aclose_async(attr_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose_async(dset_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Aclose(attr_id);
+        H5Dclose(dset_id);
+        H5Fclose(file_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* end test_attribute_exists() */
+
+/*
+ * Create file, dataset, and attribute, write to attribute
+ */
+static int
+test_attribute_io(void)
+{
+    hid_t   file_id  = H5I_INVALID_HID;
+    hid_t   dset_id  = H5I_INVALID_HID;
+    hid_t   attr_id  = H5I_INVALID_HID;
+    hid_t   space_id = H5I_INVALID_HID;
+    hid_t   es_id    = H5I_INVALID_HID;
+    hsize_t dims[2] = {6, 10};
+    size_t  num_in_progress;
+    hbool_t op_failed;
+    int     wbuf[6][10]; /* Write buffer */
+    int     rbuf[6][10]; /* Read buffer, compared against wbuf after each read */
+    int     i, j;
+
+    TESTING("attribute I/O");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, dataset, dataset more, attribute, or flush aren't "
+                 "supported with this connector\n");
+        return 0;
+    }
+
+    /* Create dataspace */
+    if ((space_id = H5Screate_simple(2, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(ASYNC_API_TEST_FILE, H5F_ACC_RDWR, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create the dataset asynchronously */
+    if ((dset_id = H5Dcreate_async(file_id, "attr_dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT,
+                                   H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create the attribute asynchronously */
+    if ((attr_id =
+             H5Acreate_async(dset_id, "attr", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Initialize wbuf (each element gets a unique value: 10 * row + column) */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++)
+            wbuf[i][j] = 10 * i + j;
+
+    /* Write the attribute asynchronously */
+    if (H5Awrite_async(attr_id, H5T_NATIVE_INT, wbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the dataset asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the write. */
+    if (H5Oflush_async(dset_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Read the attribute asynchronously */
+    if (H5Aread_async(attr_id, H5T_NATIVE_INT, rbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++)
+            if (wbuf[i][j] != rbuf[i][j])
+                FAIL_PUTS_ERROR("    data verification failed\n");
+
+    /* Close the attribute asynchronously */
+    if (H5Aclose_async(attr_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Open the attribute asynchronously (verifies the data survives a close/reopen cycle) */
+    if ((attr_id = H5Aopen_async(dset_id, "attr", H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Read the attribute asynchronously */
+    if (H5Aread_async(attr_id, H5T_NATIVE_INT, rbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++)
+            if (wbuf[i][j] != rbuf[i][j])
+                FAIL_PUTS_ERROR("    data verification failed\n");
+
+    /* Close out of order to see if it trips things up */
+    if (H5Dclose_async(dset_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Aclose_async(attr_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Aclose(attr_id);
+        H5Dclose(dset_id);
+        H5Fclose(file_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* end test_attribute_io() */
+
+/*
+ * Create file, dataset, and attribute, write to attribute with type conversion
+ */
+static int
+test_attribute_io_tconv(void)
+{
+    hid_t   file_id  = H5I_INVALID_HID;
+    hid_t   attr_id  = H5I_INVALID_HID;
+    hid_t   space_id = H5I_INVALID_HID;
+    hid_t   es_id    = H5I_INVALID_HID;
+    hsize_t dims[2] = {6, 10};
+    size_t  num_in_progress;
+    hbool_t op_failed;
+    int     wbuf[6][10]; /* Write buffer (native int; file type is H5T_STD_U16BE, forcing conversion) */
+    int     rbuf[6][10]; /* Read buffer, compared against wbuf after each read */
+    int     i, j;
+
+    TESTING("attribute I/O with type conversion");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, attribute, or flush aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* Create dataspace */
+    if ((space_id = H5Screate_simple(2, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(ASYNC_API_TEST_FILE, H5F_ACC_RDWR, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create the attribute asynchronously by name (attached to the "attr_dset"
+     * dataset created by test_attribute_io()) */
+    if ((attr_id = H5Acreate_by_name_async(file_id, "attr_dset", "attr_tconv", H5T_STD_U16BE, space_id,
+                                           H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Initialize wbuf */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++)
+            wbuf[i][j] = 10 * i + j;
+
+    /* Write the attribute asynchronously */
+    if (H5Awrite_async(attr_id, H5T_NATIVE_INT, wbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the file asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the write. */
+    if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+        TEST_ERROR;
+
+    /* Read the attribute asynchronously */
+    if (H5Aread_async(attr_id, H5T_NATIVE_INT, rbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++)
+            if (wbuf[i][j] != rbuf[i][j])
+                FAIL_PUTS_ERROR("    data verification failed\n");
+
+    /* Close the attribute asynchronously */
+    if (H5Aclose_async(attr_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Open the attribute asynchronously */
+    if ((attr_id =
+             H5Aopen_by_name_async(file_id, "attr_dset", "attr_tconv", H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Read the attribute asynchronously */
+    if (H5Aread_async(attr_id, H5T_NATIVE_INT, rbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++)
+            if (wbuf[i][j] != rbuf[i][j])
+                FAIL_PUTS_ERROR("    data verification failed\n");
+
+    /* Close */
+    if (H5Aclose_async(attr_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Aclose(attr_id);
+        H5Fclose(file_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* end test_attribute_io_tconv() */
+
+/*
+ * Create file, dataset, and attribute, write to attribute with compound type
+ * conversion
+ */
+/* Two-member compound type used by test_attribute_io_compound(); the file
+ * datatype stores 'a' as H5T_STD_U16BE and 'b' as H5T_STD_I64LE, so I/O
+ * exercises both member selection and type conversion. */
+typedef struct tattr_cmpd_t {
+    int a; /* First member - selected alone via mtypea_id */
+    int b; /* Second member - selected alone via mtypeb_id */
+} tattr_cmpd_t;
+
+static int
+test_attribute_io_compound(void)
+{
+    hid_t   file_id   = H5I_INVALID_HID;
+    hid_t   attr_id   = H5I_INVALID_HID;
+    hid_t   space_id  = H5I_INVALID_HID;
+    hid_t   mtype_id  = H5I_INVALID_HID; /* Memory type with both members */
+    hid_t   ftype_id  = H5I_INVALID_HID; /* File type (packed, big/little-endian members) */
+    hid_t   mtypea_id = H5I_INVALID_HID; /* Memory type selecting member 'a' only */
+    hid_t   mtypeb_id = H5I_INVALID_HID; /* Memory type selecting member 'b' only */
+    hid_t   es_id     = H5I_INVALID_HID;
+    hsize_t dims[2] = {6, 10};
+    size_t  num_in_progress;
+    hbool_t op_failed;
+    tattr_cmpd_t wbuf[6][10]; /* Write buffer */
+    tattr_cmpd_t rbuf[6][10]; /* Read buffer */
+    tattr_cmpd_t fbuf[6][10]; /* Tracks the expected current file contents */
+    int          i, j;
+
+    TESTING("attribute I/O with compound type conversion");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, attribute, or flush aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* Create datatype */
+    if ((mtype_id = H5Tcreate(H5T_COMPOUND, sizeof(tattr_cmpd_t))) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(mtype_id, "a_name", HOFFSET(tattr_cmpd_t, a), H5T_NATIVE_INT) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(mtype_id, "b_name", HOFFSET(tattr_cmpd_t, b), H5T_NATIVE_INT) < 0)
+        TEST_ERROR;
+
+    if ((mtypea_id = H5Tcreate(H5T_COMPOUND, sizeof(tattr_cmpd_t))) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(mtypea_id, "a_name", HOFFSET(tattr_cmpd_t, a), H5T_NATIVE_INT) < 0)
+        TEST_ERROR;
+
+    if ((mtypeb_id = H5Tcreate(H5T_COMPOUND, sizeof(tattr_cmpd_t))) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(mtypeb_id, "b_name", HOFFSET(tattr_cmpd_t, b), H5T_NATIVE_INT) < 0)
+        TEST_ERROR;
+
+    /* Packed file type: 2 bytes for the U16BE 'a' member + 8 bytes for the
+     * I64LE 'b' member at offset 2 */
+    if ((ftype_id = H5Tcreate(H5T_COMPOUND, 2 + 8)) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(ftype_id, "a_name", 0, H5T_STD_U16BE) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(ftype_id, "b_name", 2, H5T_STD_I64LE) < 0)
+        TEST_ERROR;
+
+    /* Create dataspace */
+    if ((space_id = H5Screate_simple(2, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(ASYNC_API_TEST_FILE, H5F_ACC_RDWR, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create the attribute asynchronously by name */
+    if ((attr_id = H5Acreate_by_name_async(file_id, "attr_dset", "attr_cmpd", ftype_id, space_id, H5P_DEFAULT,
+                                           H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Initialize wbuf */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++) {
+            wbuf[i][j].a = 2 * (10 * i + j);
+            wbuf[i][j].b = 2 * (10 * i + j) + 1;
+        } /* end for */
+
+    /* Write the attribute asynchronously */
+    if (H5Awrite_async(attr_id, mtype_id, wbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Update fbuf */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++) {
+            fbuf[i][j].a = wbuf[i][j].a;
+            fbuf[i][j].b = wbuf[i][j].b;
+        } /* end for */
+
+    /* Flush the file asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the write. */
+    if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+        TEST_ERROR;
+
+    /* Read the attribute asynchronously */
+    if (H5Aread_async(attr_id, mtype_id, rbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++) {
+            if (rbuf[i][j].a != fbuf[i][j].a)
+                FAIL_PUTS_ERROR("    data verification failed\n");
+            if (rbuf[i][j].b != fbuf[i][j].b)
+                FAIL_PUTS_ERROR("    data verification failed\n");
+        } /* end for */
+
+    /* Clear the read buffer (use -2 as a sentinel for "not read") */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++) {
+            rbuf[i][j].a = -2;
+            rbuf[i][j].b = -2;
+        } /* end for */
+
+    /* Read the attribute asynchronously (element a only) */
+    if (H5Aread_async(attr_id, mtypea_id, rbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data ('b' must remain at the sentinel value) */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++) {
+            if (rbuf[i][j].a != fbuf[i][j].a)
+                FAIL_PUTS_ERROR("    data verification failed\n");
+            if (rbuf[i][j].b != -2)
+                FAIL_PUTS_ERROR("    data verification failed\n");
+        } /* end for */
+
+    /* Clear the read buffer */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++) {
+            rbuf[i][j].a = -2;
+            rbuf[i][j].b = -2;
+        } /* end for */
+
+    /* Read the attribute asynchronously (element b only) */
+    if (H5Aread_async(attr_id, mtypeb_id, rbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data ('a' must remain at the sentinel value) */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++) {
+            if (rbuf[i][j].a != -2)
+                FAIL_PUTS_ERROR("    data verification failed\n");
+            if (rbuf[i][j].b != fbuf[i][j].b)
+                FAIL_PUTS_ERROR("    data verification failed\n");
+        } /* end for */
+
+    /* Update wbuf */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++) {
+            wbuf[i][j].a += 2 * 6 * 10;
+            wbuf[i][j].b += 2 * 6 * 10;
+        } /* end for */
+
+    /* Write the attribute asynchronously (element a only) */
+    if (H5Awrite_async(attr_id, mtypea_id, wbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Update fbuf (only 'a' was written, so only 'a' changes in the file) */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++)
+            fbuf[i][j].a = wbuf[i][j].a;
+
+    /* Flush the file asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the write. */
+    if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+        TEST_ERROR;
+
+    /* Clear the read buffer */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++) {
+            rbuf[i][j].a = -2;
+            rbuf[i][j].b = -2;
+        } /* end for */
+
+    /* Read the attribute asynchronously */
+    if (H5Aread_async(attr_id, mtype_id, rbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++) {
+            if (rbuf[i][j].a != fbuf[i][j].a)
+                FAIL_PUTS_ERROR("    data verification failed\n");
+            if (rbuf[i][j].b != fbuf[i][j].b)
+                FAIL_PUTS_ERROR("    data verification failed\n");
+        } /* end for */
+
+    /* Update wbuf */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++) {
+            wbuf[i][j].a += 2 * 6 * 10;
+            wbuf[i][j].b += 2 * 6 * 10;
+        } /* end for */
+
+    /* Write the attribute asynchronously (element b only) */
+    if (H5Awrite_async(attr_id, mtypeb_id, wbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Update fbuf (only 'b' was written, so only 'b' changes in the file) */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++)
+            fbuf[i][j].b = wbuf[i][j].b;
+
+    /* Flush the file asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the write. */
+    if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+        TEST_ERROR;
+
+    /* Clear the read buffer */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++) {
+            rbuf[i][j].a = -2;
+            rbuf[i][j].b = -2;
+        } /* end for */
+
+    /* Read the attribute asynchronously */
+    if (H5Aread_async(attr_id, mtype_id, rbuf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data */
+    for (i = 0; i < 6; i++)
+        for (j = 0; j < 10; j++) {
+            if (rbuf[i][j].a != fbuf[i][j].a)
+                FAIL_PUTS_ERROR("    data verification failed\n");
+            if (rbuf[i][j].b != fbuf[i][j].b)
+                FAIL_PUTS_ERROR("    data verification failed\n");
+        } /* end for */
+
+    /* Close */
+    if (H5Aclose_async(attr_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(mtype_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(ftype_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(mtypea_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(mtypeb_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Tclose(mtype_id);
+        H5Tclose(ftype_id);
+        H5Tclose(mtypea_id);
+        H5Tclose(mtypeb_id);
+        H5Aclose(attr_id);
+        H5Fclose(file_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* end test_attribute_io_compound() */
+
+/*
+ * Test group interfaces
+ */
+static int
+test_group(void)
+{
+    hid_t      file_id         = H5I_INVALID_HID;
+    hid_t      parent_group_id = H5I_INVALID_HID;
+    hid_t      group_id        = H5I_INVALID_HID;
+    hid_t      subgroup_id     = H5I_INVALID_HID;
+    hid_t      gcpl_id         = H5I_INVALID_HID;
+    hid_t      es_id           = H5I_INVALID_HID;
+    H5G_info_t info1; /* Info for group1 (expect 0 links) */
+    H5G_info_t info2; /* Info for the group at creation-order index 1, i.e. group2 (expect 1 link) */
+    H5G_info_t info3; /* Info for group3 (expect 2 links) */
+    size_t     num_in_progress;
+    hbool_t    op_failed;
+
+    TESTING("group operations");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, group more, or creation order aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    /* Create GCPL */
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0)
+        TEST_ERROR;
+
+    /* Track creation order (needed for the H5Gget_info_by_idx_async() call below) */
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(ASYNC_API_TEST_FILE, H5F_ACC_RDWR, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create the parent group asynchronously */
+    if ((parent_group_id =
+             H5Gcreate_async(file_id, "group_parent", H5P_DEFAULT, gcpl_id, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create 3 subgroups asynchronously, the first with no sub-subgroups, the
+     * second with 1, and the third with 2 */
+    if ((group_id =
+             H5Gcreate_async(parent_group_id, "group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+    if (H5Gclose_async(group_id, es_id) < 0)
+        TEST_ERROR;
+
+    if ((group_id =
+             H5Gcreate_async(parent_group_id, "group2", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+    if ((subgroup_id = H5Gcreate_async(group_id, "subgroup1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+        0)
+        TEST_ERROR;
+    if (H5Gclose_async(subgroup_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose_async(group_id, es_id) < 0)
+        TEST_ERROR;
+
+    if ((group_id =
+             H5Gcreate_async(parent_group_id, "group3", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+    if ((subgroup_id = H5Gcreate_async(group_id, "subgroup1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+        0)
+        TEST_ERROR;
+    if (H5Gclose_async(subgroup_id, es_id) < 0)
+        TEST_ERROR;
+    if ((subgroup_id = H5Gcreate_async(group_id, "subgroup2", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+        0)
+        TEST_ERROR;
+    if (H5Gclose_async(subgroup_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose_async(group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the file asynchronously. This will effectively work as a barrier,
+     * guaranteeing the read takes place after the write. */
+    if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+        TEST_ERROR;
+
+    /* Test H5Gget_info_async */
+    /* Open group1 asynchronously */
+    if ((group_id = H5Gopen_async(parent_group_id, "group1", H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Get info */
+    if (H5Gget_info_async(group_id, &info1, es_id) < 0)
+        TEST_ERROR;
+
+    /* Test H5Gget_info_by_idx_async (creation-order index 1 is "group2") */
+    if (H5Gget_info_by_idx_async(parent_group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, &info2,
+                                 H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Test H5Gget_info_by_name_async */
+    if (H5Gget_info_by_name_async(parent_group_id, "group3", &info3, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify group infos */
+    if (info1.nlinks != 0)
+        FAIL_PUTS_ERROR("    incorrect number of links");
+    if (info2.nlinks != 1)
+        FAIL_PUTS_ERROR("    incorrect number of links");
+    if (info3.nlinks != 2)
+        FAIL_PUTS_ERROR("    incorrect number of links");
+
+    /* Close */
+    if (H5Gclose_async(group_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(subgroup_id);
+        H5Gclose(group_id);
+        H5Gclose(parent_group_id);
+        H5Fclose(file_id);
+        H5Pclose(gcpl_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* end test_group() */
+
+/*
+ * Test link interfaces
+ */
+static int
+test_link(void)
+{
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   parent_group_id = H5I_INVALID_HID;
+    hid_t   group_id        = H5I_INVALID_HID;
+    hid_t   gcpl_id         = H5I_INVALID_HID;
+    hid_t   es_id           = H5I_INVALID_HID;
+    hbool_t existsh1; /* Hard link existence before any delete (expect TRUE) */
+    hbool_t existsh2; /* Hard link existence after soft link delete (expect TRUE) */
+    hbool_t existsh3; /* Hard link existence after hard link delete (expect FALSE) */
+    hbool_t existss1; /* Soft link existence before any delete (expect TRUE) */
+    hbool_t existss2; /* Soft link existence after soft link delete (expect FALSE) */
+    hbool_t existss3; /* Soft link existence after hard link delete (expect FALSE) */
+    size_t  num_in_progress;
+    hbool_t op_failed;
+
+    TESTING("link operations");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, link, hard link, soft link, flush, or creation order "
+                 "aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* Create GCPL */
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0)
+        TEST_ERROR;
+
+    /* Track creation order (needed for the H5Ldelete_by_idx_async() call below) */
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(ASYNC_API_TEST_FILE, H5F_ACC_RDWR, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create the parent group asynchronously */
+    if ((parent_group_id =
+             H5Gcreate_async(file_id, "link_parent", H5P_DEFAULT, gcpl_id, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create subgroup asynchronously. */
+    if ((group_id = H5Gcreate_async(parent_group_id, "group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+        0)
+        TEST_ERROR;
+    if (H5Gclose_async(group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the link to the subgroup is visible to later tasks.
+     */
+    if (H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Create hard link asynchronously */
+    if (H5Lcreate_hard_async(parent_group_id, "group", parent_group_id, "hard_link", H5P_DEFAULT, H5P_DEFAULT,
+                             es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the soft link create takes place after the hard
+     * link create. */
+    if (H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Create soft link asynchronously */
+    if (H5Lcreate_soft_async("/link_parent/group", parent_group_id, "soft_link", H5P_DEFAULT, H5P_DEFAULT,
+                             es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the writes. */
+    if (H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Check if hard link exists */
+    if (H5Lexists_async(parent_group_id, "hard_link", &existsh1, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Check if soft link exists */
+    if (H5Lexists_async(parent_group_id, "soft_link", &existss1, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the delete takes place after the reads. */
+    if (H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Delete the soft link by creation-order index (creation order: "group" is
+     * 0, "hard_link" is 1, "soft_link" is 2) */
+    if (H5Ldelete_by_idx_async(parent_group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2, H5P_DEFAULT, es_id) <
+        0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the delete. */
+    if (H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Check if hard link exists */
+    if (H5Lexists_async(parent_group_id, "hard_link", &existsh2, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Check if soft link exists */
+    if (H5Lexists_async(parent_group_id, "soft_link", &existss2, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the delete takes place after the reads. */
+    if (H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Delete hard link */
+    if (H5Ldelete_async(parent_group_id, "hard_link", H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the delete. */
+    if (H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Check if hard link exists */
+    if (H5Lexists_async(parent_group_id, "hard_link", &existsh3, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Check if soft link exists */
+    if (H5Lexists_async(parent_group_id, "soft_link", &existss3, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Check if existence returns were correct */
+    if (!existsh1)
+        FAIL_PUTS_ERROR("    link exists returned FALSE for link that should exist");
+    if (!existss1)
+        FAIL_PUTS_ERROR("    link exists returned FALSE for link that should exist");
+    if (!existsh2)
+        FAIL_PUTS_ERROR("    link exists returned FALSE for link that should exist");
+    if (existss2)
+        FAIL_PUTS_ERROR("    link exists returned TRUE for link that should not exist");
+    if (existsh3)
+        FAIL_PUTS_ERROR("    link exists returned TRUE for link that should not exist");
+    /* Fixed: this previously re-tested existsh3, leaving the soft link's
+     * post-delete state unverified */
+    if (existss3)
+        FAIL_PUTS_ERROR("    link exists returned TRUE for link that should not exist");
+
+    /* Close */
+    if (H5Gclose_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(parent_group_id);
+        H5Fclose(file_id);
+        H5Pclose(gcpl_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* end test_link() */
+
+/*
+ * Test H5Ocopy() and H5Orefresh(): create a dataset, copy it within its
+ * parent group with the async copy API, then open and refresh the copy.
+ */
+static int
+test_ocopy_orefresh(void)
+{
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   parent_group_id = H5I_INVALID_HID;
+    hid_t   dset_id         = H5I_INVALID_HID;
+    hid_t   space_id        = H5I_INVALID_HID;
+    hid_t   es_id           = H5I_INVALID_HID;
+    hsize_t dims[2]         = {6, 10};
+    size_t  num_in_progress;
+    hbool_t op_failed;
+
+    TESTING("H5Ocopy() and H5Orefresh()");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, object more, flush, or refresh aren't "
+                 "supported with this connector\n");
+        return 0;
+    }
+
+    /* Create dataspace */
+    if ((space_id = H5Screate_simple(2, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(ASYNC_API_TEST_FILE, H5F_ACC_RDWR, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create the parent group asynchronously */
+    if ((parent_group_id =
+             H5Gcreate_async(file_id, "ocopy_parent", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create dataset asynchronously. */
+    if ((dset_id = H5Dcreate_async(parent_group_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT,
+                                   H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+    if (H5Dclose_async(dset_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the copy takes place after dataset create. */
+    if (H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Copy dataset */
+    if (H5Ocopy_async(parent_group_id, "dset", parent_group_id, "copied_dset", H5P_DEFAULT, H5P_DEFAULT,
+                      es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the dataset open takes place after the copy. */
+    if (H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Open the copied dataset asynchronously */
+    if ((dset_id = H5Dopen_async(parent_group_id, "copied_dset", H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Refresh the copied dataset. NOTE(review): H5Orefresh is the synchronous
+     * API -- confirm whether H5Orefresh_async was intended here. */
+    if (H5Orefresh(dset_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Close */
+    if (H5Dclose_async(dset_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+    /* Close the dataspace (synchronous); previously leaked on both paths */
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Dclose(dset_id);
+        H5Gclose(parent_group_id);
+        H5Fclose(file_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* end test_ocopy_orefresh() */
+
+/*
+ * Test H5Freopen()
+ *
+ * Opens the shared async test file, reopens it asynchronously (producing a
+ * second file ID for the same file), then closes both IDs. H5ESwait calls
+ * act as synchronization points between the open and close phases.
+ */
+static int
+test_file_reopen(void)
+{
+    hid_t   file_id          = H5I_INVALID_HID;
+    hid_t   reopened_file_id = H5I_INVALID_HID;
+    hid_t   es_id            = H5I_INVALID_HID;
+    size_t  num_in_progress;
+    hbool_t op_failed;
+
+    TESTING("H5Freopen()");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_MORE)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file or file more aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(ASYNC_API_TEST_FILE, H5F_ACC_RDWR, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Reopen file asynchronously */
+    if ((reopened_file_id = H5Freopen_async(file_id, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete (open + reopen) before closing */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Close */
+    if (H5Fclose_async(reopened_file_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Fclose(reopened_file_id);
+        H5Fclose(file_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* end test_file_reopen() */
+
+/*
+ * Cleanup temporary test files
+ *
+ * Deletes the main async test file plus every numbered file generated from
+ * the printf-style name pattern.
+ */
+static void
+cleanup_files(void)
+{
+    char file_name[64];
+    int  i;
+
+    H5Fdelete(ASYNC_API_TEST_FILE, H5P_DEFAULT);
+    /* max_printf_file is presumably the highest file index used by the tests
+     * (defined elsewhere in this file) -- delete indices 0..max inclusive */
+    for (i = 0; i <= max_printf_file; i++) {
+        /* Use sizeof instead of repeating the magic buffer size */
+        HDsnprintf(file_name, sizeof(file_name), ASYNC_API_TEST_FILE_PRINTF, i);
+        H5Fdelete(file_name, H5P_DEFAULT);
+    } /* end for */
+}
+
+/*
+ * Entry point for the API async tests: runs every test in async_tests[],
+ * counting failures, then removes the temporary files the tests created.
+ * Returns the number of failed tests (0 on full success or when skipped).
+ */
+int
+H5_api_async_test(void)
+{
+    size_t i;
+    int    nerrors;
+
+    HDprintf("**********************************************\n");
+    HDprintf("*                                            *\n");
+    HDprintf("*             API Async Tests                *\n");
+    HDprintf("*                                            *\n");
+    HDprintf("**********************************************\n\n");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_ASYNC)) {
+        SKIPPED();
+        HDprintf("    Async APIs aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* Each test returns non-zero on failure; tally one error per failed test */
+    for (i = 0, nerrors = 0; i < ARRAY_LENGTH(async_tests); i++) {
+        nerrors += (*async_tests[i])() ? 1 : 0;
+    }
+
+    HDprintf("\n");
+
+    HDprintf("Cleaning up testing files\n");
+    cleanup_files();
+
+    return nerrors;
+}
+
+#else /* H5ESpublic_H */
+
+/*
+ * Stub entry point used when the HDF5 library was built without event-set
+ * (H5ES) support: prints the banner, reports the skip, and succeeds.
+ */
+int
+H5_api_async_test(void)
+{
+    HDprintf("**********************************************\n");
+    HDprintf("*                                            *\n");
+    HDprintf("*             API Async Tests                *\n");
+    HDprintf("*                                            *\n");
+    HDprintf("**********************************************\n\n");
+
+    HDprintf("SKIPPED due to no async support in HDF5 library\n");
+
+    return 0;
+}
+
+#endif /* H5ESpublic_H */
diff --git a/test/API/H5_api_async_test.h b/test/API/H5_api_async_test.h
new file mode 100644
index 0000000..f6df48a
--- /dev/null
+++ b/test/API/H5_api_async_test.h
@@ -0,0 +1,29 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_ASYNC_TEST_H
+#define H5_API_ASYNC_TEST_H
+
+#include "H5_api_test.h"
+
+int H5_api_async_test(void);
+
+/************************************************
+ * *
+ * API async test defines *
+ * *
+ ************************************************/
+
+#define ASYNC_API_TEST_FILE "H5_api_async_test.h5"
+#define ASYNC_API_TEST_FILE_PRINTF "H5_api_async_test_%d.h5"
+
+#endif
diff --git a/test/API/H5_api_attribute_test.c b/test/API/H5_api_attribute_test.c
new file mode 100644
index 0000000..7f767a7
--- /dev/null
+++ b/test/API/H5_api_attribute_test.c
@@ -0,0 +1,11027 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_attribute_test.h"
+
+/*
+ * TODO: Additional tests to be written:
+ *
+ * - Test for creating a large attribute.
+ * - Test for checking that object's max. attr. creation
+ * order value gets reset when all attributes are removed.
+ */
+
+static int test_create_attribute_on_root(void);
+static int test_create_attribute_on_dataset(void);
+static int test_create_attribute_on_datatype(void);
+static int test_create_attribute_with_null_space(void);
+static int test_create_attribute_with_scalar_space(void);
+static int test_create_attribute_with_space_in_name(void);
+static int test_create_attribute_invalid_params(void);
+static int test_open_attribute(void);
+static int test_open_attribute_invalid_params(void);
+static int test_write_attribute(void);
+static int test_write_attribute_invalid_params(void);
+static int test_read_attribute(void);
+static int test_read_attribute_invalid_params(void);
+static int test_read_empty_attribute(void);
+static int test_close_attribute_invalid_id(void);
+static int test_get_attribute_space_and_type(void);
+static int test_get_attribute_space_and_type_invalid_params(void);
+static int test_attribute_property_lists(void);
+static int test_get_attribute_name(void);
+static int test_get_attribute_name_invalid_params(void);
+static int test_get_attribute_storage_size(void);
+static int test_get_attribute_info(void);
+static int test_get_attribute_info_invalid_params(void);
+static int test_rename_attribute(void);
+static int test_rename_attribute_invalid_params(void);
+static int test_attribute_iterate_group(void);
+static int test_attribute_iterate_dataset(void);
+static int test_attribute_iterate_datatype(void);
+static int test_attribute_iterate_index_saving(void);
+static int test_attribute_iterate_invalid_params(void);
+static int test_attribute_iterate_0_attributes(void);
+static int test_delete_attribute(void);
+static int test_delete_attribute_invalid_params(void);
+static int test_attribute_exists(void);
+static int test_attribute_exists_invalid_params(void);
+static int test_attribute_many(void);
+static int test_attribute_duplicate_id(void);
+static int test_get_number_attributes(void);
+static int test_attr_shared_dtype(void);
+
+static herr_t attr_iter_callback1(hid_t location_id, const char *attr_name, const H5A_info_t *ainfo,
+ void *op_data);
+static herr_t attr_iter_callback2(hid_t location_id, const char *attr_name, const H5A_info_t *ainfo,
+ void *op_data);
+
+/*
+ * The array of attribute tests to be performed.
+ *
+ * NOTE(review): test_attribute_duplicate_id is listed before
+ * test_attribute_many here, the reverse of the forward-declaration order
+ * above -- confirm this ordering is intentional.
+ */
+static int (*attribute_tests[])(void) = {test_create_attribute_on_root,
+                                         test_create_attribute_on_dataset,
+                                         test_create_attribute_on_datatype,
+                                         test_create_attribute_with_null_space,
+                                         test_create_attribute_with_scalar_space,
+                                         test_create_attribute_with_space_in_name,
+                                         test_create_attribute_invalid_params,
+                                         test_open_attribute,
+                                         test_open_attribute_invalid_params,
+                                         test_write_attribute,
+                                         test_write_attribute_invalid_params,
+                                         test_read_attribute,
+                                         test_read_attribute_invalid_params,
+                                         test_read_empty_attribute,
+                                         test_close_attribute_invalid_id,
+                                         test_get_attribute_space_and_type,
+                                         test_get_attribute_space_and_type_invalid_params,
+                                         test_attribute_property_lists,
+                                         test_get_attribute_name,
+                                         test_get_attribute_name_invalid_params,
+                                         test_get_attribute_storage_size,
+                                         test_get_attribute_info,
+                                         test_get_attribute_info_invalid_params,
+                                         test_rename_attribute,
+                                         test_rename_attribute_invalid_params,
+                                         test_attribute_iterate_group,
+                                         test_attribute_iterate_dataset,
+                                         test_attribute_iterate_datatype,
+                                         test_attribute_iterate_index_saving,
+                                         test_attribute_iterate_invalid_params,
+                                         test_attribute_iterate_0_attributes,
+                                         test_delete_attribute,
+                                         test_delete_attribute_invalid_params,
+                                         test_attribute_exists,
+                                         test_attribute_exists_invalid_params,
+                                         test_attribute_duplicate_id,
+                                         test_attribute_many,
+                                         test_get_number_attributes,
+                                         test_attr_shared_dtype};
+
+/*
+ * A test to check that an attribute can be created on
+ * the root group, both directly (H5Acreate2 on the file ID) and by name
+ * (H5Acreate_by_name with the "/" path).
+ */
+static int
+test_create_attribute_on_root(void)
+{
+    htri_t attr_exists;
+    hid_t  file_id = H5I_INVALID_HID;
+    hid_t  attr_id = H5I_INVALID_HID, attr_id2 = H5I_INVALID_HID;
+    hid_t  attr_dtype1 = H5I_INVALID_HID, attr_dtype2 = H5I_INVALID_HID;
+    hid_t  space_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("attribute creation on the root group");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file or attribute aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    /* Random dataspace and datatypes for the attributes; see the generator
+     * helpers in H5_api_test_util.c for the meaning of the TRUE flag */
+    if ((space_id = generate_random_dataspace(ATTRIBUTE_CREATE_ON_ROOT_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_dtype1 = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+    if ((attr_dtype2 = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Acreate2)
+        {
+            TESTING_2("H5Acreate on the root group");
+
+            if ((attr_id = H5Acreate2(file_id, ATTRIBUTE_CREATE_ON_ROOT_ATTR_NAME, attr_dtype1, space_id,
+                                      H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create attribute '%s' using H5Acreate\n",
+                         ATTRIBUTE_CREATE_ON_ROOT_ATTR_NAME);
+                PART_ERROR(H5Acreate2);
+            }
+
+            /* Verify the attribute has been created */
+            if ((attr_exists = H5Aexists(file_id, ATTRIBUTE_CREATE_ON_ROOT_ATTR_NAME)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if attribute '%s' exists\n",
+                         ATTRIBUTE_CREATE_ON_ROOT_ATTR_NAME);
+                PART_ERROR(H5Acreate2);
+            }
+
+            if (!attr_exists) {
+                H5_FAILED();
+                HDprintf("    attribute '%s' did not exist\n", ATTRIBUTE_CREATE_ON_ROOT_ATTR_NAME);
+                PART_ERROR(H5Acreate2);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Acreate2);
+
+        PART_BEGIN(H5Acreate_by_name)
+        {
+            TESTING_2("H5Acreate_by_name on the root group");
+
+            if ((attr_id2 = H5Acreate_by_name(file_id, "/", ATTRIBUTE_CREATE_ON_ROOT_ATTR_NAME2, attr_dtype2,
+                                              space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create attribute on root group using H5Acreate_by_name\n");
+                PART_ERROR(H5Acreate_by_name);
+            }
+
+            /* Verify the attribute has been created */
+            if ((attr_exists = H5Aexists(file_id, ATTRIBUTE_CREATE_ON_ROOT_ATTR_NAME2)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if attribute '%s' exists\n",
+                         ATTRIBUTE_CREATE_ON_ROOT_ATTR_NAME2);
+                PART_ERROR(H5Acreate_by_name);
+            }
+
+            if (!attr_exists) {
+                H5_FAILED();
+                HDprintf("    attribute '%s' did not exist\n", ATTRIBUTE_CREATE_ON_ROOT_ATTR_NAME2);
+                PART_ERROR(H5Acreate_by_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Acreate_by_name);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    /* The attributes themselves are left in the shared test file */
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype1) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype2) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id2) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Tclose(attr_dtype1);
+        H5Tclose(attr_dtype2);
+        H5Aclose(attr_id);
+        H5Aclose(attr_id2);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that an attribute can be created on
+ * a dataset, both directly (H5Acreate2 on the dataset ID) and by name
+ * (H5Acreate_by_name through the containing group).
+ */
+static int
+test_create_attribute_on_dataset(void)
+{
+    htri_t attr_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID;
+    hid_t  group_id        = H5I_INVALID_HID;
+    hid_t  dset_id         = H5I_INVALID_HID;
+    hid_t  attr_id = H5I_INVALID_HID, attr_id2 = H5I_INVALID_HID;
+    hid_t  attr_dtype1 = H5I_INVALID_HID, attr_dtype2 = H5I_INVALID_HID;
+    hid_t  dset_dtype    = H5I_INVALID_HID;
+    hid_t  dset_space_id = H5I_INVALID_HID;
+    hid_t  attr_space_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("attribute creation on a dataset");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or attribute aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_CREATE_ON_DATASET_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container group '%s'\n", ATTRIBUTE_CREATE_ON_DATASET_GROUP_NAME);
+        goto error;
+    }
+
+    /* Separate random dataspaces: one for the dataset, one shared by both
+     * attributes (see H5_api_test_util.c for the final flag's semantics) */
+    if ((dset_space_id =
+             generate_random_dataspace(ATTRIBUTE_CREATE_ON_DATASET_DSET_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+    if ((attr_space_id =
+             generate_random_dataspace(ATTRIBUTE_CREATE_ON_DATASET_ATTR_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+    if ((attr_dtype1 = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+    if ((attr_dtype2 = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, ATTRIBUTE_CREATE_ON_DATASET_DSET_NAME, dset_dtype, dset_space_id,
+                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Acreate_on_dataset)
+        {
+            TESTING_2("H5Acreate on a dataset");
+
+            if ((attr_id = H5Acreate2(dset_id, ATTRIBUTE_CREATE_ON_DATASET_ATTR_NAME, attr_dtype1,
+                                      attr_space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create attribute\n");
+                PART_ERROR(H5Acreate_on_dataset);
+            }
+
+            /* Verify the attribute has been created */
+            if ((attr_exists = H5Aexists(dset_id, ATTRIBUTE_CREATE_ON_DATASET_ATTR_NAME)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if attribute '%s' exists\n",
+                         ATTRIBUTE_CREATE_ON_DATASET_ATTR_NAME);
+                PART_ERROR(H5Acreate_on_dataset);
+            }
+
+            if (!attr_exists) {
+                H5_FAILED();
+                HDprintf("    attribute '%s' did not exist\n", ATTRIBUTE_CREATE_ON_DATASET_ATTR_NAME);
+                PART_ERROR(H5Acreate_on_dataset);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Acreate_on_dataset);
+
+        PART_BEGIN(H5Acreate_by_name_on_dataset)
+        {
+            TESTING_2("H5Acreate_by_name on a dataset");
+
+            if ((attr_id2 = H5Acreate_by_name(group_id, ATTRIBUTE_CREATE_ON_DATASET_DSET_NAME,
+                                              ATTRIBUTE_CREATE_ON_DATASET_ATTR_NAME2, attr_dtype2,
+                                              attr_space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create attribute on dataset by name\n");
+                PART_ERROR(H5Acreate_by_name_on_dataset);
+            }
+
+            /* Verify the attribute has been created */
+            if ((attr_exists = H5Aexists(dset_id, ATTRIBUTE_CREATE_ON_DATASET_ATTR_NAME2)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if attribute '%s' exists\n",
+                         ATTRIBUTE_CREATE_ON_DATASET_ATTR_NAME2);
+                PART_ERROR(H5Acreate_by_name_on_dataset);
+            }
+
+            if (!attr_exists) {
+                H5_FAILED();
+                HDprintf("    attribute '%s' did not exist\n", ATTRIBUTE_CREATE_ON_DATASET_ATTR_NAME2);
+                PART_ERROR(H5Acreate_by_name_on_dataset);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Acreate_by_name_on_dataset);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(dset_space_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(attr_space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype1) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype2) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(dset_space_id);
+        H5Sclose(attr_space_id);
+        H5Tclose(dset_dtype);
+        H5Tclose(attr_dtype1);
+        H5Tclose(attr_dtype2);
+        H5Dclose(dset_id);
+        H5Aclose(attr_id);
+        H5Aclose(attr_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that an attribute can be created on
+ * a committed datatype, both directly (H5Acreate2 on the datatype ID) and
+ * by name (H5Acreate_by_name through the containing group).
+ */
+static int
+test_create_attribute_on_datatype(void)
+{
+    htri_t attr_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID;
+    hid_t  group_id        = H5I_INVALID_HID;
+    hid_t  type_id         = H5I_INVALID_HID;
+    hid_t  attr_id = H5I_INVALID_HID, attr_id2 = H5I_INVALID_HID;
+    hid_t  attr_dtype1 = H5I_INVALID_HID, attr_dtype2 = H5I_INVALID_HID;
+    hid_t  space_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("attribute creation on a committed datatype");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, stored datatype, or attribute aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_CREATE_ON_DATATYPE_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container group '%s'\n", ATTRIBUTE_CREATE_ON_DATATYPE_GROUP_NAME);
+        goto error;
+    }
+
+    if ((type_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create datatype\n");
+        goto error;
+    }
+
+    /* Commit the datatype so attributes can be attached to it as an object */
+    if (H5Tcommit2(group_id, ATTRIBUTE_CREATE_ON_DATATYPE_DTYPE_NAME, type_id, H5P_DEFAULT, H5P_DEFAULT,
+                   H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't commit datatype\n");
+        goto error;
+    }
+
+    if ((space_id = generate_random_dataspace(ATTRIBUTE_CREATE_ON_DATATYPE_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_dtype1 = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+    if ((attr_dtype2 = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Acreate_on_datatype)
+        {
+            TESTING_2("H5Acreate on a committed datatype");
+
+            if ((attr_id = H5Acreate2(type_id, ATTRIBUTE_CREATE_ON_DATATYPE_ATTR_NAME, attr_dtype1, space_id,
+                                      H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create attribute on datatype using H5Acreate\n");
+                PART_ERROR(H5Acreate_on_datatype);
+            }
+
+            /* Verify the attribute has been created */
+            if ((attr_exists = H5Aexists(type_id, ATTRIBUTE_CREATE_ON_DATATYPE_ATTR_NAME)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if attribute exists\n");
+                PART_ERROR(H5Acreate_on_datatype);
+            }
+
+            if (!attr_exists) {
+                H5_FAILED();
+                HDprintf("    attribute did not exist\n");
+                PART_ERROR(H5Acreate_on_datatype);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Acreate_on_datatype);
+
+        PART_BEGIN(H5Acreate_by_name_on_datatype)
+        {
+            TESTING_2("H5Acreate_by_name on a committed datatype");
+
+            if ((attr_id2 = H5Acreate_by_name(group_id, ATTRIBUTE_CREATE_ON_DATATYPE_DTYPE_NAME,
+                                              ATTRIBUTE_CREATE_ON_DATATYPE_ATTR_NAME2, attr_dtype2, space_id,
+                                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create attribute on datatype using H5Acreate_by_name\n");
+                PART_ERROR(H5Acreate_by_name_on_datatype);
+            }
+
+            /* Verify the attribute has been created */
+            if ((attr_exists = H5Aexists(type_id, ATTRIBUTE_CREATE_ON_DATATYPE_ATTR_NAME2)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if attribute exists\n");
+                PART_ERROR(H5Acreate_by_name_on_datatype);
+            }
+
+            if (!attr_exists) {
+                H5_FAILED();
+                HDprintf("    attribute did not exist\n");
+                PART_ERROR(H5Acreate_by_name_on_datatype);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Acreate_by_name_on_datatype);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype1) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype2) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id2) < 0)
+        TEST_ERROR;
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Tclose(attr_dtype1);
+        H5Tclose(attr_dtype2);
+        H5Aclose(attr_id);
+        H5Aclose(attr_id2);
+        H5Tclose(type_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that creating an attribute with a
+ * NULL dataspace is not problematic. The attribute is created, verified
+ * with H5Aexists, then closed and reopened to confirm it round-trips.
+ */
+static int
+test_create_attribute_with_null_space(void)
+{
+    htri_t attr_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  attr_id    = H5I_INVALID_HID;
+    hid_t  attr_dtype = H5I_INVALID_HID;
+    hid_t  space_id   = H5I_INVALID_HID;
+
+    TESTING("attribute creation with a NULL dataspace");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or attribute aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file\n");
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_CREATE_NULL_DATASPACE_TEST_SUBGROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup\n");
+        goto error;
+    }
+
+    /* H5S_NULL dataspace: no elements, so the attribute carries no data */
+    if ((space_id = H5Screate(H5S_NULL)) < 0)
+        TEST_ERROR;
+
+    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_CREATE_NULL_DATASPACE_TEST_ATTR_NAME, attr_dtype, space_id,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create attribute\n");
+        goto error;
+    }
+
+    /* Verify the attribute has been created */
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_CREATE_NULL_DATASPACE_TEST_ATTR_NAME)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf("    attribute did not exist\n");
+        goto error;
+    }
+
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+
+    /* Reopen the attribute to check that one with a NULL dataspace can be
+     * read back from the file */
+    if ((attr_id = H5Aopen(group_id, ATTRIBUTE_CREATE_NULL_DATASPACE_TEST_ATTR_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open attribute\n");
+        goto error;
+    }
+
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Tclose(attr_dtype);
+        H5Aclose(attr_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that creating an attribute with a
+ * scalar dataspace is not problematic. The attribute is created, verified
+ * with H5Aexists, then closed and reopened to confirm it round-trips.
+ */
+static int
+test_create_attribute_with_scalar_space(void)
+{
+    htri_t attr_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  attr_id    = H5I_INVALID_HID;
+    hid_t  attr_dtype = H5I_INVALID_HID;
+    hid_t  space_id   = H5I_INVALID_HID;
+
+    TESTING("attribute creation with a SCALAR dataspace");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or attribute aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file\n");
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_CREATE_SCALAR_DATASPACE_TEST_SUBGROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup\n");
+        goto error;
+    }
+
+    /* H5S_SCALAR dataspace: exactly one element, rank 0 */
+    if ((space_id = H5Screate(H5S_SCALAR)) < 0)
+        TEST_ERROR;
+
+    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_CREATE_SCALAR_DATASPACE_TEST_ATTR_NAME, attr_dtype,
+                              space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create attribute\n");
+        goto error;
+    }
+
+    /* Verify the attribute has been created */
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_CREATE_SCALAR_DATASPACE_TEST_ATTR_NAME)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf("    attribute did not exist\n");
+        goto error;
+    }
+
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+
+    /* Reopen the attribute to check that one with a scalar dataspace can be
+     * read back from the file */
+    if ((attr_id = H5Aopen(group_id, ATTRIBUTE_CREATE_SCALAR_DATASPACE_TEST_ATTR_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open attribute\n");
+        goto error;
+    }
+
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Tclose(attr_dtype);
+        H5Aclose(attr_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a space in an attribute's name
+ * is not problematic.
+ *
+ * Opens the shared test file, creates a fresh subgroup, creates an
+ * attribute whose name macro contains a space character, and verifies
+ * with H5Aexists that the attribute was created.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_create_attribute_with_space_in_name(void)
+{
+    htri_t attr_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID;
+    hid_t  group_id        = H5I_INVALID_HID;
+    hid_t  attr_id         = H5I_INVALID_HID;
+    hid_t  attr_dtype      = H5I_INVALID_HID;
+    hid_t  space_id        = H5I_INVALID_HID;
+
+    TESTING("attribute creation with a space in attribute's name");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            " API functions for basic file, group, or attribute aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open file\n");
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open container group\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_CREATE_WITH_SPACE_IN_NAME_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create container group '%s'\n",
+                 ATTRIBUTE_CREATE_WITH_SPACE_IN_NAME_GROUP_NAME);
+        goto error;
+    }
+
+    if ((space_id =
+             generate_random_dataspace(ATTRIBUTE_CREATE_WITH_SPACE_IN_NAME_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    /* The attribute name macro is expected to contain a space character */
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_CREATE_WITH_SPACE_IN_NAME_ATTR_NAME, attr_dtype, space_id,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create attribute\n");
+        goto error;
+    }
+
+    /* Verify the attribute has been created */
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_CREATE_WITH_SPACE_IN_NAME_ATTR_NAME)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf(" attribute did not exist\n");
+        goto error;
+    }
+
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of any IDs still open; failures are suppressed
+     * inside H5E_BEGIN_TRY/H5E_END_TRY since some IDs may never have
+     * been created on this path.
+     */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Tclose(attr_dtype);
+        H5Aclose(attr_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that an attribute can't be created when
+ * H5Acreate is passed invalid parameters.
+ *
+ * Setup opens the shared test file, creates a fresh subgroup and a
+ * random dataspace/datatype pair. Each part below then calls
+ * H5Acreate2 or H5Acreate_by_name with exactly one invalid argument
+ * inside H5E_BEGIN_TRY/H5E_END_TRY and fails that part if a
+ * non-negative (valid) attribute ID is returned.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_create_attribute_invalid_params(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t attr_dtype = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("attribute creation with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or attribute aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_CREATE_INVALID_PARAMS_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group\n");
+ goto error;
+ }
+
+ if ((space_id = generate_random_dataspace(ATTRIBUTE_CREATE_INVALID_PARAMS_SPACE_RANK, NULL, NULL, TRUE)) <
+ 0)
+ TEST_ERROR;
+ if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ /* Each part passes one (and only one) invalid argument; attr_id must
+ * stay invalid after every attempted creation call.
+ */
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Acreate_invalid_loc_id)
+ {
+ TESTING_2("H5Acreate with invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate2(H5I_INVALID_HID, ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME, attr_dtype,
+ space_id, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate with an invalid loc_id!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Acreate_invalid_loc_id);
+
+ PART_BEGIN(H5Acreate_invalid_attr_name)
+ {
+ TESTING_2("H5Acreate with invalid attribute name");
+
+ /* Both a NULL name and an empty ("") name must be rejected */
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate2(group_id, NULL, attr_dtype, space_id, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate with a NULL name!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_invalid_attr_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate2(group_id, "", attr_dtype, space_id, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate with an invalid name of ''!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_invalid_attr_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Acreate_invalid_attr_name);
+
+ PART_BEGIN(H5Acreate_invalid_datatype)
+ {
+ TESTING_2("H5Acreate with an invalid datatype");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate2(group_id, ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME, H5I_INVALID_HID,
+ space_id, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate with an invalid datatype!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_invalid_datatype);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Acreate_invalid_datatype);
+
+ PART_BEGIN(H5Acreate_invalid_dataspace)
+ {
+ TESTING_2("H5Acreate with an invalid dataspace");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate2(group_id, ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME, attr_dtype,
+ H5I_INVALID_HID, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate with an invalid dataspace!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_invalid_dataspace);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Acreate_invalid_dataspace);
+
+ PART_BEGIN(H5Acreate_invalid_acpl)
+ {
+ TESTING_2("H5Acreate with an invalid ACPL");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate2(group_id, ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME, attr_dtype,
+ space_id, H5I_INVALID_HID, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate with an invalid ACPL!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_invalid_acpl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Acreate_invalid_acpl);
+
+ PART_BEGIN(H5Acreate_invalid_aapl)
+ {
+ TESTING_2("H5Acreate with an invalid AAPL");
+ /* NOTE(review): compiled out when NO_INVALID_PROPERTY_LIST_TESTS is
+ * defined -- presumably for connectors/builds that cannot reject
+ * invalid property list IDs; confirm against the build config.
+ */
+#ifndef NO_INVALID_PROPERTY_LIST_TESTS
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate2(group_id, ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME, attr_dtype,
+ space_id, H5P_DEFAULT, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate with an invalid AAPL!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_invalid_aapl);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Acreate_invalid_aapl);
+#endif
+ }
+ PART_END(H5Acreate_invalid_aapl);
+
+ PART_BEGIN(H5Acreate_by_name_invalid_loc_id)
+ {
+ TESTING_2("H5Acreate_by_name with an invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate_by_name(H5I_INVALID_HID, ATTRIBUTE_CREATE_INVALID_PARAMS_GROUP_NAME,
+ ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate_by_name with an invalid loc_id!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_by_name_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Acreate_by_name_invalid_loc_id);
+
+ PART_BEGIN(H5Acreate_by_name_invalid_obj_name)
+ {
+ TESTING_2("H5Acreate_by_name with invalid object name");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate_by_name(group_id, NULL, ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME,
+ attr_dtype, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate_by_name with a NULL object name!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_by_name_invalid_obj_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate_by_name(group_id, "", ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME,
+ attr_dtype, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(
+ " created attribute using H5Acreate_by_name with an invalid object name of ''!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_by_name_invalid_obj_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Acreate_by_name_invalid_obj_name);
+
+ PART_BEGIN(H5Acreate_by_name_invalid_attr_name)
+ {
+ TESTING_2("H5Acreate_by_name with invalid attribute name");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate_by_name(container_group, ATTRIBUTE_CREATE_INVALID_PARAMS_GROUP_NAME, NULL,
+ attr_dtype, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate_by_name with a NULL attribute name!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_by_name_invalid_attr_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate_by_name(container_group, ATTRIBUTE_CREATE_INVALID_PARAMS_GROUP_NAME, "",
+ attr_dtype, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(
+ " created attribute using H5Acreate_by_name with an invalid attribute name of ''!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_by_name_invalid_attr_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Acreate_by_name_invalid_attr_name);
+
+ PART_BEGIN(H5Acreate_by_name_invalid_datatype)
+ {
+ TESTING_2("H5Acreate_by_name with invalid datatype");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate_by_name(container_group, ATTRIBUTE_CREATE_INVALID_PARAMS_GROUP_NAME,
+ ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME, H5I_INVALID_HID,
+ space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate_by_name with an invalid datatype!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_by_name_invalid_datatype);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Acreate_by_name_invalid_datatype);
+
+ PART_BEGIN(H5Acreate_by_name_invalid_dataspace)
+ {
+ TESTING_2("H5Acreate_by_name with invalid dataspace");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate_by_name(container_group, ATTRIBUTE_CREATE_INVALID_PARAMS_GROUP_NAME,
+ ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME, attr_dtype,
+ H5I_INVALID_HID, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate_by_name with an invalid dataspace!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_by_name_invalid_dataspace);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Acreate_by_name_invalid_dataspace);
+
+ PART_BEGIN(H5Acreate_by_name_invalid_acpl)
+ {
+ TESTING_2("H5Acreate_by_name with invalid ACPL");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate_by_name(container_group, ATTRIBUTE_CREATE_INVALID_PARAMS_GROUP_NAME,
+ ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME, attr_dtype, space_id,
+ H5I_INVALID_HID, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate_by_name with an invalid ACPL!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_by_name_invalid_acpl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Acreate_by_name_invalid_acpl);
+
+ PART_BEGIN(H5Acreate_by_name_invalid_aapl)
+ {
+ TESTING_2("H5Acreate_by_name with invalid AAPL");
+#ifndef NO_INVALID_PROPERTY_LIST_TESTS
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate_by_name(container_group, ATTRIBUTE_CREATE_INVALID_PARAMS_GROUP_NAME,
+ ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME, attr_dtype, space_id,
+ H5P_DEFAULT, H5I_INVALID_HID, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate_by_name with an invalid AAPL!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_by_name_invalid_aapl);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Acreate_by_name_invalid_aapl);
+#endif
+ }
+ PART_END(H5Acreate_by_name_invalid_aapl);
+
+ PART_BEGIN(H5Acreate_by_name_invalid_lapl)
+ {
+ TESTING_2("H5Acreate_by_name with invalid LAPL");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate_by_name(container_group, ATTRIBUTE_CREATE_INVALID_PARAMS_GROUP_NAME,
+ ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created attribute using H5Acreate_by_name with an invalid LAPL!\n");
+ H5Aclose(attr_id);
+ PART_ERROR(H5Acreate_by_name_invalid_lapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Acreate_by_name_invalid_lapl);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(attr_dtype) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup inside H5E_BEGIN_TRY/H5E_END_TRY; some of these
+ * IDs may never have been created on this path.
+ */
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(space_id);
+ H5Tclose(attr_dtype);
+ H5Aclose(attr_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test for H5Aopen(_by_idx).
+ *
+ * Creates three attributes on a subgroup whose GCPL tracks attribute
+ * creation order, then reopens them via H5Aopen, H5Aopen_by_name and
+ * H5Aopen_by_idx (indexed by creation order and by name, in both
+ * increasing and decreasing directions).
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_open_attribute(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t attr_id = H5I_INVALID_HID;
+    hid_t space_id = H5I_INVALID_HID;
+    hid_t attr_type = H5I_INVALID_HID;
+    hid_t gcpl_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("attribute opening");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            " API functions for basic file, group, or attribute aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Creation order must be tracked on the group for the
+     * H5_INDEX_CRT_ORDER subtests below to work.
+     */
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create GCPL for attribute creation order tracking\n");
+        goto error;
+    }
+
+    if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't set attribute creation order tracking\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5P_DEFAULT, gcpl_id,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create container group '%s'\n", ATTRIBUTE_OPEN_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((space_id = generate_random_dataspace(ATTRIBUTE_OPEN_TEST_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_type = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    /* Create several attributes */
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_OPEN_TEST_ATTR_NAME, attr_type, space_id, H5P_DEFAULT,
+                              H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME);
+        goto error;
+    }
+
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_OPEN_TEST_ATTR_NAME2, attr_type, space_id, H5P_DEFAULT,
+                              H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME2);
+        goto error;
+    }
+
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_OPEN_TEST_ATTR_NAME3, attr_type, space_id, H5P_DEFAULT,
+                              H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME3);
+        goto error;
+    }
+
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Aopen)
+        {
+            TESTING_2("H5Aopen");
+
+            if ((attr_id = H5Aopen(group_id, ATTRIBUTE_OPEN_TEST_ATTR_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' using H5Aopen\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME);
+                PART_ERROR(H5Aopen);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME);
+                PART_ERROR(H5Aopen);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aopen);
+
+        PART_BEGIN(H5Aopen_by_name)
+        {
+            TESTING_2("H5Aopen_by_name");
+
+            if ((attr_id = H5Aopen_by_name(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME,
+                                           ATTRIBUTE_OPEN_TEST_ATTR_NAME, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' using H5Aopen_by_name\n",
+                         ATTRIBUTE_OPEN_TEST_ATTR_NAME);
+                PART_ERROR(H5Aopen_by_name);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME);
+                PART_ERROR(H5Aopen_by_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aopen_by_name);
+
+        PART_BEGIN(H5Aopen_by_idx_crt_order_increasing)
+        {
+            TESTING_2("H5Aopen_by_idx by creation order in increasing order");
+
+            if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+                                          H5_ITER_INC, 0, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by creation "
+                         "order in increasing order\n",
+                         ATTRIBUTE_OPEN_TEST_ATTR_NAME, 0);
+                PART_ERROR(H5Aopen_by_idx_crt_order_increasing);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME);
+                PART_ERROR(H5Aopen_by_idx_crt_order_increasing);
+            }
+
+            if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+                                          H5_ITER_INC, 1, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by creation "
+                         "order in increasing order\n",
+                         ATTRIBUTE_OPEN_TEST_ATTR_NAME2, 1);
+                PART_ERROR(H5Aopen_by_idx_crt_order_increasing);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME2);
+                PART_ERROR(H5Aopen_by_idx_crt_order_increasing);
+            }
+
+            if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+                                          H5_ITER_INC, 2, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by creation "
+                         "order in increasing order\n",
+                         ATTRIBUTE_OPEN_TEST_ATTR_NAME3, 2);
+                PART_ERROR(H5Aopen_by_idx_crt_order_increasing);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME3);
+                PART_ERROR(H5Aopen_by_idx_crt_order_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aopen_by_idx_crt_order_increasing);
+
+        PART_BEGIN(H5Aopen_by_idx_crt_order_decreasing)
+        {
+            TESTING_2("H5Aopen_by_idx by creation order in decreasing order");
+
+            if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+                                          H5_ITER_DEC, 2, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by creation "
+                         "order in decreasing order\n",
+                         ATTRIBUTE_OPEN_TEST_ATTR_NAME, 2);
+                PART_ERROR(H5Aopen_by_idx_crt_order_decreasing);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME);
+                PART_ERROR(H5Aopen_by_idx_crt_order_decreasing);
+            }
+
+            if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+                                          H5_ITER_DEC, 1, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by creation "
+                         "order in decreasing order\n",
+                         ATTRIBUTE_OPEN_TEST_ATTR_NAME2, 1);
+                PART_ERROR(H5Aopen_by_idx_crt_order_decreasing);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME2);
+                PART_ERROR(H5Aopen_by_idx_crt_order_decreasing);
+            }
+
+            if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+                                          H5_ITER_DEC, 0, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by creation "
+                         "order in decreasing order\n",
+                         ATTRIBUTE_OPEN_TEST_ATTR_NAME3, 0);
+                PART_ERROR(H5Aopen_by_idx_crt_order_decreasing);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME3);
+                PART_ERROR(H5Aopen_by_idx_crt_order_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aopen_by_idx_crt_order_decreasing);
+
+        PART_BEGIN(H5Aopen_by_idx_name_order_increasing)
+        {
+            TESTING_2("H5Aopen_by_idx by alphabetical order in increasing order");
+
+            if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                          H5_ITER_INC, 0, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by alphabetical "
+                         "order in increasing order\n",
+                         ATTRIBUTE_OPEN_TEST_ATTR_NAME, 0);
+                PART_ERROR(H5Aopen_by_idx_name_order_increasing);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME);
+                PART_ERROR(H5Aopen_by_idx_name_order_increasing);
+            }
+
+            if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                          H5_ITER_INC, 1, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by alphabetical "
+                         "order in increasing order\n",
+                         ATTRIBUTE_OPEN_TEST_ATTR_NAME2, 1);
+                PART_ERROR(H5Aopen_by_idx_name_order_increasing);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME2);
+                PART_ERROR(H5Aopen_by_idx_name_order_increasing);
+            }
+
+            if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                          H5_ITER_INC, 2, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by alphabetical "
+                         "order in increasing order\n",
+                         ATTRIBUTE_OPEN_TEST_ATTR_NAME3, 2);
+                PART_ERROR(H5Aopen_by_idx_name_order_increasing);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME3);
+                PART_ERROR(H5Aopen_by_idx_name_order_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aopen_by_idx_name_order_increasing);
+
+        PART_BEGIN(H5Aopen_by_idx_name_order_decreasing)
+        {
+            TESTING_2("H5Aopen_by_idx by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Fixed: the index argument below is an int literal, so it must be
+             * printed with %d (was %lld, a format/argument type mismatch, which
+             * is undefined behavior; the other subtests already use %d).
+             */
+            if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                          H5_ITER_DEC, 2, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by "
+                         "alphabetical order in decreasing order\n",
+                         ATTRIBUTE_OPEN_TEST_ATTR_NAME, 2);
+                PART_ERROR(H5Aopen_by_idx_name_order_decreasing);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME);
+                PART_ERROR(H5Aopen_by_idx_name_order_decreasing);
+            }
+
+            if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                          H5_ITER_DEC, 1, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by "
+                         "alphabetical order in decreasing order\n",
+                         ATTRIBUTE_OPEN_TEST_ATTR_NAME2, 1);
+                PART_ERROR(H5Aopen_by_idx_name_order_decreasing);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME2);
+                PART_ERROR(H5Aopen_by_idx_name_order_decreasing);
+            }
+
+            if ((attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                          H5_ITER_DEC, 0, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open attribute '%s' at index %d using H5Aopen_by_idx by "
+                         "alphabetical order in decreasing order\n",
+                         ATTRIBUTE_OPEN_TEST_ATTR_NAME3, 0);
+                PART_ERROR(H5Aopen_by_idx_name_order_decreasing);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_OPEN_TEST_ATTR_NAME3);
+                PART_ERROR(H5Aopen_by_idx_name_order_decreasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Aopen_by_idx_name_order_decreasing);
+#endif
+        }
+        PART_END(H5Aopen_by_idx_name_order_decreasing);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_type) < 0)
+        TEST_ERROR;
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup inside H5E_BEGIN_TRY/H5E_END_TRY; some of these
+     * IDs may never have been created on this path.
+     */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Tclose(attr_type);
+        H5Aclose(attr_id);
+        H5Pclose(gcpl_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that an attribute can't be opened when
+ * H5Aopen(_by_name/_by_idx) is passed invalid parameters.
+ */
+static int
+test_open_attribute_invalid_params(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t attr_type = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("attribute opening with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or attribute aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n", ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id =
+ generate_random_dataspace(ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_type = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME, attr_type, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ goto error;
+ }
+
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Aopen_invalid_loc_id)
+ {
+ TESTING_2("H5Aopen with an invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen(H5I_INVALID_HID, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen with an invalid loc_id!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_invalid_loc_id);
+
+ PART_BEGIN(H5Aopen_invalid_attr_name)
+ {
+ TESTING_2("H5Aopen with an invalid attribute name");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen(group_id, NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen with a NULL attribute name!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_invalid_attr_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen(group_id, "", H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen with an invalid attribute name of ''!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_invalid_attr_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_invalid_attr_name);
+
+ PART_BEGIN(H5Aopen_invalid_aapl)
+ {
+ TESTING_2("H5Aopen with an invalid AAPL");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen(group_id, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen with an invalid AAPL!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_invalid_aapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_invalid_aapl);
+
+ PART_BEGIN(H5Aopen_by_name_invalid_loc_id)
+ {
+ TESTING_2("H5Aopen_by_name with an invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id =
+ H5Aopen_by_name(H5I_INVALID_HID, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen_by_name with an invalid loc_id!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_name_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_by_name_invalid_loc_id);
+
+ PART_BEGIN(H5Aopen_by_name_invalid_obj_name)
+ {
+ TESTING_2("H5Aopen_by_name with an invalid object name");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_name(container_group, NULL, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME,
+ H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen_by_name with a NULL object name!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_name_invalid_obj_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_name(container_group, "", ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME,
+ H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(
+ " opened attribute '%s' using H5Aopen_by_name with an invalid object name of ''!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_name_invalid_obj_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_by_name_invalid_obj_name);
+
+ PART_BEGIN(H5Aopen_by_name_invalid_attr_name)
+ {
+ TESTING_2("H5Aopen_by_name with an invalid attribute name");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_name(container_group, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+ NULL, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen_by_name with a NULL attribute name!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_name_invalid_attr_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_name(container_group, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME, "",
+ H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(
+ " opened attribute '%s' using H5Aopen_by_name with an invalid attribute name of ''!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_name_invalid_attr_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_by_name_invalid_attr_name);
+
+ PART_BEGIN(H5Aopen_by_name_invalid_aapl)
+ {
+ TESTING_2("H5Aopen_by_name with an invalid AAPL");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_name(container_group, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME, H5I_INVALID_HID,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen_by_name with an invalid AAPL!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_name_invalid_aapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_by_name_invalid_aapl);
+
+ PART_BEGIN(H5Aopen_by_name_invalid_lapl)
+ {
+ TESTING_2("H5Aopen_by_name with an invalid LAPL");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_name(container_group, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME, H5P_DEFAULT,
+ H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen_by_name with an invalid LAPL!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_name_invalid_lapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_by_name_invalid_lapl);
+
+ PART_BEGIN(H5Aopen_by_idx_invalid_loc_id)
+ {
+ TESTING_2("H5Aopen_by_idx with an invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_idx(H5I_INVALID_HID, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen_by_idx with an invalid loc_id!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_idx_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_by_idx_invalid_loc_id);
+
+ PART_BEGIN(H5Aopen_by_idx_invalid_obj_name)
+ {
+ TESTING_2("H5Aopen_by_idx with an invalid object name");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_idx(container_group, NULL, H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen_by_idx with a NULL object name!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_idx_invalid_obj_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_idx(container_group, "", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(
+ " opened attribute '%s' using H5Aopen_by_idx with an invalid object name of ''!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_idx_invalid_obj_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_by_idx_invalid_obj_name);
+
+ PART_BEGIN(H5Aopen_by_idx_invalid_index_type)
+ {
+ TESTING_2("H5Aopen_by_idx with an invalid index type");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+ H5_INDEX_UNKNOWN, H5_ITER_INC, 0, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen_by_idx with invalid index type "
+ "H5_INDEX_UNKNOWN!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_idx_invalid_index_type);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+ H5_INDEX_N, H5_ITER_INC, 0, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(
+ " opened attribute '%s' using H5Aopen_by_idx with invalid index type H5_INDEX_N!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_idx_invalid_index_type);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_by_idx_invalid_index_type);
+
+ PART_BEGIN(H5Aopen_by_idx_invalid_iter_order)
+ {
+ TESTING_2("H5Aopen_by_idx with an invalid iteration order");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_UNKNOWN, 0, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen_by_idx with invalid iteration order "
+ "H5_ITER_UNKNOWN!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_idx_invalid_iter_order);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_N, 0, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen_by_idx with invalid iteration order "
+ "H5_ITER_N!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_idx_invalid_iter_order);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_by_idx_invalid_iter_order);
+
+ PART_BEGIN(H5Aopen_by_idx_invalid_aapl)
+ {
+ TESTING_2("H5Aopen_by_idx with an invalid AAPL");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_INC, 0, H5I_INVALID_HID, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen_by_idx with an invalid AAPL!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_idx_invalid_aapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_by_idx_invalid_aapl);
+
+ PART_BEGIN(H5Aopen_by_idx_invalid_lapl)
+ {
+ TESTING_2("H5Aopen_by_idx with an invalid LAPL");
+
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Aopen_by_idx(container_group, ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened attribute '%s' using H5Aopen_by_idx with an invalid LAPL!\n",
+ ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME);
+ H5Aclose(attr_id);
+ PART_ERROR(H5Aopen_by_idx_invalid_lapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aopen_by_idx_invalid_lapl);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(attr_type) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(space_id);
+ H5Tclose(attr_type);
+ H5Aclose(attr_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a simple write to an attribute
+ * can be made.
+ */
+static int
+test_write_attribute(void)
+{
+ hsize_t dims[ATTRIBUTE_WRITE_TEST_SPACE_RANK];
+ size_t i, data_size;
+ htri_t attr_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ void *data = NULL;
+
+ TESTING("H5Awrite");
+
+ /* Make sure the connector supports the API functions being tested.
+ * FLUSH_REFRESH is required because the test flushes the file after writing. */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, attribute, or file flush aren't supported with "
+ "this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file\n");
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_WRITE_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n", ATTRIBUTE_WRITE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ /* Random dataspace; the generated dimensions are returned in 'dims' and used
+ * below to size the write buffer */
+ if ((space_id = generate_random_dataspace(ATTRIBUTE_WRITE_TEST_SPACE_RANK, NULL, dims, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_WRITE_TEST_ATTR_NAME, ATTRIBUTE_WRITE_TEST_ATTR_DTYPE,
+ space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute\n");
+ goto error;
+ }
+
+ /* Verify the attribute has been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_WRITE_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute exists\n");
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute did not exist\n");
+ goto error;
+ }
+
+ /* Total buffer size in bytes = product of the dataspace dims * element size */
+ for (i = 0, data_size = 1; i < ATTRIBUTE_WRITE_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= ATTRIBUTE_WRITE_TEST_ATTR_DTYPE_SIZE;
+
+ if (NULL == (data = HDmalloc(data_size)))
+ TEST_ERROR;
+
+ /* Fill the write buffer with increasing integer values */
+ for (i = 0; i < data_size / ATTRIBUTE_WRITE_TEST_ATTR_DTYPE_SIZE; i++)
+ ((int *)data)[i] = (int)i;
+
+ if (H5Awrite(attr_id, ATTRIBUTE_WRITE_TEST_ATTR_DTYPE, data) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to attribute\n");
+ goto error;
+ }
+
+ /* Make sure that the attribute can be flushed to the file */
+ if (H5Fflush(file_id, H5F_SCOPE_GLOBAL) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't flush the attribute\n");
+ goto error;
+ }
+
+ if (data) {
+ HDfree(data);
+ data = NULL;
+ }
+
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup; failures while closing IDs are suppressed */
+ H5E_BEGIN_TRY
+ {
+ if (data)
+ HDfree(data);
+ H5Sclose(space_id);
+ H5Aclose(attr_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that writing an attribute fails when
+ * H5Awrite is passed invalid parameters.
+ */
+static int
+test_write_attribute_invalid_params(void)
+{
+ hsize_t dims[ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_SPACE_RANK];
+ size_t i, data_size;
+ htri_t attr_exists;
+ herr_t err_ret = -1;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ void *data = NULL;
+
+ TESTING_MULTIPART("H5Awrite with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or attribute aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n",
+ ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id =
+ generate_random_dataspace(ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_SPACE_RANK, NULL, dims, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_ATTR_NAME,
+ ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_ATTR_DTYPE, space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute\n");
+ goto error;
+ }
+
+ /* Verify the attribute has been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute exists\n");
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute did not exist\n");
+ goto error;
+ }
+
+ /* Allocate and fill a valid write buffer so that only the parameter under
+ * test is invalid in each part below */
+ for (i = 0, data_size = 1; i < ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_ATTR_DTYPE_SIZE;
+
+ if (NULL == (data = HDmalloc(data_size)))
+ TEST_ERROR;
+
+ for (i = 0; i < data_size / ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_ATTR_DTYPE_SIZE; i++)
+ ((int *)data)[i] = (int)i;
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ /* Each part passes exactly one invalid argument to H5Awrite and
+ * expects a negative return value */
+ PART_BEGIN(H5Awrite_invalid_attr_id)
+ {
+ TESTING_2("H5Awrite with an invalid attr_id");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Awrite(H5I_INVALID_HID, ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_ATTR_DTYPE, data);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" wrote to attribute using an invalid attr_id!\n");
+ PART_ERROR(H5Awrite_invalid_attr_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Awrite_invalid_attr_id);
+
+ PART_BEGIN(H5Awrite_invalid_datatype)
+ {
+ TESTING_2("H5Awrite with an invalid datatype");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Awrite(attr_id, H5I_INVALID_HID, data);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" wrote to attribute using an invalid datatype!\n");
+ PART_ERROR(H5Awrite_invalid_datatype);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Awrite_invalid_datatype);
+
+ PART_BEGIN(H5Awrite_invalid_data_buf)
+ {
+ TESTING_2("H5Awrite with an invalid data buffer");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Awrite(attr_id, ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_ATTR_DTYPE, NULL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" wrote to attribute using an invalid data buffer!\n");
+ PART_ERROR(H5Awrite_invalid_data_buf);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Awrite_invalid_data_buf);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (data) {
+ HDfree(data);
+ data = NULL;
+ }
+
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup; failures while closing IDs are suppressed */
+ H5E_BEGIN_TRY
+ {
+ if (data)
+ HDfree(data);
+ H5Sclose(space_id);
+ H5Aclose(attr_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that simple data can be read back
+ * and verified after it has been written to an
+ * attribute.
+ */
+static int
+test_read_attribute(void)
+{
+ hsize_t dims[ATTRIBUTE_READ_TEST_SPACE_RANK];
+ size_t i, data_size;
+ htri_t attr_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ void *data = NULL;
+ void *read_buf = NULL;
+
+ TESTING("H5Aread");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or attribute aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_READ_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n", ATTRIBUTE_READ_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id = generate_random_dataspace(ATTRIBUTE_READ_TEST_SPACE_RANK, NULL, dims, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_READ_TEST_ATTR_NAME, ATTRIBUTE_READ_TEST_ATTR_DTYPE,
+ space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute\n");
+ goto error;
+ }
+
+ /* Verify the attribute has been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_READ_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute exists\n");
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute did not exist\n");
+ goto error;
+ }
+
+ /* Total buffer size in bytes = product of the dataspace dims * element size */
+ for (i = 0, data_size = 1; i < ATTRIBUTE_READ_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= ATTRIBUTE_READ_TEST_ATTR_DTYPE_SIZE;
+
+ if (NULL == (data = HDmalloc(data_size)))
+ TEST_ERROR;
+ if (NULL == (read_buf = HDcalloc(1, data_size)))
+ TEST_ERROR;
+
+ /* Fill the write buffer with a known pattern so the read-back can be verified */
+ for (i = 0; i < data_size / ATTRIBUTE_READ_TEST_ATTR_DTYPE_SIZE; i++)
+ ((int *)data)[i] = (int)i;
+
+ if (H5Awrite(attr_id, ATTRIBUTE_READ_TEST_ATTR_DTYPE, data) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to attribute\n");
+ goto error;
+ }
+
+ if (data) {
+ HDfree(data);
+ data = NULL;
+ }
+
+ /* Close and re-open the attribute so the read goes through a freshly-opened
+ * ID rather than the one the data was written with */
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Aopen(group_id, ATTRIBUTE_READ_TEST_ATTR_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open attribute\n");
+ goto error;
+ }
+
+ if (H5Aread(attr_id, ATTRIBUTE_READ_TEST_ATTR_DTYPE, read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from attribute\n");
+ goto error;
+ }
+
+ /* Verify every element matches the pattern that was written */
+ for (i = 0; i < data_size / ATTRIBUTE_READ_TEST_ATTR_DTYPE_SIZE; i++) {
+ if (((int *)read_buf)[i] != (int)i) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ goto error;
+ }
+ }
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup; failures while closing IDs are suppressed */
+ H5E_BEGIN_TRY
+ {
+ if (data)
+ HDfree(data);
+ if (read_buf)
+ HDfree(read_buf);
+ H5Sclose(space_id);
+ H5Aclose(attr_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that reading an attribute fails when
+ * H5Aread is passed invalid parameters.
+ */
+static int
+test_read_attribute_invalid_params(void)
+{
+ hsize_t dims[ATTRIBUTE_READ_INVALID_PARAMS_TEST_SPACE_RANK];
+ size_t i, data_size;
+ htri_t attr_exists;
+ herr_t err_ret = -1;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ void *data = NULL;
+ void *read_buf = NULL;
+
+ TESTING_MULTIPART("H5Aread with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or attribute aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_READ_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n", ATTRIBUTE_READ_INVALID_PARAMS_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id =
+ generate_random_dataspace(ATTRIBUTE_READ_INVALID_PARAMS_TEST_SPACE_RANK, NULL, dims, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_READ_INVALID_PARAMS_TEST_ATTR_NAME,
+ ATTRIBUTE_READ_INVALID_PARAMS_TEST_ATTR_DTYPE, space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute\n");
+ goto error;
+ }
+
+ /* Verify the attribute has been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_READ_INVALID_PARAMS_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute exists\n");
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute did not exist\n");
+ goto error;
+ }
+
+ /* Write real data to the attribute so a valid read buffer is the only
+ * thing each negative-test part below needs to supply */
+ for (i = 0, data_size = 1; i < ATTRIBUTE_READ_INVALID_PARAMS_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= ATTRIBUTE_READ_INVALID_PARAMS_TEST_ATTR_DTYPE_SIZE;
+
+ if (NULL == (data = HDmalloc(data_size)))
+ TEST_ERROR;
+ if (NULL == (read_buf = HDcalloc(1, data_size)))
+ TEST_ERROR;
+
+ for (i = 0; i < data_size / ATTRIBUTE_READ_INVALID_PARAMS_TEST_ATTR_DTYPE_SIZE; i++)
+ ((int *)data)[i] = (int)i;
+
+ if (H5Awrite(attr_id, ATTRIBUTE_READ_INVALID_PARAMS_TEST_ATTR_DTYPE, data) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to attribute\n");
+ goto error;
+ }
+
+ if (data) {
+ HDfree(data);
+ data = NULL;
+ }
+
+ /* Re-open the attribute so the negative tests run against a fresh ID */
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Aopen(group_id, ATTRIBUTE_READ_INVALID_PARAMS_TEST_ATTR_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open attribute\n");
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ /* Each part passes exactly one invalid argument to H5Aread and
+ * expects a negative return value */
+ PART_BEGIN(H5Aread_invalid_attr_id)
+ {
+ TESTING_2("H5Aread with an invalid attr_id");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Aread(H5I_INVALID_HID, ATTRIBUTE_READ_INVALID_PARAMS_TEST_ATTR_DTYPE, read_buf);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" read attribute with an invalid attr_id!\n");
+ PART_ERROR(H5Aread_invalid_attr_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aread_invalid_attr_id);
+
+ PART_BEGIN(H5Aread_invalid_datatype)
+ {
+ TESTING_2("H5Aread with an invalid datatype");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Aread(attr_id, H5I_INVALID_HID, read_buf);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" read attribute with an invalid datatype!\n");
+ PART_ERROR(H5Aread_invalid_datatype);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aread_invalid_datatype);
+
+ PART_BEGIN(H5Aread_invalid_read_buf)
+ {
+ TESTING_2("H5Aread with an invalid read buffer");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Aread(attr_id, ATTRIBUTE_READ_INVALID_PARAMS_TEST_ATTR_DTYPE, NULL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" read attribute with an invalid read buffer!\n");
+ PART_ERROR(H5Aread_invalid_read_buf);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aread_invalid_read_buf);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup; failures while closing IDs are suppressed */
+ H5E_BEGIN_TRY
+ {
+ if (data)
+ HDfree(data);
+ if (read_buf)
+ HDfree(read_buf);
+ H5Sclose(space_id);
+ H5Aclose(attr_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that reading an empty attribute (one that
+ * has never been written to) succeeds.
+ */
+static int
+test_read_empty_attribute(void)
+{
+ hsize_t dims[ATTRIBUTE_READ_EMPTY_SPACE_RANK];
+ size_t i, data_size;
+ htri_t attr_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ void *read_buf = NULL;
+
+ TESTING("reading an empty attribute");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or attribute aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_READ_EMPTY_ATTR_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n", ATTRIBUTE_READ_EMPTY_ATTR_GROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id = generate_random_dataspace(ATTRIBUTE_READ_EMPTY_SPACE_RANK, NULL, dims, TRUE)) < 0)
+ TEST_ERROR;
+
+ /* Create the attribute but never write to it — the point of the test is
+ * reading an attribute that has no data */
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_READ_EMPTY_ATTR_NAME, ATTRIBUTE_READ_EMPTY_DTYPE, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute\n");
+ goto error;
+ }
+
+ /* Verify the attribute has been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_READ_EMPTY_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute exists\n");
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute did not exist\n");
+ goto error;
+ }
+
+ /* Re-open the attribute so the read goes through a freshly-opened ID */
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Aopen(group_id, ATTRIBUTE_READ_EMPTY_ATTR_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open attribute\n");
+ goto error;
+ }
+
+ /* Total buffer size in bytes = product of the dataspace dims * element size */
+ for (i = 0, data_size = 1; i < ATTRIBUTE_READ_EMPTY_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= ATTRIBUTE_READ_EMPTY_DTYPE_SIZE;
+
+ if (NULL == (read_buf = HDcalloc(1, data_size)))
+ TEST_ERROR;
+
+ /* The read itself must succeed; the returned values are not checked */
+ if (H5Aread(attr_id, ATTRIBUTE_READ_EMPTY_DTYPE, read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from attribute\n");
+ goto error;
+ }
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup; failures while closing IDs are suppressed */
+ H5E_BEGIN_TRY
+ {
+ if (read_buf)
+ HDfree(read_buf);
+ H5Sclose(space_id);
+ H5Aclose(attr_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that H5Aclose fails when it is passed
+ * an invalid attribute ID.
+ */
+static int
+test_close_attribute_invalid_id(void)
+{
+ herr_t err_ret = -1;
+ hid_t file_id = H5I_INVALID_HID;
+
+ TESTING("H5Aclose with an invalid attribute ID");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or attribute aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ /* H5Aclose on an invalid ID must return a negative value */
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Aclose(H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Aclose succeeded with an invalid attribute ID!\n");
+ goto error;
+ }
+
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup; failure while closing the file is suppressed */
+ H5E_BEGIN_TRY
+ {
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that valid copies of an attribute's
+ * dataspace and datatype can be retrieved with
+ * H5Aget_space and H5Aget_type, respectively.
+ */
+static int
+test_get_attribute_space_and_type(void)
+{
+ hsize_t attr_dims[ATTRIBUTE_GET_SPACE_TYPE_TEST_SPACE_RANK];
+ size_t i;
+ htri_t attr_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t attr_dtype = H5I_INVALID_HID;
+ hid_t attr_space_id = H5I_INVALID_HID;
+ hid_t tmp_type_id = H5I_INVALID_HID;
+ hid_t tmp_space_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("retrieval of an attribute's dataspace and datatype");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or attribute aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_GET_SPACE_TYPE_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n", ATTRIBUTE_GET_SPACE_TYPE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((attr_space_id =
+ generate_random_dataspace(ATTRIBUTE_GET_SPACE_TYPE_TEST_SPACE_RANK, NULL, attr_dims, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_GET_SPACE_TYPE_TEST_ATTR_NAME, attr_dtype, attr_space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute\n");
+ goto error;
+ }
+
+ /* Verify the attribute has been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_GET_SPACE_TYPE_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute exists\n");
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute did not exist\n");
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ /* Retrieve the attribute's datatype and dataspace and verify them */
+ PART_BEGIN(H5Aget_type)
+ {
+ TESTING_2("H5Aget_type");
+
+ if ((tmp_type_id = H5Aget_type(attr_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve attribute's datatype\n");
+ PART_ERROR(H5Aget_type);
+ }
+
+ {
+ htri_t types_equal = H5Tequal(tmp_type_id, attr_dtype);
+
+ if (types_equal < 0) {
+ H5_FAILED();
+ HDprintf(" datatype was invalid\n");
+ PART_ERROR(H5Aget_type);
+ }
+
+ if (!types_equal) {
+ H5_FAILED();
+ HDprintf(" attribute's datatype did not match\n");
+ PART_ERROR(H5Aget_type);
+ }
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aget_type);
+
+ PART_BEGIN(H5Aget_space)
+ {
+ TESTING_2("H5Aget_space");
+
+ if ((tmp_space_id = H5Aget_space(attr_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve attribute's dataspace\n");
+ PART_ERROR(H5Aget_space);
+ }
+
+ {
+ hsize_t space_dims[ATTRIBUTE_GET_SPACE_TYPE_TEST_SPACE_RANK];
+
+ if (H5Sget_simple_extent_dims(tmp_space_id, space_dims, NULL) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve dimensions of dataspace\n");
+ PART_ERROR(H5Aget_space);
+ }
+
+ for (i = 0; i < ATTRIBUTE_GET_SPACE_TYPE_TEST_SPACE_RANK; i++)
+ if (space_dims[i] != attr_dims[i]) {
+ H5_FAILED();
+ HDprintf(" attribute's dataspace dims didn't match\n");
+ PART_ERROR(H5Aget_space);
+ }
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aget_space);
+
+ /* Now close the attribute and verify that this still works after opening an
+ * attribute instead of creating it
+ */
+ if (attr_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Aclose(attr_id);
+ }
+ H5E_END_TRY;
+ attr_id = H5I_INVALID_HID;
+ }
+ if (tmp_type_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Tclose(tmp_type_id);
+ }
+ H5E_END_TRY;
+ tmp_type_id = H5I_INVALID_HID;
+ }
+ if (tmp_space_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(tmp_space_id);
+ }
+ H5E_END_TRY;
+ tmp_space_id = H5I_INVALID_HID;
+ }
+
+ PART_BEGIN(H5Aget_type_reopened)
+ {
+ TESTING_2("H5Aget_type after re-opening an attribute");
+
+ if ((attr_id = H5Aopen(group_id, ATTRIBUTE_GET_SPACE_TYPE_TEST_ATTR_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open attribute '%s'\n", ATTRIBUTE_GET_SPACE_TYPE_TEST_ATTR_NAME);
+ PART_ERROR(H5Aget_type_reopened);
+ }
+
+ if ((tmp_type_id = H5Aget_type(attr_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve attribute's datatype\n");
+ PART_ERROR(H5Aget_type_reopened);
+ }
+
+ {
+ htri_t types_equal = H5Tequal(tmp_type_id, attr_dtype);
+
+ if (types_equal < 0) {
+ H5_FAILED();
+ HDprintf(" datatype was invalid\n");
+ PART_ERROR(H5Aget_type_reopened);
+ }
+
+ if (!types_equal) {
+ H5_FAILED();
+ HDprintf(" attribute's datatype did not match\n");
+ PART_ERROR(H5Aget_type_reopened);
+ }
+ }
+
+ if (attr_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Aclose(attr_id);
+ }
+ H5E_END_TRY;
+ attr_id = H5I_INVALID_HID;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aget_type_reopened);
+
+ PART_BEGIN(H5Aget_space_reopened)
+ {
+ TESTING_2("H5Aget_space after re-opening an attribute");
+
+ if ((attr_id = H5Aopen(group_id, ATTRIBUTE_GET_SPACE_TYPE_TEST_ATTR_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open attribute '%s'\n", ATTRIBUTE_GET_SPACE_TYPE_TEST_ATTR_NAME);
+ PART_ERROR(H5Aget_space_reopened);
+ }
+
+ if ((tmp_space_id = H5Aget_space(attr_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve attribute's dataspace\n");
+ PART_ERROR(H5Aget_space_reopened);
+ }
+
+ {
+ hsize_t space_dims[ATTRIBUTE_GET_SPACE_TYPE_TEST_SPACE_RANK];
+
+ if (H5Sget_simple_extent_dims(tmp_space_id, space_dims, NULL) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve dimensions of dataspace\n");
+ PART_ERROR(H5Aget_space_reopened);
+ }
+
+ for (i = 0; i < ATTRIBUTE_GET_SPACE_TYPE_TEST_SPACE_RANK; i++) {
+ if (space_dims[i] != attr_dims[i]) {
+ H5_FAILED();
+ HDprintf(" dataspace dims didn't match!\n");
+ PART_ERROR(H5Aget_space_reopened);
+ }
+ }
+ }
+
+ if (attr_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Aclose(attr_id);
+ }
+ H5E_END_TRY;
+ attr_id = H5I_INVALID_HID;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aget_space_reopened);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(tmp_space_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(attr_space_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(tmp_type_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(attr_dtype) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(tmp_space_id);
+ H5Sclose(attr_space_id);
+ H5Tclose(tmp_type_id);
+ H5Tclose(attr_dtype);
+ H5Aclose(attr_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that an attribute's dataspace and datatype
+ * can't be retrieved when H5Aget_space and H5Aget_type are passed
+ * invalid parameters, respectively.
+ */
+static int
+test_get_attribute_space_and_type_invalid_params(void)
+{
+    htri_t attr_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID;
+    hid_t  group_id        = H5I_INVALID_HID;
+    hid_t  attr_id         = H5I_INVALID_HID;
+    hid_t  attr_dtype      = H5I_INVALID_HID;
+    hid_t  attr_space_id   = H5I_INVALID_HID;
+    hid_t  tmp_type_id     = H5I_INVALID_HID;
+    hid_t  tmp_space_id    = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("H5Aget_type/H5Aget_space with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or attribute aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    /* Open the shared API test file and the attribute test container group */
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_GET_SPACE_TYPE_INVALID_PARAMS_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container group '%s'\n",
+                 ATTRIBUTE_GET_SPACE_TYPE_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Create an attribute with a randomly-generated dataspace and datatype
+     * so there is a valid attribute present while the invalid-ID calls below
+     * are exercised.
+     */
+    if ((attr_space_id = generate_random_dataspace(ATTRIBUTE_GET_SPACE_TYPE_INVALID_PARAMS_TEST_SPACE_RANK,
+                                                   NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_GET_SPACE_TYPE_INVALID_PARAMS_TEST_ATTR_NAME, attr_dtype,
+                              attr_space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create attribute\n");
+        goto error;
+    }
+
+    /* Verify the attribute has been created */
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_GET_SPACE_TYPE_INVALID_PARAMS_TEST_ATTR_NAME)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf("    attribute did not exist\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /* Retrieve the attribute's datatype and dataspace and verify them */
+        PART_BEGIN(H5Aget_type_invalid_attr_id)
+        {
+            TESTING_2("H5Aget_type with an invalid attr_id");
+
+            /* The call is expected to fail; suppress the error stack output */
+            H5E_BEGIN_TRY
+            {
+                tmp_type_id = H5Aget_type(H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (tmp_type_id >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved copy of attribute's datatype using an invalid attr_id!\n");
+                PART_ERROR(H5Aget_type_invalid_attr_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_type_invalid_attr_id);
+
+        PART_BEGIN(H5Aget_space_invalid_attr_id)
+        {
+            TESTING_2("H5Aget_space with an invalid attr_id");
+
+            /* The call is expected to fail; suppress the error stack output */
+            H5E_BEGIN_TRY
+            {
+                tmp_space_id = H5Aget_space(H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (tmp_space_id >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved copy of attribute's dataspace using an invalid attr_id!\n");
+                PART_ERROR(H5Aget_space_invalid_attr_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_space_invalid_attr_id);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    /* On success tmp_type_id/tmp_space_id were never valid (the calls above
+     * are expected to fail), so only the objects created during setup are
+     * closed here; the error path below closes everything defensively.
+     */
+    if (H5Sclose(attr_space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(tmp_space_id);
+        H5Sclose(attr_space_id);
+        H5Tclose(tmp_type_id);
+        H5Tclose(attr_dtype);
+        H5Aclose(attr_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that an ACPL used for attribute creation
+ * can be persisted and that a valid copy of that ACPL can
+ * be retrieved later with a call to H5Aget_create_plist.
+ */
+static int
+test_attribute_property_lists(void)
+{
+    H5T_cset_t encoding = H5T_CSET_UTF8;
+    htri_t     attr_exists;
+    hid_t      file_id         = H5I_INVALID_HID;
+    hid_t      container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t      attr_id1 = H5I_INVALID_HID, attr_id2 = H5I_INVALID_HID;
+    hid_t      attr_dtype1 = H5I_INVALID_HID, attr_dtype2 = H5I_INVALID_HID;
+    hid_t      acpl_id1 = H5I_INVALID_HID, acpl_id2 = H5I_INVALID_HID;
+    hid_t      space_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("attribute property list operations");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, attribute, or getting property list aren't "
+                 "supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file\n");
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_PROPERTY_LIST_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group\n");
+        goto error;
+    }
+
+    if ((space_id = generate_random_dataspace(ATTRIBUTE_PROPERTY_LIST_TEST_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_dtype1 = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+    if ((attr_dtype2 = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((acpl_id1 = H5Pcreate(H5P_ATTRIBUTE_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create ACPL\n");
+        goto error;
+    }
+
+    /* Set a non-default property so we can later tell a persisted copy of
+     * this ACPL apart from a default one */
+    if (H5Pset_char_encoding(acpl_id1, encoding) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set ACPL property value\n");
+        goto error;
+    }
+
+    /* Attribute 1 is created with the modified ACPL, attribute 2 with the default */
+    if ((attr_id1 = H5Acreate2(group_id, ATTRIBUTE_PROPERTY_LIST_TEST_ATTRIBUTE_NAME1, attr_dtype1, space_id,
+                               acpl_id1, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create attribute\n");
+        goto error;
+    }
+
+    if ((attr_id2 = H5Acreate2(group_id, ATTRIBUTE_PROPERTY_LIST_TEST_ATTRIBUTE_NAME2, attr_dtype2, space_id,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create attribute\n");
+        goto error;
+    }
+
+    if (H5Pclose(acpl_id1) < 0)
+        TEST_ERROR;
+
+    /* Verify the attributes have been created */
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_PROPERTY_LIST_TEST_ATTRIBUTE_NAME1)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf("    attribute did not exist\n");
+        goto error;
+    }
+
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_PROPERTY_LIST_TEST_ATTRIBUTE_NAME2)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf("    attribute did not exist\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Aget_create_plist)
+        {
+            TESTING_2("H5Aget_create_plist");
+
+            /* Try to retrieve copies of the two property lists, one which has the property set and one which
+             * does not */
+            if ((acpl_id1 = H5Aget_create_plist(attr_id1)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Aget_create_plist);
+            }
+
+            if ((acpl_id2 = H5Aget_create_plist(attr_id2)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Aget_create_plist);
+            }
+
+            /* Ensure that property list 1 has the property list set and property list 2 does not */
+            encoding = H5T_CSET_ERROR;
+
+            if (H5Pget_char_encoding(acpl_id1, &encoding) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve ACPL property value\n");
+                PART_ERROR(H5Aget_create_plist);
+            }
+
+            if (H5T_CSET_UTF8 != encoding) {
+                H5_FAILED();
+                HDprintf("    ACPL property value was incorrect\n");
+                PART_ERROR(H5Aget_create_plist);
+            }
+
+            encoding = H5T_CSET_ERROR;
+
+            if (H5Pget_char_encoding(acpl_id2, &encoding) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve ACPL property value\n");
+                PART_ERROR(H5Aget_create_plist);
+            }
+
+            if (H5T_CSET_UTF8 == encoding) {
+                H5_FAILED();
+                HDprintf("    ACPL property value was set!\n");
+                PART_ERROR(H5Aget_create_plist);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_create_plist);
+
+        /* Now close the property lists and attribute and see if we can still retrieve copies of
+         * the property lists upon opening (instead of creating) an attribute
+         */
+        if (acpl_id1 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Pclose(acpl_id1);
+            }
+            H5E_END_TRY;
+            acpl_id1 = H5I_INVALID_HID;
+        }
+        if (acpl_id2 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Pclose(acpl_id2);
+            }
+            H5E_END_TRY;
+            acpl_id2 = H5I_INVALID_HID;
+        }
+        if (attr_id1 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Aclose(attr_id1);
+            }
+            H5E_END_TRY;
+            attr_id1 = H5I_INVALID_HID;
+        }
+        if (attr_id2 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Aclose(attr_id2);
+            }
+            H5E_END_TRY;
+            attr_id2 = H5I_INVALID_HID;
+        }
+
+        PART_BEGIN(H5Aget_create_plist_reopened)
+        {
+            TESTING_2("H5Aget_create_plist after re-opening an attribute");
+
+            if ((attr_id1 = H5Aopen(group_id, ATTRIBUTE_PROPERTY_LIST_TEST_ATTRIBUTE_NAME1, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't open attribute '%s'\n", ATTRIBUTE_PROPERTY_LIST_TEST_ATTRIBUTE_NAME1);
+                PART_ERROR(H5Aget_create_plist_reopened);
+            }
+
+            if ((attr_id2 = H5Aopen(group_id, ATTRIBUTE_PROPERTY_LIST_TEST_ATTRIBUTE_NAME2, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't open attribute '%s'\n", ATTRIBUTE_PROPERTY_LIST_TEST_ATTRIBUTE_NAME2);
+                PART_ERROR(H5Aget_create_plist_reopened);
+            }
+
+            if ((acpl_id1 = H5Aget_create_plist(attr_id1)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Aget_create_plist_reopened);
+            }
+
+            if ((acpl_id2 = H5Aget_create_plist(attr_id2)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Aget_create_plist_reopened);
+            }
+
+            /* Ensure the set property value persisted across re-opening: the
+             * first attribute's ACPL should still carry the UTF-8 encoding
+             * and the second's should still have the default value
+             */
+            encoding = H5T_CSET_ERROR;
+
+            if (H5Pget_char_encoding(acpl_id1, &encoding) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve ACPL property value\n");
+                PART_ERROR(H5Aget_create_plist_reopened);
+            }
+
+            if (H5T_CSET_UTF8 != encoding) {
+                H5_FAILED();
+                HDprintf("    ACPL property value was incorrect\n");
+                PART_ERROR(H5Aget_create_plist_reopened);
+            }
+
+            encoding = H5T_CSET_ERROR;
+
+            if (H5Pget_char_encoding(acpl_id2, &encoding) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve ACPL property value\n");
+                PART_ERROR(H5Aget_create_plist_reopened);
+            }
+
+            if (H5T_CSET_UTF8 == encoding) {
+                H5_FAILED();
+                HDprintf("    ACPL property value was set!\n");
+                PART_ERROR(H5Aget_create_plist_reopened);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_create_plist_reopened);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(acpl_id1) < 0)
+        TEST_ERROR;
+    if (H5Pclose(acpl_id2) < 0)
+        TEST_ERROR;
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype1) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype2) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id1) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(acpl_id1);
+        H5Pclose(acpl_id2);
+        H5Sclose(space_id);
+        H5Tclose(attr_dtype1);
+        H5Tclose(attr_dtype2);
+        H5Aclose(attr_id1);
+        H5Aclose(attr_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that an attribute's name can be
+ * correctly retrieved with H5Aget_name and
+ * H5Aget_name_by_idx.
+ */
+static int
+test_get_attribute_name(void)
+{
+    ssize_t name_buf_size;
+    htri_t  attr_exists;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID;
+    hid_t   group_id        = H5I_INVALID_HID;
+    hid_t   attr_id         = H5I_INVALID_HID;
+    hid_t   attr_dtype      = H5I_INVALID_HID;
+    hid_t   space_id        = H5I_INVALID_HID;
+    hid_t   gcpl_id         = H5I_INVALID_HID;
+    char   *name_buf        = NULL;
+
+    TESTING_MULTIPART("retrieval of an attribute's name");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, attribute, or creation order aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Attribute creation order tracking is needed so that the
+     * H5_INDEX_CRT_ORDER queries below are well-defined */
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create GCPL for attribute creation order tracking\n");
+        goto error;
+    }
+
+    if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set attribute creation order tracking\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5P_DEFAULT, gcpl_id,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container group '%s'\n", ATTRIBUTE_GET_NAME_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((space_id = generate_random_dataspace(ATTRIBUTE_GET_NAME_TEST_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    /* Create several attributes */
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME, attr_dtype, space_id,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create attribute '%s'\n", ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME);
+        goto error;
+    }
+
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2, attr_dtype, space_id,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create attribute '%s'\n", ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2);
+        goto error;
+    }
+
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3, attr_dtype, space_id,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create attribute '%s'\n", ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3);
+        goto error;
+    }
+
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+
+    /* Verify the attributes have been created */
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if attribute '%s' exists\n", ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME);
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf("    attribute '%s' did not exist\n", ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME);
+        goto error;
+    }
+
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if attribute '%s' exists\n",
+                 ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2);
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf("    attribute '%s' did not exist\n", ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2);
+        goto error;
+    }
+
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if attribute '%s' exists\n",
+                 ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3);
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf("    attribute '%s' did not exist\n", ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3);
+        goto error;
+    }
+
+    /* Allocate the name buffer.
+     * NOTE(review): the size is based on the first attribute's name plus 2;
+     * this assumes NAME2/NAME3 are at most one character longer than NAME —
+     * confirm against the macro definitions.
+     */
+    name_buf_size = strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME) + 2;
+    if (NULL == (name_buf = (char *)HDmalloc((size_t)name_buf_size))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for storing attribute's name\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Aget_name)
+        {
+            TESTING_2("H5Aget_name");
+
+            if ((attr_id = H5Aopen(group_id, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open attribute '%s'\n", ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME);
+                PART_ERROR(H5Aget_name);
+            }
+
+            *name_buf = '\0';
+            if (H5Aget_name(attr_id, (size_t)name_buf_size, name_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve attribute name\n");
+                PART_ERROR(H5Aget_name);
+            }
+
+            if (HDstrncmp(name_buf, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME,
+                          strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME) + 1)) {
+                H5_FAILED();
+                HDprintf("    retrieved attribute name '%s' didn't match '%s'\n", name_buf,
+                         ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME);
+                PART_ERROR(H5Aget_name);
+            }
+
+            if (H5Aclose(attr_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close attribute '%s'\n", ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME);
+                PART_ERROR(H5Aget_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_name);
+
+        H5E_BEGIN_TRY
+        {
+            H5Aclose(attr_id);
+            attr_id = H5I_INVALID_HID;
+        }
+        H5E_END_TRY;
+
+        PART_BEGIN(H5Aget_name_by_idx_crt_order_increasing)
+        {
+            TESTING_2("H5Aget_name_by_idx by creation order in increasing order");
+
+            *name_buf = '\0';
+            if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+                                   H5_ITER_INC, 0, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve name of attribute at index %d using H5Aget_name_by_index by "
+                         "creation order in increasing order\n",
+                         0);
+                PART_ERROR(H5Aget_name_by_idx_crt_order_increasing);
+            }
+
+            if (HDstrncmp(name_buf, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME,
+                          strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME) + 1)) {
+                H5_FAILED();
+                HDprintf("    retrieved attribute name '%s' didn't match '%s'\n", name_buf,
+                         ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME);
+                PART_ERROR(H5Aget_name_by_idx_crt_order_increasing);
+            }
+
+            *name_buf = '\0';
+            if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+                                   H5_ITER_INC, 1, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve name of attribute at index %d using H5Aget_name_by_index by "
+                         "creation order in increasing order\n",
+                         1);
+                PART_ERROR(H5Aget_name_by_idx_crt_order_increasing);
+            }
+
+            if (HDstrncmp(name_buf, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2,
+                          strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2) + 1)) {
+                H5_FAILED();
+                HDprintf("    retrieved attribute name '%s' didn't match '%s'\n", name_buf,
+                         ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2);
+                PART_ERROR(H5Aget_name_by_idx_crt_order_increasing);
+            }
+
+            *name_buf = '\0';
+            if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+                                   H5_ITER_INC, 2, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve name of attribute at index %d using H5Aget_name_by_index by "
+                         "creation order in increasing order\n",
+                         2);
+                PART_ERROR(H5Aget_name_by_idx_crt_order_increasing);
+            }
+
+            if (HDstrncmp(name_buf, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3,
+                          strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3) + 1)) {
+                H5_FAILED();
+                HDprintf("    retrieved attribute name '%s' didn't match '%s'\n", name_buf,
+                         ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3);
+                PART_ERROR(H5Aget_name_by_idx_crt_order_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_name_by_idx_crt_order_increasing);
+
+        PART_BEGIN(H5Aget_name_by_idx_crt_order_decreasing)
+        {
+            TESTING_2("H5Aget_name_by_idx by creation order in decreasing order");
+
+            *name_buf = '\0';
+            if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+                                   H5_ITER_DEC, 2, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve name of attribute at index %d using H5Aget_name_by_index by "
+                         "creation order in decreasing order\n",
+                         2);
+                PART_ERROR(H5Aget_name_by_idx_crt_order_decreasing);
+            }
+
+            if (HDstrncmp(name_buf, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME,
+                          strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME) + 1)) {
+                H5_FAILED();
+                HDprintf("    retrieved attribute name '%s' didn't match '%s'\n", name_buf,
+                         ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME);
+                PART_ERROR(H5Aget_name_by_idx_crt_order_decreasing);
+            }
+
+            *name_buf = '\0';
+            if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+                                   H5_ITER_DEC, 1, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve name of attribute at index %d using H5Aget_name_by_index by "
+                         "creation order in decreasing order\n",
+                         1);
+                PART_ERROR(H5Aget_name_by_idx_crt_order_decreasing);
+            }
+
+            if (HDstrncmp(name_buf, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2,
+                          strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2) + 1)) {
+                H5_FAILED();
+                HDprintf("    retrieved attribute name '%s' didn't match '%s'\n", name_buf,
+                         ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2);
+                PART_ERROR(H5Aget_name_by_idx_crt_order_decreasing);
+            }
+
+            *name_buf = '\0';
+            if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+                                   H5_ITER_DEC, 0, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve name of attribute at index %d using H5Aget_name_by_index by "
+                         "creation order in decreasing order\n",
+                         0);
+                PART_ERROR(H5Aget_name_by_idx_crt_order_decreasing);
+            }
+
+            if (HDstrncmp(name_buf, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3,
+                          strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3) + 1)) {
+                H5_FAILED();
+                HDprintf("    retrieved attribute name '%s' didn't match '%s'\n", name_buf,
+                         ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3);
+                PART_ERROR(H5Aget_name_by_idx_crt_order_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_name_by_idx_crt_order_decreasing);
+
+        PART_BEGIN(H5Aget_name_by_idx_name_order_increasing)
+        {
+            TESTING_2("H5Aget_name_by_idx by alphabetical order in increasing order");
+
+            *name_buf = '\0';
+            if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                   H5_ITER_INC, 0, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve name of attribute at index %d using H5Aget_name_by_index by "
+                         "alphabetical order in increasing order\n",
+                         0);
+                PART_ERROR(H5Aget_name_by_idx_name_order_increasing);
+            }
+
+            if (HDstrncmp(name_buf, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME,
+                          strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME) + 1)) {
+                H5_FAILED();
+                HDprintf("    retrieved attribute name '%s' didn't match '%s'\n", name_buf,
+                         ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME);
+                PART_ERROR(H5Aget_name_by_idx_name_order_increasing);
+            }
+
+            *name_buf = '\0';
+            if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                   H5_ITER_INC, 1, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve name of attribute at index %d using H5Aget_name_by_index by "
+                         "alphabetical order in increasing order\n",
+                         1);
+                PART_ERROR(H5Aget_name_by_idx_name_order_increasing);
+            }
+
+            if (HDstrncmp(name_buf, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2,
+                          strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2) + 1)) {
+                H5_FAILED();
+                HDprintf("    retrieved attribute name '%s' didn't match '%s'\n", name_buf,
+                         ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2);
+                PART_ERROR(H5Aget_name_by_idx_name_order_increasing);
+            }
+
+            *name_buf = '\0';
+            if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                   H5_ITER_INC, 2, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve name of attribute at index %d using H5Aget_name_by_index by "
+                         "alphabetical order in increasing order\n",
+                         2);
+                PART_ERROR(H5Aget_name_by_idx_name_order_increasing);
+            }
+
+            if (HDstrncmp(name_buf, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3,
+                          strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3) + 1)) {
+                H5_FAILED();
+                HDprintf("    retrieved attribute name '%s' didn't match '%s'\n", name_buf,
+                         ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3);
+                PART_ERROR(H5Aget_name_by_idx_name_order_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_name_by_idx_name_order_increasing);
+
+        PART_BEGIN(H5Aget_name_by_idx_name_order_decreasing)
+        {
+            TESTING_2("H5Aget_name_by_idx by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Note: the index arguments below are plain ints, so use %d in
+             * the messages (a %lld specifier here would be a format/argument
+             * mismatch and undefined behavior) */
+            *name_buf = '\0';
+            if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                   H5_ITER_DEC, 2, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve name of attribute at index %d using H5Aget_name_by_index "
+                         "by alphabetical order in decreasing order\n",
+                         2);
+                PART_ERROR(H5Aget_name_by_idx_name_order_decreasing);
+            }
+
+            if (HDstrncmp(name_buf, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME,
+                          strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME) + 1)) {
+                H5_FAILED();
+                HDprintf("    retrieved attribute name '%s' didn't match '%s'\n", name_buf,
+                         ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME);
+                PART_ERROR(H5Aget_name_by_idx_name_order_decreasing);
+            }
+
+            *name_buf = '\0';
+            if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                   H5_ITER_DEC, 1, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve name of attribute at index %d using H5Aget_name_by_index "
+                         "by alphabetical order in decreasing order\n",
+                         1);
+                PART_ERROR(H5Aget_name_by_idx_name_order_decreasing);
+            }
+
+            if (HDstrncmp(name_buf, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2,
+                          strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2) + 1)) {
+                H5_FAILED();
+                HDprintf("    retrieved attribute name '%s' didn't match '%s'\n", name_buf,
+                         ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2);
+                PART_ERROR(H5Aget_name_by_idx_name_order_decreasing);
+            }
+
+            *name_buf = '\0';
+            if (H5Aget_name_by_idx(container_group, ATTRIBUTE_GET_NAME_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                   H5_ITER_DEC, 0, name_buf, (size_t)name_buf_size, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve name of attribute at index %d using H5Aget_name_by_index "
+                         "by alphabetical order in decreasing order\n",
+                         0);
+                PART_ERROR(H5Aget_name_by_idx_name_order_decreasing);
+            }
+
+            if (HDstrncmp(name_buf, ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3,
+                          strlen(ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3) + 1)) {
+                H5_FAILED();
+                HDprintf("    retrieved attribute name '%s' didn't match '%s'\n", name_buf,
+                         ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3);
+                PART_ERROR(H5Aget_name_by_idx_name_order_decreasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Aget_name_by_idx_name_order_decreasing);
+#endif
+        }
+        PART_END(H5Aget_name_by_idx_name_order_decreasing);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (name_buf) {
+        HDfree(name_buf);
+        name_buf = NULL;
+    }
+
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype) < 0)
+        TEST_ERROR;
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        if (name_buf)
+            HDfree(name_buf);
+        H5Sclose(space_id);
+        H5Tclose(attr_dtype);
+        H5Aclose(attr_id);
+        H5Pclose(gcpl_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that an attribute's name can't be
+ * retrieved when H5Aget_name(_by_idx) is passed invalid
+ * parameters.
+ */
static int
test_get_attribute_name_invalid_params(void)
{
    ssize_t name_buf_size;
    htri_t  attr_exists;
    hid_t   file_id         = H5I_INVALID_HID;
    hid_t   container_group = H5I_INVALID_HID;
    hid_t   group_id        = H5I_INVALID_HID;
    hid_t   attr_id         = H5I_INVALID_HID;
    hid_t   attr_dtype      = H5I_INVALID_HID;
    hid_t   space_id        = H5I_INVALID_HID;
    char   *name_buf        = NULL;

    TESTING_MULTIPART("retrieval of an attribute's name with invalid parameters");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE)) {
        SKIPPED();
        HDprintf(
            "    API functions for basic file, group, or attribute aren't supported with this connector\n");
        return 0;
    }

    TESTING_2("test setup");

    /* Setup: open the shared test file/group, then create one sub-group
     * holding a single attribute that the negative tests below target. */
    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
        goto error;
    }

    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_GROUP_NAME,
                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create container group '%s'\n",
                 ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_GROUP_NAME);
        goto error;
    }

    if ((space_id = generate_random_dataspace(ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_SPACE_RANK, NULL, NULL,
                                              TRUE)) < 0)
        TEST_ERROR;

    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
        TEST_ERROR;

    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_ATTRIBUTE_NAME, attr_dtype,
                              space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create attribute\n");
        goto error;
    }

    /* Verify the attribute has been created */
    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_ATTRIBUTE_NAME)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't determine if attribute exists\n");
        goto error;
    }

    if (!attr_exists) {
        H5_FAILED();
        HDprintf("    attribute did not exist\n");
        goto error;
    }

    /*
     * Allocate an actual buffer for the tests.
     */

    /* H5Aget_name with a NULL buffer returns the required name length
     * (excluding the NUL terminator), so +1 below for the terminator. */
    if ((name_buf_size = H5Aget_name(attr_id, 0, NULL)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't retrieve name buf size\n");
        goto error;
    }

    if (NULL == (name_buf = (char *)HDmalloc((size_t)name_buf_size + 1)))
        TEST_ERROR;

    PASSED();

    BEGIN_MULTIPART
    {
        /* Each part below passes exactly one invalid argument and expects
         * the call to fail (return a negative value).  Calls are wrapped in
         * H5E_BEGIN_TRY/H5E_END_TRY to suppress the expected error output.
         *
         * NOTE(review): the H5Aget_name_invalid_name_buf part clobbers
         * name_buf_size with the failing call's (negative) return value, so
         * later parts compute (size_t)name_buf_size + 1 from that negative
         * value rather than the real buffer size.  Those calls are expected
         * to fail for other reasons anyway, but the passed size is not the
         * allocated size -- TODO confirm this is intentional upstream. */
        PART_BEGIN(H5Aget_name_invalid_attr_id)
        {
            TESTING_2("H5Aget_name with an invalid attr_id");

            H5E_BEGIN_TRY
            {
                name_buf_size = H5Aget_name(H5I_INVALID_HID, (size_t)name_buf_size + 1, name_buf);
            }
            H5E_END_TRY;

            if (name_buf_size >= 0) {
                H5_FAILED();
                HDprintf("    retrieved attribute name using H5Aget_name with an invalid attr_id!\n");
                PART_ERROR(H5Aget_name_invalid_attr_id);
            }

            PASSED();
        }
        PART_END(H5Aget_name_invalid_attr_id);

        PART_BEGIN(H5Aget_name_invalid_name_buf)
        {
            TESTING_2("H5Aget_name with an invalid name buffer");

            H5E_BEGIN_TRY
            {
                /* A NULL buffer with a non-zero size must be rejected */
                name_buf_size = 1;
                name_buf_size = H5Aget_name(attr_id, (size_t)name_buf_size, NULL);
            }
            H5E_END_TRY;

            if (name_buf_size >= 0) {
                H5_FAILED();
                HDprintf("    retrieved attribute name using H5Aget_name with an invalid name buffer!\n");
                PART_ERROR(H5Aget_name_invalid_name_buf);
            }

            PASSED();
        }
        PART_END(H5Aget_name_invalid_name_buf);

        PART_BEGIN(H5Aget_name_by_idx_invalid_loc_id)
        {
            TESTING_2("H5Aget_name_by_idx with an invalid loc_id");

            H5E_BEGIN_TRY
            {
                name_buf_size = H5Aget_name_by_idx(
                    H5I_INVALID_HID, ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_GROUP_NAME, H5_INDEX_NAME,
                    H5_ITER_INC, 0, name_buf, (size_t)name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (name_buf_size >= 0) {
                H5_FAILED();
                HDprintf("    retrieved attribute name using H5Aget_name_by_idx with an invalid loc_id!\n");
                PART_ERROR(H5Aget_name_by_idx_invalid_loc_id);
            }

            PASSED();
        }
        PART_END(H5Aget_name_by_idx_invalid_loc_id);

        PART_BEGIN(H5Aget_name_by_idx_invalid_obj_name)
        {
            TESTING_2("H5Aget_name_by_idx with an invalid object name");

            /* Two sub-cases: a NULL object name, then an empty string */
            H5E_BEGIN_TRY
            {
                name_buf_size = H5Aget_name_by_idx(container_group, NULL, H5_INDEX_NAME, H5_ITER_INC, 0,
                                                   name_buf, (size_t)name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (name_buf_size >= 0) {
                H5_FAILED();
                HDprintf("    retrieved attribute name using H5Aget_name_by_idx with a NULL object name!\n");
                PART_ERROR(H5Aget_name_by_idx_invalid_obj_name);
            }

            H5E_BEGIN_TRY
            {
                name_buf_size = H5Aget_name_by_idx(container_group, "", H5_INDEX_NAME, H5_ITER_INC, 0,
                                                   name_buf, (size_t)name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (name_buf_size >= 0) {
                H5_FAILED();
                HDprintf("    retrieved attribute name using H5Aget_name_by_idx with an invalid object name "
                         "of ''!\n");
                PART_ERROR(H5Aget_name_by_idx_invalid_obj_name);
            }

            PASSED();
        }
        PART_END(H5Aget_name_by_idx_invalid_obj_name);

        PART_BEGIN(H5Aget_name_by_idx_invalid_index_type)
        {
            TESTING_2("H5Aget_name_by_idx with an invalid index type");

            /* Both sentinel values of H5_index_t must be rejected */
            H5E_BEGIN_TRY
            {
                name_buf_size = H5Aget_name_by_idx(
                    container_group, ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_GROUP_NAME, H5_INDEX_UNKNOWN,
                    H5_ITER_INC, 0, name_buf, (size_t)name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (name_buf_size >= 0) {
                H5_FAILED();
                HDprintf("    retrieved attribute name using H5Aget_name_by_idx with invalid index type "
                         "H5_INDEX_UNKNOWN!\n");
                PART_ERROR(H5Aget_name_by_idx_invalid_index_type);
            }

            H5E_BEGIN_TRY
            {
                name_buf_size = H5Aget_name_by_idx(
                    container_group, ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_GROUP_NAME, H5_INDEX_N,
                    H5_ITER_INC, 0, name_buf, (size_t)name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (name_buf_size >= 0) {
                H5_FAILED();
                HDprintf("    retrieved attribute name using H5Aget_name_by_idx with invalid index type "
                         "H5_INDEX_N!\n");
                PART_ERROR(H5Aget_name_by_idx_invalid_index_type);
            }

            PASSED();
        }
        PART_END(H5Aget_name_by_idx_invalid_index_type);

        PART_BEGIN(H5Aget_name_by_idx_invalid_iter_order)
        {
            TESTING_2("H5Aget_name_by_idx with an invalid iteration order");

            /* Both sentinel values of H5_iter_order_t must be rejected */
            H5E_BEGIN_TRY
            {
                name_buf_size = H5Aget_name_by_idx(
                    container_group, ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_GROUP_NAME, H5_INDEX_NAME,
                    H5_ITER_UNKNOWN, 0, name_buf, (size_t)name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (name_buf_size >= 0) {
                H5_FAILED();
                HDprintf("    retrieved attribute name using H5Aget_name_by_idx with invalid iteration order "
                         "H5_ITER_UNKNOWN!\n");
                PART_ERROR(H5Aget_name_by_idx_invalid_iter_order);
            }

            H5E_BEGIN_TRY
            {
                name_buf_size = H5Aget_name_by_idx(
                    container_group, ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_GROUP_NAME, H5_INDEX_NAME,
                    H5_ITER_N, 0, name_buf, (size_t)name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (name_buf_size >= 0) {
                H5_FAILED();
                HDprintf("    retrieved attribute name using H5Aget_name_by_idx with invalid iteration order "
                         "H5_ITER_N!\n");
                PART_ERROR(H5Aget_name_by_idx_invalid_iter_order);
            }

            PASSED();
        }
        PART_END(H5Aget_name_by_idx_invalid_iter_order);

        PART_BEGIN(H5Aget_name_by_idx_invalid_name_buf)
        {
            TESTING_2("H5Aget_name_by_idx with an invalid name buffer");

            H5E_BEGIN_TRY
            {
                /* A NULL buffer with a non-zero size must be rejected */
                name_buf_size = 1;
                name_buf_size = H5Aget_name_by_idx(
                    container_group, ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_GROUP_NAME, H5_INDEX_NAME,
                    H5_ITER_INC, 0, NULL, (size_t)name_buf_size, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (name_buf_size >= 0) {
                H5_FAILED();
                HDprintf(
                    "    retrieved attribute name using H5Aget_name_by_idx with an invalid name buffer!\n");
                PART_ERROR(H5Aget_name_by_idx_invalid_name_buf);
            }

            PASSED();
        }
        PART_END(H5Aget_name_by_idx_invalid_name_buf);

        PART_BEGIN(H5Aget_name_by_idx_invalid_lapl)
        {
            TESTING_2("H5Aget_name_by_idx with an invalid LAPL");

            H5E_BEGIN_TRY
            {
                name_buf_size = H5Aget_name_by_idx(
                    container_group, ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_GROUP_NAME, H5_INDEX_NAME,
                    H5_ITER_INC, 0, name_buf, (size_t)name_buf_size + 1, H5I_INVALID_HID);
            }
            H5E_END_TRY;

            if (name_buf_size >= 0) {
                H5_FAILED();
                HDprintf("    retrieved attribute name using H5Aget_name_by_idx with an invalid LAPL!\n");
                PART_ERROR(H5Aget_name_by_idx_invalid_lapl);
            }

            PASSED();
        }
        PART_END(H5Aget_name_by_idx_invalid_lapl);
    }
    END_MULTIPART;

    TESTING_2("test cleanup");

    if (name_buf) {
        HDfree(name_buf);
        name_buf = NULL;
    }

    if (H5Sclose(space_id) < 0)
        TEST_ERROR;
    if (H5Tclose(attr_dtype) < 0)
        TEST_ERROR;
    if (H5Aclose(attr_id) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup; errors from closing already-invalid IDs are
     * suppressed by the H5E_BEGIN_TRY block */
    H5E_BEGIN_TRY
    {
        if (name_buf)
            HDfree(name_buf);
        H5Sclose(space_id);
        H5Tclose(attr_dtype);
        H5Aclose(attr_id);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
+/*
+ * A test for H5Aget_storage_size.
+ */
static int
test_get_attribute_storage_size(void)
{
    TESTING("H5Aget_storage_size");

    /* Placeholder: no checks are implemented yet, so the test is
     * unconditionally reported as skipped. */
    SKIPPED();

    return 0;
}
+
+/*
+ * A test to check the functionality of H5Aget_info(_by_idx).
+ */
+static int
+test_get_attribute_info(void)
+{
+ H5A_info_t attr_info;
+ htri_t attr_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t attr_dtype = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("retrieval of attribute info");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, attribute, or creation order aren't supported "
+ "with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file\n");
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group\n");
+ goto error;
+ }
+
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create GCPL for attribute creation order tracking\n");
+ goto error;
+ }
+
+ if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set attribute creation order tracking\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_GET_INFO_TEST_GROUP_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n", ATTRIBUTE_GET_INFO_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id = generate_random_dataspace(ATTRIBUTE_GET_INFO_TEST_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+ TEST_ERROR;
+
+ /* Create several attributes */
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_GET_INFO_TEST_ATTR_NAME, attr_dtype, space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME);
+ goto error;
+ }
+
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_GET_INFO_TEST_ATTR_NAME2, attr_dtype, space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME2);
+ goto error;
+ }
+
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_GET_INFO_TEST_ATTR_NAME3, attr_dtype, space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME3);
+ goto error;
+ }
+
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+
+ /* Verify the attributes have been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_GET_INFO_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME);
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' did not exist\n", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME);
+ goto error;
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_GET_INFO_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME2);
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' did not exist\n", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME2);
+ goto error;
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_GET_INFO_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME3);
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' did not exist\n", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME3);
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Aget_info)
+ {
+ TESTING_2("H5Aget_info");
+
+ if ((attr_id = H5Aopen(group_id, ATTRIBUTE_GET_INFO_TEST_ATTR_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open attribute '%s'\n", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME);
+ PART_ERROR(H5Aget_info);
+ }
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info(attr_id, &attr_info) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get attribute info\n");
+ PART_ERROR(H5Aget_info);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)0);
+ PART_ERROR(H5Aget_info);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close attribute '%s'\n", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME);
+ PART_ERROR(H5Aget_info);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aget_info);
+
+ H5E_BEGIN_TRY
+ {
+ H5Aclose(attr_id);
+ attr_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Aget_info_by_name)
+ {
+ TESTING_2("H5Aget_info_by_name");
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_name(group_id, ".", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME, &attr_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get attribute info by name '%s'\n", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME);
+ PART_ERROR(H5Aget_info_by_name);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)0);
+ PART_ERROR(H5Aget_info_by_name);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_name);
+ }
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_name(group_id, ".", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME2, &attr_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get attribute info by name '%s'\n",
+ ATTRIBUTE_GET_INFO_TEST_ATTR_NAME2);
+ PART_ERROR(H5Aget_info_by_name);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 1)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)1);
+ PART_ERROR(H5Aget_info_by_name);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_name);
+ }
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_name(group_id, ".", ATTRIBUTE_GET_INFO_TEST_ATTR_NAME3, &attr_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get attribute info by name '%s'\n",
+ ATTRIBUTE_GET_INFO_TEST_ATTR_NAME3);
+ PART_ERROR(H5Aget_info_by_name);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 2)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)2);
+ PART_ERROR(H5Aget_info_by_name);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aget_info_by_name);
+
+ PART_BEGIN(H5Aget_info_by_idx_crt_order_increasing)
+ {
+ TESTING_2("H5Aget_info_by_idx by creation order in increasing order");
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, &attr_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get info for attribute at index %d using H5Aget_info_by_idx by "
+ "creation order in increasing order\n",
+ 0);
+ PART_ERROR(H5Aget_info_by_idx_crt_order_increasing);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)0);
+ PART_ERROR(H5Aget_info_by_idx_crt_order_increasing);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_idx_crt_order_increasing);
+ }
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, &attr_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get info for attribute at index %d using H5Aget_info_by_idx by "
+ "creation order in increasing order\n",
+ 1);
+ PART_ERROR(H5Aget_info_by_idx_crt_order_increasing);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 1)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)1);
+ PART_ERROR(H5Aget_info_by_idx_crt_order_increasing);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_idx_crt_order_increasing);
+ }
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2, &attr_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get info for attribute at index %d using H5Aget_info_by_idx by "
+ "creation order in increasing order\n",
+ 2);
+ PART_ERROR(H5Aget_info_by_idx_crt_order_increasing);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 2)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)2);
+ PART_ERROR(H5Aget_info_by_idx_crt_order_increasing);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_idx_crt_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aget_info_by_idx_crt_order_increasing);
+
+ PART_BEGIN(H5Aget_info_by_idx_crt_order_decreasing)
+ {
+ TESTING_2("H5Aget_info_by_idx by creation order in decreasing order");
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, &attr_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get info for attribute at index %d using H5Aget_info_by_idx by "
+ "creation order in decreasing order\n",
+ 2);
+ PART_ERROR(H5Aget_info_by_idx_crt_order_decreasing);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)0);
+ PART_ERROR(H5Aget_info_by_idx_crt_order_decreasing);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_idx_crt_order_decreasing);
+ }
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, &attr_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get info for attribute at index %d using H5Aget_info_by_idx by "
+ "creation order in decreasing order\n",
+ 1);
+ PART_ERROR(H5Aget_info_by_idx_crt_order_decreasing);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 1)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)1);
+ PART_ERROR(H5Aget_info_by_idx_crt_order_decreasing);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_idx_crt_order_decreasing);
+ }
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, &attr_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get info for attribute at index %d using H5Aget_info_by_idx by "
+ "creation order in decreasing order\n",
+ 0);
+ PART_ERROR(H5Aget_info_by_idx_crt_order_decreasing);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 2)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)2);
+ PART_ERROR(H5Aget_info_by_idx_crt_order_decreasing);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_idx_crt_order_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aget_info_by_idx_crt_order_decreasing);
+
+ PART_BEGIN(H5Aget_info_by_idx_name_order_increasing)
+ {
+ TESTING_2("H5Aget_info_by_idx by alphabetical order in increasing order");
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, &attr_info, H5P_DEFAULT) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get info for attribute at index %d using H5Aget_info_by_idx by "
+ "alphabetical order in increasing order\n",
+ 0);
+ PART_ERROR(H5Aget_info_by_idx_name_order_increasing);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)0);
+ PART_ERROR(H5Aget_info_by_idx_name_order_increasing);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_idx_name_order_increasing);
+ }
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1, &attr_info, H5P_DEFAULT) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get info for attribute at index %d using H5Aget_info_by_idx by "
+ "alphabetical order in increasing order\n",
+ 1);
+ PART_ERROR(H5Aget_info_by_idx_name_order_increasing);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 1)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)1);
+ PART_ERROR(H5Aget_info_by_idx_name_order_increasing);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_idx_name_order_increasing);
+ }
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2, &attr_info, H5P_DEFAULT) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get info for attribute at index %d using H5Aget_info_by_idx by "
+ "alphabetical order in increasing order\n",
+ 2);
+ PART_ERROR(H5Aget_info_by_idx_name_order_increasing);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 2)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)2);
+ PART_ERROR(H5Aget_info_by_idx_name_order_increasing);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_idx_name_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aget_info_by_idx_name_order_increasing);
+
+ PART_BEGIN(H5Aget_info_by_idx_name_order_decreasing)
+ {
+ TESTING_2("H5Aget_info_by_idx by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, &attr_info, H5P_DEFAULT) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get info for attribute at index %lld using H5Aget_info_by_idx by "
+ "alphabetical order in decreasing order\n",
+ 2);
+ PART_ERROR(H5Aget_info_by_idx_name_order_decreasing);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)0);
+ PART_ERROR(H5Aget_info_by_idx_name_order_decreasing);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_idx_name_order_decreasing);
+ }
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, &attr_info, H5P_DEFAULT) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get info for attribute at index %lld using H5Aget_info_by_idx by "
+ "alphabetical order in decreasing order\n",
+ 1);
+ PART_ERROR(H5Aget_info_by_idx_name_order_decreasing);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 1)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)1);
+ PART_ERROR(H5Aget_info_by_idx_name_order_decreasing);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_idx_name_order_decreasing);
+ }
+
+ HDmemset(&attr_info, 0, sizeof(attr_info));
+ if (H5Aget_info_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, &attr_info, H5P_DEFAULT) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get info for attribute at index %lld using H5Aget_info_by_idx by "
+ "alphabetical order in decreasing order\n",
+ 0);
+ PART_ERROR(H5Aget_info_by_idx_name_order_decreasing);
+ }
+
+ if (attr_info.corder_valid && (attr_info.corder != 2)) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)attr_info.corder, (long long)2);
+ PART_ERROR(H5Aget_info_by_idx_name_order_decreasing);
+ }
+
+ /* Ensure that the cset field is at least set to a meaningful value */
+ if (attr_info.cset != H5T_CSET_ASCII && attr_info.cset != H5T_CSET_UTF8 &&
+ attr_info.cset != H5T_CSET_ERROR) {
+ H5_FAILED();
+ HDprintf(" attribute info's 'cset' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Aget_info_by_idx_name_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Aget_info_by_idx_name_order_decreasing);
+#endif
+ }
+ PART_END(H5Aget_info_by_idx_name_order_decreasing);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(attr_dtype) < 0)
+ TEST_ERROR;
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(space_id);
+ H5Tclose(attr_dtype);
+ H5Aclose(attr_id);
+ H5Pclose(gcpl_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that H5Aget_info(_by_name/_by_idx)
+ * doesn't succeed when passed invalid parameters.
+ *
+ * Each part below passes exactly one invalid argument (invalid hid_t,
+ * NULL/empty name, NULL info pointer, invalid index type/order, or
+ * invalid LAPL) and expects the API call to fail.
+ *
+ * Returns 0 if all parts pass (or are skipped), 1 otherwise.
+ */
+static int
+test_get_attribute_info_invalid_params(void)
+{
+    H5A_info_t attr_info;
+    htri_t     attr_exists;
+    herr_t     err_ret         = -1;
+    hid_t      file_id         = H5I_INVALID_HID;
+    hid_t      container_group = H5I_INVALID_HID;
+    hid_t      group_id        = H5I_INVALID_HID;
+    hid_t      attr_id         = H5I_INVALID_HID;
+    hid_t      attr_dtype      = H5I_INVALID_HID;
+    hid_t      space_id        = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("retrieval of attribute info with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE)) {
+        SKIPPED();
+        HDprintf(
+            " API functions for basic file, group, or attribute aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_GET_INFO_INVALID_PARAMS_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create container group '%s'\n",
+                 ATTRIBUTE_GET_INFO_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((space_id = generate_random_dataspace(ATTRIBUTE_GET_INFO_INVALID_PARAMS_TEST_SPACE_RANK, NULL, NULL,
+                                              TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    /* Create a single valid attribute to use as the target of the negative tests */
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_GET_INFO_INVALID_PARAMS_TEST_ATTR_NAME, attr_dtype,
+                              space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create attribute\n");
+        goto error;
+    }
+
+    /* Verify the attribute has been created */
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_GET_INFO_INVALID_PARAMS_TEST_ATTR_NAME)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf(" attribute did not exist\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Aget_info_invalid_attr_id)
+        {
+            TESTING_2("H5Aget_info with an invalid attr_id");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info(H5I_INVALID_HID, &attr_info);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info with an invalid attr_id!\n");
+                PART_ERROR(H5Aget_info_invalid_attr_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_info_invalid_attr_id);
+
+        PART_BEGIN(H5Aget_info_invalid_attr_info_pointer)
+        {
+            TESTING_2("H5Aget_info with an invalid attribute info pointer");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info(attr_id, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                /* Fixed copy-paste error: this part tests a NULL info pointer, not an invalid attr_id */
+                HDprintf(" retrieved attribute info using H5Aget_info with an invalid attribute info pointer!\n");
+                PART_ERROR(H5Aget_info_invalid_attr_info_pointer);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_info_invalid_attr_info_pointer);
+
+        PART_BEGIN(H5Aget_info_by_name_invalid_loc_id)
+        {
+            TESTING_2("H5Aget_info_by_name with an invalid loc_id");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info_by_name(H5I_INVALID_HID, ".",
+                                              ATTRIBUTE_GET_INFO_INVALID_PARAMS_TEST_ATTR_NAME, &attr_info,
+                                              H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_name with an invalid loc_id!\n");
+                PART_ERROR(H5Aget_info_by_name_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_info_by_name_invalid_loc_id);
+
+        PART_BEGIN(H5Aget_info_by_name_invalid_obj_name)
+        {
+            TESTING_2("H5Aget_info_by_name with an invalid object name");
+
+            /* Both a NULL and an empty object name must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Aget_info_by_name(group_id, NULL, ATTRIBUTE_GET_INFO_INVALID_PARAMS_TEST_ATTR_NAME,
+                                        &attr_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_name with a NULL object name!\n");
+                PART_ERROR(H5Aget_info_by_name_invalid_obj_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info_by_name(group_id, "", ATTRIBUTE_GET_INFO_INVALID_PARAMS_TEST_ATTR_NAME,
+                                              &attr_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_name with an invalid object name "
+                         "of ''!\n");
+                PART_ERROR(H5Aget_info_by_name_invalid_obj_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_info_by_name_invalid_obj_name);
+
+        PART_BEGIN(H5Aget_info_by_name_invalid_attr_name)
+        {
+            TESTING_2("H5Aget_info_by_name with an invalid attribute name");
+
+            /* Both a NULL and an empty attribute name must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info_by_name(group_id, ".", NULL, &attr_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(
+                    " retrieved attribute info using H5Aget_info_by_name with a NULL attribute name!\n");
+                PART_ERROR(H5Aget_info_by_name_invalid_attr_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info_by_name(group_id, ".", "", &attr_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_name with an invalid attribute "
+                         "name of ''!\n");
+                PART_ERROR(H5Aget_info_by_name_invalid_attr_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_info_by_name_invalid_attr_name);
+
+        PART_BEGIN(H5Aget_info_by_name_invalid_attr_info_pointer)
+        {
+            TESTING_2("H5Aget_info_by_name with an invalid attribute info pointer");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info_by_name(group_id, ".", ATTRIBUTE_GET_INFO_INVALID_PARAMS_TEST_ATTR_NAME,
+                                              NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_name with an invalid attribute "
+                         "info pointer!\n");
+                PART_ERROR(H5Aget_info_by_name_invalid_attr_info_pointer);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_info_by_name_invalid_attr_info_pointer);
+
+        PART_BEGIN(H5Aget_info_by_name_invalid_lapl)
+        {
+            TESTING_2("H5Aget_info_by_name with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info_by_name(group_id, ".", ATTRIBUTE_GET_INFO_INVALID_PARAMS_TEST_ATTR_NAME,
+                                              &attr_info, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_name with an invalid LAPL!\n");
+                PART_ERROR(H5Aget_info_by_name_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_info_by_name_invalid_lapl);
+
+        PART_BEGIN(H5Aget_info_by_idx_invalid_loc_id)
+        {
+            TESTING_2("H5Aget_info_by_idx with an invalid loc_id");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info_by_idx(H5I_INVALID_HID, ".", H5_INDEX_NAME, H5_ITER_INC, 0, &attr_info,
+                                             H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_idx with an invalid loc_id!\n");
+                PART_ERROR(H5Aget_info_by_idx_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_info_by_idx_invalid_loc_id);
+
+        PART_BEGIN(H5Aget_info_by_idx_invalid_obj_name)
+        {
+            TESTING_2("H5Aget_info_by_idx with an invalid object name");
+
+            /* Both a NULL and an empty object name must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info_by_idx(group_id, NULL, H5_INDEX_NAME, H5_ITER_INC, 0, &attr_info,
+                                             H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_idx with a NULL object name!\n");
+                PART_ERROR(H5Aget_info_by_idx_invalid_obj_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Aget_info_by_idx(group_id, "", H5_INDEX_NAME, H5_ITER_INC, 0, &attr_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_idx with an invalid object name "
+                         "of ''!\n");
+                PART_ERROR(H5Aget_info_by_idx_invalid_obj_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_info_by_idx_invalid_obj_name);
+
+        PART_BEGIN(H5Aget_info_by_idx_invalid_index_type)
+        {
+            TESTING_2("H5Aget_info_by_idx with an invalid index type");
+
+            /* Both out-of-range index type values must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info_by_idx(group_id, ".", H5_INDEX_UNKNOWN, H5_ITER_INC, 0, &attr_info,
+                                             H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_idx with invalid index type "
+                         "H5_INDEX_UNKNOWN!\n");
+                PART_ERROR(H5Aget_info_by_idx_invalid_index_type);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Aget_info_by_idx(group_id, ".", H5_INDEX_N, H5_ITER_INC, 0, &attr_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_idx with invalid index type "
+                         "H5_INDEX_N!\n");
+                PART_ERROR(H5Aget_info_by_idx_invalid_index_type);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_info_by_idx_invalid_index_type);
+
+        PART_BEGIN(H5Aget_info_by_idx_invalid_iter_order)
+        {
+            TESTING_2("H5Aget_info_by_idx with an invalid iteration order");
+
+            /* Both out-of-range iteration order values must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_UNKNOWN, 0, &attr_info,
+                                             H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_idx with invalid iteration order "
+                         "H5_ITER_UNKNOWN!\n");
+                PART_ERROR(H5Aget_info_by_idx_invalid_iter_order);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Aget_info_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_N, 0, &attr_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_idx with invalid iteration order "
+                         "H5_ITER_N!\n");
+                PART_ERROR(H5Aget_info_by_idx_invalid_iter_order);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_info_by_idx_invalid_iter_order);
+
+        PART_BEGIN(H5Aget_info_by_idx_invalid_attr_info_pointer)
+        {
+            TESTING_2("H5Aget_info_by_idx with an invalid attribute info pointer");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_idx with an invalid attribute "
+                         "info pointer!\n");
+                PART_ERROR(H5Aget_info_by_idx_invalid_attr_info_pointer);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_info_by_idx_invalid_attr_info_pointer);
+
+        PART_BEGIN(H5Aget_info_by_idx_invalid_lapl)
+        {
+            TESTING_2("H5Aget_info_by_idx with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aget_info_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, &attr_info,
+                                             H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" retrieved attribute info using H5Aget_info_by_idx with an invalid LAPL!\n");
+                PART_ERROR(H5Aget_info_by_idx_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aget_info_by_idx_invalid_lapl);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we are already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Tclose(attr_dtype);
+        H5Aclose(attr_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that an attribute can be renamed
+ * with H5Arename and H5Arename_by_name.
+ *
+ * Two attributes are created on a fresh subgroup; the first is renamed
+ * via H5Arename on the group itself, the second via H5Arename_by_name
+ * addressed from the parent (container) group.  Each rename is verified
+ * by checking that the old name no longer exists and the new name does.
+ *
+ * Returns 0 if all parts pass (or are skipped), 1 otherwise.
+ */
+static int
+test_rename_attribute(void)
+{
+    htri_t attr_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID;
+    hid_t  group_id        = H5I_INVALID_HID;
+    hid_t  attr_id = H5I_INVALID_HID, attr_id2 = H5I_INVALID_HID;
+    hid_t  attr_dtype    = H5I_INVALID_HID;
+    hid_t  attr_space_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("attribute renaming");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE)) {
+        SKIPPED();
+        HDprintf(
+            " API functions for basic file, group, or attribute aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_RENAME_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create container group '%s'\n", ATTRIBUTE_RENAME_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((attr_space_id = generate_random_dataspace(ATTRIBUTE_RENAME_TEST_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    /* Both attributes share the same (randomly generated) datatype and dataspace */
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_RENAME_TEST_ATTR_NAME, attr_dtype, attr_space_id,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create attribute\n");
+        goto error;
+    }
+
+    if ((attr_id2 = H5Acreate2(group_id, ATTRIBUTE_RENAME_TEST_ATTR_NAME2, attr_dtype, attr_space_id,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create attribute\n");
+        goto error;
+    }
+
+    /* Verify the attributes have been created */
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_RENAME_TEST_ATTR_NAME)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf(" attribute did not exist\n");
+        goto error;
+    }
+
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_RENAME_TEST_ATTR_NAME2)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf(" attribute did not exist\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Arename)
+        {
+            TESTING_2("H5Arename");
+
+            /* Rename the first attribute directly on the owning group */
+            if (H5Arename(group_id, ATTRIBUTE_RENAME_TEST_ATTR_NAME, ATTRIBUTE_RENAME_TEST_NEW_NAME) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't rename attribute '%s' to '%s' using H5Arename\n",
+                         ATTRIBUTE_RENAME_TEST_ATTR_NAME, ATTRIBUTE_RENAME_TEST_NEW_NAME);
+                PART_ERROR(H5Arename);
+            }
+
+            /* Verify the attribute has been renamed: old name gone, new name present */
+            if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_RENAME_TEST_ATTR_NAME)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't determine if attribute exists\n");
+                PART_ERROR(H5Arename);
+            }
+
+            if (attr_exists) {
+                H5_FAILED();
+                HDprintf(" attribute did not get renamed!\n");
+                PART_ERROR(H5Arename);
+            }
+
+            if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_RENAME_TEST_NEW_NAME)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't determine if attribute exists\n");
+                PART_ERROR(H5Arename);
+            }
+
+            if (!attr_exists) {
+                H5_FAILED();
+                HDprintf(" attribute did not get renamed!\n");
+                PART_ERROR(H5Arename);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Arename);
+
+        PART_BEGIN(H5Arename_by_name)
+        {
+            TESTING_2("H5Arename_by_name");
+
+            /* Rename the second attribute by addressing the subgroup by name
+             * from the container group */
+            if (H5Arename_by_name(container_group, ATTRIBUTE_RENAME_TEST_GROUP_NAME,
+                                  ATTRIBUTE_RENAME_TEST_ATTR_NAME2, ATTRIBUTE_RENAME_TEST_NEW_NAME2,
+                                  H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't rename attribute '%s' to '%s' using H5Arename_by_name\n",
+                         ATTRIBUTE_RENAME_TEST_ATTR_NAME2, ATTRIBUTE_RENAME_TEST_NEW_NAME2);
+                PART_ERROR(H5Arename_by_name);
+            }
+
+            /* Verify the attribute has been renamed: old name gone, new name present */
+            if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_RENAME_TEST_ATTR_NAME2)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't determine if attribute exists\n");
+                PART_ERROR(H5Arename_by_name);
+            }
+
+            if (attr_exists) {
+                H5_FAILED();
+                HDprintf(" attribute did not get renamed!\n");
+                PART_ERROR(H5Arename_by_name);
+            }
+
+            if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_RENAME_TEST_NEW_NAME2)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't determine if attribute exists\n");
+                PART_ERROR(H5Arename_by_name);
+            }
+
+            if (!attr_exists) {
+                H5_FAILED();
+                HDprintf(" attribute did not get renamed!\n");
+                PART_ERROR(H5Arename_by_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Arename_by_name);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(attr_space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we are already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(attr_space_id);
+        H5Tclose(attr_dtype);
+        H5Aclose(attr_id);
+        H5Aclose(attr_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that an attribute can't be renamed
+ * when H5Arename(_by_name) is passed invalid parameters.
+ *
+ * Each part below passes one invalid argument (invalid hid_t, NULL or
+ * empty name, or invalid LAPL) and expects the rename call to fail.
+ *
+ * Returns 0 if all parts pass (or are skipped), 1 otherwise.
+ */
+static int
+test_rename_attribute_invalid_params(void)
+{
+    htri_t attr_exists;
+    herr_t err_ret         = -1;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID;
+    hid_t  group_id        = H5I_INVALID_HID;
+    hid_t  attr_id = H5I_INVALID_HID, attr_id2 = H5I_INVALID_HID;
+    hid_t  attr_dtype    = H5I_INVALID_HID;
+    hid_t  attr_space_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("attribute renaming with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE)) {
+        SKIPPED();
+        HDprintf(
+            " API functions for basic file, group, or attribute aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create container group '%s'\n",
+                 ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((attr_space_id = generate_random_dataspace(ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_SPACE_RANK, NULL,
+                                                   NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    /* Create two valid attributes to use as targets for the negative rename tests */
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME, attr_dtype,
+                              attr_space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create attribute\n");
+        goto error;
+    }
+
+    if ((attr_id2 = H5Acreate2(group_id, ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME2, attr_dtype,
+                               attr_space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create attribute\n");
+        goto error;
+    }
+
+    /* Verify the attributes have been created */
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf(" attribute did not exist\n");
+        goto error;
+    }
+
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME2)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf(" attribute did not exist\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Arename_invalid_loc_id)
+        {
+            TESTING_2("H5Arename with an invalid loc_id");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Arename(H5I_INVALID_HID, ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME,
+                                    ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_NEW_NAME);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" renamed attribute using H5Arename with an invalid loc_id!\n");
+                PART_ERROR(H5Arename_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Arename_invalid_loc_id);
+
+        PART_BEGIN(H5Arename_invalid_old_attr_name)
+        {
+            TESTING_2("H5Arename with an invalid old attribute name");
+
+            /* Both a NULL and an empty old name must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Arename(group_id, NULL, ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_NEW_NAME);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" renamed attribute using H5Arename with a NULL old attribute name!\n");
+                PART_ERROR(H5Arename_invalid_old_attr_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Arename(group_id, "", ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_NEW_NAME);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" renamed attribute using H5Arename with an invalid old attribute name of ''!\n");
+                PART_ERROR(H5Arename_invalid_old_attr_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Arename_invalid_old_attr_name);
+
+        PART_BEGIN(H5Arename_invalid_new_attr_name)
+        {
+            TESTING_2("H5Arename with an invalid new attribute name");
+
+            /* Both a NULL and an empty new name must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Arename(group_id, ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" renamed attribute using H5Arename with a NULL new attribute name!\n");
+                PART_ERROR(H5Arename_invalid_new_attr_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Arename(group_id, ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME, "");
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" renamed attribute using H5Arename with an invalid new attribute name of ''!\n");
+                PART_ERROR(H5Arename_invalid_new_attr_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Arename_invalid_new_attr_name);
+
+        PART_BEGIN(H5Arename_by_name_invalid_loc_id)
+        {
+            TESTING_2("H5Arename_by_name with an invalid loc_id");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Arename_by_name(H5I_INVALID_HID, ".", ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME,
+                                      ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_NEW_NAME, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" renamed attribute using H5Arename_by_name with an invalid loc_id!\n");
+                PART_ERROR(H5Arename_by_name_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Arename_by_name_invalid_loc_id);
+
+        PART_BEGIN(H5Arename_by_name_invalid_obj_name)
+        {
+            TESTING_2("H5Arename_by_name with an invalid object name");
+
+            /* Both a NULL and an empty object name must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Arename_by_name(group_id, NULL, ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME,
+                                            ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_NEW_NAME, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" renamed attribute using H5Arename_by_name with a NULL object name!\n");
+                PART_ERROR(H5Arename_by_name_invalid_obj_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Arename_by_name(group_id, "", ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME,
+                                            ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_NEW_NAME, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(
+                    " renamed attribute using H5Arename_by_name with an invalid object name of ''!\n");
+                PART_ERROR(H5Arename_by_name_invalid_obj_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Arename_by_name_invalid_obj_name);
+
+        PART_BEGIN(H5Arename_by_name_invalid_old_attr_name)
+        {
+            TESTING_2("H5Arename_by_name with an invalid old attribute name");
+
+            /* Both a NULL and an empty old name must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Arename_by_name(group_id, ".", NULL,
+                                            ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_NEW_NAME, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" renamed attribute using H5Arename_by_name with a NULL old attribute name!\n");
+                PART_ERROR(H5Arename_by_name_invalid_old_attr_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Arename_by_name(group_id, ".", "", ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_NEW_NAME,
+                                            H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" renamed attribute using H5Arename_by_name with an invalid old attribute name "
+                         "of ''!\n");
+                PART_ERROR(H5Arename_by_name_invalid_old_attr_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Arename_by_name_invalid_old_attr_name);
+
+        PART_BEGIN(H5Arename_by_name_invalid_new_attr_name)
+        {
+            TESTING_2("H5Arename_by_name with an invalid new attribute name");
+
+            /* Both a NULL and an empty new name must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Arename_by_name(group_id, ".", ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME,
+                                            NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" renamed attribute using H5Arename_by_name with a NULL new attribute name!\n");
+                PART_ERROR(H5Arename_by_name_invalid_new_attr_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Arename_by_name(group_id, ".", ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME, "",
+                                            H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" renamed attribute using H5Arename_by_name with an invalid new attribute name "
+                         "of ''!\n");
+                PART_ERROR(H5Arename_by_name_invalid_new_attr_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Arename_by_name_invalid_new_attr_name);
+
+        PART_BEGIN(H5Arename_by_name_invalid_lapl)
+        {
+            TESTING_2("H5Arename_by_name with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Arename_by_name(group_id, ".", ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME,
+                                            ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_NEW_NAME, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" renamed attribute using H5Arename_by_name with an invalid LAPL!\n");
+                PART_ERROR(H5Arename_by_name_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Arename_by_name_invalid_lapl);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(attr_space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we are already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(attr_space_id);
+        H5Tclose(attr_dtype);
+        H5Aclose(attr_id);
+        H5Aclose(attr_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check the functionality of attribute
+ * iteration using H5Aiterate(_by_name) on a group.
+ * Iteration is done in increasing and decreasing
+ * order of both attribute name and attribute
+ * creation order.
+ */
+static int
+test_attribute_iterate_group(void)
+{
+ size_t link_counter;
+ size_t i;
+ htri_t attr_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t attr_dtype = H5I_INVALID_HID;
+ hid_t attr_space_id = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("attribute iteration on a group");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, attribute, iterate, or creation order aren't "
+ "supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group\n");
+ goto error;
+ }
+
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create GCPL for attribute creation order tracking\n");
+ goto error;
+ }
+
+ if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set attribute creation order tracking\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_ITERATE_TEST_GRP_SUBGROUP_NAME, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n", ATTRIBUTE_ITERATE_TEST_GRP_SUBGROUP_NAME);
+ goto error;
+ }
+
+ if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+ TEST_ERROR;
+ if ((attr_space_id =
+ generate_random_dataspace(ATTRIBUTE_ITERATE_TEST_ATTR_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+ TEST_ERROR;
+
+ /* Create some attributes with a reverse-ordering naming scheme to test creation order */
+ for (i = 0; i < ATTRIBUTE_ITERATE_TEST_NUM_ATTRS; i++) {
+ char attr_name[ATTRIBUTE_ITERATE_TEST_ATTR_NAME_BUF_SIZE];
+
+ HDsnprintf(attr_name, ATTRIBUTE_ITERATE_TEST_ATTR_NAME_BUF_SIZE,
+ ATTRIBUTE_ITERATE_TEST_ATTR_NAME "%d", (int)(ATTRIBUTE_ITERATE_TEST_NUM_ATTRS - i - 1));
+
+ if ((attr_id = H5Acreate2(group_id, attr_name, attr_dtype, attr_space_id, H5P_DEFAULT, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", attr_name);
+ goto error;
+ }
+
+ /* Verify the attribute has been created */
+ if ((attr_exists = H5Aexists(group_id, attr_name)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n", attr_name);
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' did not exist\n", attr_name);
+ goto error;
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", attr_name);
+ goto error;
+ }
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ /*
+ * NOTE: A counter is passed to the iteration callback to try to match up the
+ * expected attributes with a given step throughout all of the following
+ * iterations. Since the only information we can count on in the attribute
+ * iteration callback is the attribute's name, we need some other way of
+ * ensuring that the attributes are coming back in the correct order.
+ */
+
+ PART_BEGIN(H5Aiterate2_name_increasing)
+ {
+ TESTING_2("H5Aiterate by attribute name in increasing order");
+
+ link_counter = 0;
+
+ /* Test basic attribute iteration capability using both index types and both index orders */
+ if (H5Aiterate2(group_id, H5_INDEX_NAME, H5_ITER_INC, NULL, attr_iter_callback1, &link_counter) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate2 by index type name in increasing order failed\n");
+ PART_ERROR(H5Aiterate2_name_increasing);
+ }
+
+ /* Make sure that the attribute iteration callback was actually called */
+ if (link_counter == 0) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate sentinel value is unchanged; supplied callback function must not "
+ "have been called!\n");
+ PART_ERROR(H5Aiterate2_name_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aiterate2_name_increasing);
+
+ PART_BEGIN(H5Aiterate2_name_decreasing)
+ {
+ TESTING_2("H5Aiterate by attribute name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Reset the counter to the appropriate value for the next test */
+ link_counter = ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+ if (H5Aiterate2(group_id, H5_INDEX_NAME, H5_ITER_DEC, NULL, attr_iter_callback1, &link_counter) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate2 by index type name in decreasing order failed\n");
+ PART_ERROR(H5Aiterate2_name_decreasing);
+ }
+
+ /* Make sure that the attribute iteration callback was actually called */
+ if (link_counter == ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate sentinel value is unchanged; supplied callback function must not "
+ "have been called!\n");
+ PART_ERROR(H5Aiterate2_name_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Aiterate2_name_decreasing);
+#endif
+ }
+ PART_END(H5Aiterate2_name_decreasing);
+
+ PART_BEGIN(H5Aiterate2_creation_increasing)
+ {
+ TESTING_2("H5Aiterate by creation order in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ link_counter = 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+ if (H5Aiterate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, attr_iter_callback1,
+ &link_counter) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate2 by index type creation order in increasing order failed\n");
+ PART_ERROR(H5Aiterate2_creation_increasing);
+ }
+
+ /* Make sure that the attribute iteration callback was actually called */
+ if (link_counter == 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate sentinel value is unchanged; supplied callback function must not "
+ "have been called!\n");
+ PART_ERROR(H5Aiterate2_creation_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aiterate2_creation_increasing);
+
+ PART_BEGIN(H5Aiterate2_creation_decreasing)
+ {
+ TESTING_2("H5Aiterate by creation order in decreasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ link_counter = 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+ if (H5Aiterate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, attr_iter_callback1,
+ &link_counter) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate2 by index type creation order in decreasing order failed\n");
+ PART_ERROR(H5Aiterate2_creation_decreasing);
+ }
+
+ /* Make sure that the attribute iteration callback was actually called */
+ if (link_counter == 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate sentinel value is unchanged; supplied callback function must not "
+ "have been called!\n");
+ PART_ERROR(H5Aiterate2_creation_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aiterate2_creation_decreasing);
+
+ PART_BEGIN(H5Aiterate_by_name_name_increasing)
+ {
+ TESTING_2("H5Aiterate_by_name by attribute name in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ link_counter = 0;
+
+ if (H5Aiterate_by_name(
+ file_id, "/" ATTRIBUTE_TEST_GROUP_NAME "/" ATTRIBUTE_ITERATE_TEST_GRP_SUBGROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_INC, NULL, attr_iter_callback1, &link_counter, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate_by_name by index type name in increasing order failed\n");
+ PART_ERROR(H5Aiterate_by_name_name_increasing);
+ }
+
+ /* Make sure that the attribute iteration callback was actually called */
+ if (link_counter == 0) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate_by_name sentinel value is unchanged; supplied callback function "
+ "must not have been called!\n");
+ PART_ERROR(H5Aiterate_by_name_name_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aiterate_by_name_name_increasing);
+
+ PART_BEGIN(H5Aiterate_by_name_name_decreasing)
+ {
+ TESTING_2("H5Aiterate_by_name by attribute name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Reset the counter to the appropriate value for the next test */
+ link_counter = ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+ if (H5Aiterate_by_name(
+ file_id, "/" ATTRIBUTE_TEST_GROUP_NAME "/" ATTRIBUTE_ITERATE_TEST_GRP_SUBGROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_DEC, NULL, attr_iter_callback1, &link_counter, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate_by_name by index type name in decreasing order failed\n");
+ PART_ERROR(H5Aiterate_by_name_name_decreasing);
+ }
+
+ /* Make sure that the attribute iteration callback was actually called */
+ if (link_counter == ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate_by_name sentinel value is unchanged; supplied callback function "
+ "must not have been called!\n");
+ PART_ERROR(H5Aiterate_by_name_name_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Aiterate_by_name_name_decreasing);
+#endif
+ }
+ PART_END(H5Aiterate_by_name_name_decreasing);
+
+ PART_BEGIN(H5Aiterate_by_name_creation_increasing)
+ {
+ TESTING_2("H5Aiterate_by_name by creation order in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ link_counter = 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+ if (H5Aiterate_by_name(file_id,
+ "/" ATTRIBUTE_TEST_GROUP_NAME "/" ATTRIBUTE_ITERATE_TEST_GRP_SUBGROUP_NAME,
+ H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, attr_iter_callback1, &link_counter,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate_by_name by index type creation order in increasing order failed\n");
+ PART_ERROR(H5Aiterate_by_name_creation_increasing);
+ }
+
+ /* Make sure that the attribute iteration callback was actually called */
+ if (link_counter == 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate_by_name sentinel value is unchanged; supplied callback function "
+ "must not have been called!\n");
+ PART_ERROR(H5Aiterate_by_name_creation_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aiterate_by_name_creation_increasing);
+
+ PART_BEGIN(H5Aiterate_by_name_creation_decreasing)
+ {
+ TESTING_2("H5Aiterate_by_name by creation order in decreasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ link_counter = 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+ if (H5Aiterate_by_name(file_id,
+ "/" ATTRIBUTE_TEST_GROUP_NAME "/" ATTRIBUTE_ITERATE_TEST_GRP_SUBGROUP_NAME,
+ H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, attr_iter_callback1, &link_counter,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate_by_name by index type creation order in decreasing order failed\n");
+ PART_ERROR(H5Aiterate_by_name_creation_decreasing);
+ }
+
+ /* Make sure that the attribute iteration callback was actually called */
+ if (link_counter == 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+ H5_FAILED();
+ HDprintf(" H5Aiterate_by_name sentinel value is unchanged; supplied callback function "
+ "must not have been called!\n");
+ PART_ERROR(H5Aiterate_by_name_creation_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aiterate_by_name_creation_decreasing);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(attr_space_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(attr_dtype) < 0)
+ TEST_ERROR;
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(attr_space_id);
+ H5Tclose(attr_dtype);
+ H5Aclose(attr_id);
+ H5Pclose(gcpl_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check the functionality of attribute
+ * iteration using H5Aiterate(_by_name) on a dataset.
+ * Iteration is done in increasing and decreasing
+ * order of both attribute name and attribute
+ * creation order.
+ *
+ * Setup: creates a dataset whose DCPL enables attribute creation
+ * order tracking, then attaches ATTRIBUTE_ITERATE_TEST_NUM_ATTRS
+ * attributes using a reverse-ordered naming scheme so that
+ * name order and creation order disagree.  Eight iteration
+ * passes follow (H5Aiterate2 and H5Aiterate_by_name, each over
+ * the name index and the creation-order index, in increasing
+ * and decreasing order).
+ *
+ * Returns 0 on success, 1 on failure; all IDs opened here are
+ * released on both paths.
+ */
+static int
+test_attribute_iterate_dataset(void)
+{
+    size_t link_counter; /* Per-pass counter/sentinel handed to attr_iter_callback1 */
+    size_t i;
+    htri_t attr_exists;
+    hid_t  file_id = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  dset_id = H5I_INVALID_HID;
+    hid_t  attr_id = H5I_INVALID_HID;
+    hid_t  dset_dtype = H5I_INVALID_HID;
+    hid_t  attr_dtype = H5I_INVALID_HID;
+    hid_t  dset_space_id = H5I_INVALID_HID;
+    hid_t  attr_space_id = H5I_INVALID_HID;
+    hid_t  dcpl_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("attribute iteration on a dataset");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, attribute, iterate, or creation order "
+                 "aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_ITERATE_TEST_DSET_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n", ATTRIBUTE_ITERATE_TEST_DSET_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Creation-order iteration requires the object to track attribute
+     * creation order, so set that up on the dataset's DCPL before
+     * creating the dataset. */
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create DCPL for attribute creation order tracking\n");
+        goto error;
+    }
+
+    if (H5Pset_attr_creation_order(dcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set attribute creation order tracking\n");
+        goto error;
+    }
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_space_id =
+             generate_random_dataspace(ATTRIBUTE_ITERATE_TEST_DSET_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+    if ((attr_space_id =
+             generate_random_dataspace(ATTRIBUTE_ITERATE_TEST_ATTR_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, ATTRIBUTE_ITERATE_TEST_DSET_NAME, dset_dtype, dset_space_id,
+                              H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", ATTRIBUTE_ITERATE_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Create some attributes with a reverse-ordering naming scheme to test creation order:
+     * the first attribute created gets the numerically highest name suffix, so iterating
+     * by name and iterating by creation order visit the attributes in opposite orders. */
+    for (i = 0; i < ATTRIBUTE_ITERATE_TEST_NUM_ATTRS; i++) {
+        char attr_name[ATTRIBUTE_ITERATE_TEST_ATTR_NAME_BUF_SIZE];
+
+        HDsnprintf(attr_name, ATTRIBUTE_ITERATE_TEST_ATTR_NAME_BUF_SIZE,
+                   ATTRIBUTE_ITERATE_TEST_ATTR_NAME "%d", (int)(ATTRIBUTE_ITERATE_TEST_NUM_ATTRS - i - 1));
+
+        if ((attr_id = H5Acreate2(dset_id, attr_name, attr_dtype, attr_space_id, H5P_DEFAULT, H5P_DEFAULT)) <
+            0) {
+            H5_FAILED();
+            HDprintf("    couldn't create attribute '%s'\n", attr_name);
+            goto error;
+        }
+
+        /* Verify the attribute has been created */
+        if ((attr_exists = H5Aexists(dset_id, attr_name)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if attribute '%s' exists\n", attr_name);
+            goto error;
+        }
+
+        if (!attr_exists) {
+            H5_FAILED();
+            HDprintf("    attribute '%s' did not exist\n", attr_name);
+            goto error;
+        }
+
+        if (H5Aclose(attr_id) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to close attribute '%s'\n", attr_name);
+            goto error;
+        }
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /*
+         * NOTE: A counter is passed to the iteration callback to try to match up the
+         * expected attributes with a given step throughout all of the following
+         * iterations. Since the only information we can count on in the attribute
+         * iteration callback is the attribute's name, we need some other way of
+         * ensuring that the attributes are coming back in the correct order.
+         *
+         * Each pass seeds the counter at a distinct multiple of
+         * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS (0, N, 2N, 3N) so the callback can
+         * presumably infer which ordering is being exercised from the counter's
+         * range alone.
+         */
+
+        PART_BEGIN(H5Aiterate2_name_increasing)
+        {
+            TESTING_2("H5Aiterate by attribute name in increasing order");
+
+            link_counter = 0;
+
+            /* Test basic attribute iteration capability using both index types and both index orders */
+            if (H5Aiterate2(dset_id, H5_INDEX_NAME, H5_ITER_INC, NULL, attr_iter_callback1, &link_counter) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Aiterate2_name_increasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate sentinel value is unchanged; supplied callback function must not "
+                         "have been called!\n");
+                PART_ERROR(H5Aiterate2_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate2_name_increasing);
+
+        PART_BEGIN(H5Aiterate2_name_decreasing)
+        {
+            TESTING_2("H5Aiterate by attribute name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+            if (H5Aiterate2(dset_id, H5_INDEX_NAME, H5_ITER_DEC, NULL, attr_iter_callback1, &link_counter) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Aiterate2_name_decreasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate sentinel value is unchanged; supplied callback function must not "
+                         "have been called!\n");
+                PART_ERROR(H5Aiterate2_name_decreasing);
+            }
+
+            PASSED();
+#else
+            /* Connectors without decreasing-alphabetical iteration support skip this part */
+            SKIPPED();
+            PART_EMPTY(H5Aiterate2_name_decreasing);
+#endif
+        }
+        PART_END(H5Aiterate2_name_decreasing);
+
+        PART_BEGIN(H5Aiterate2_creation_increasing)
+        {
+            TESTING_2("H5Aiterate by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+            if (H5Aiterate2(dset_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, attr_iter_callback1,
+                            &link_counter) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Aiterate2_creation_increasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate sentinel value is unchanged; supplied callback function must not "
+                         "have been called!\n");
+                PART_ERROR(H5Aiterate2_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate2_creation_increasing);
+
+        PART_BEGIN(H5Aiterate2_creation_decreasing)
+        {
+            TESTING_2("H5Aiterate by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+            if (H5Aiterate2(dset_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, attr_iter_callback1,
+                            &link_counter) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Aiterate2_creation_decreasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate sentinel value is unchanged; supplied callback function must not "
+                         "have been called!\n");
+                PART_ERROR(H5Aiterate2_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate2_creation_decreasing);
+
+        /* The remaining four passes repeat the same matrix through
+         * H5Aiterate_by_name, addressing the dataset by its full path
+         * from the file rather than through its open ID. */
+        PART_BEGIN(H5Aiterate_by_name_name_increasing)
+        {
+            TESTING_2("H5Aiterate_by_name by attribute name in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = 0;
+
+            if (H5Aiterate_by_name(file_id,
+                                   "/" ATTRIBUTE_TEST_GROUP_NAME "/" ATTRIBUTE_ITERATE_TEST_DSET_SUBGROUP_NAME
+                                   "/" ATTRIBUTE_ITERATE_TEST_DSET_NAME,
+                                   H5_INDEX_NAME, H5_ITER_INC, NULL, attr_iter_callback1, &link_counter,
+                                   H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name by index type name in increasing order failed\n");
+                PART_ERROR(H5Aiterate_by_name_name_increasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name sentinel value is unchanged; supplied callback function "
+                         "must not have been called!\n");
+                PART_ERROR(H5Aiterate_by_name_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_by_name_name_increasing);
+
+        PART_BEGIN(H5Aiterate_by_name_name_decreasing)
+        {
+            TESTING_2("H5Aiterate_by_name by attribute name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+            if (H5Aiterate_by_name(file_id,
+                                   "/" ATTRIBUTE_TEST_GROUP_NAME "/" ATTRIBUTE_ITERATE_TEST_DSET_SUBGROUP_NAME
+                                   "/" ATTRIBUTE_ITERATE_TEST_DSET_NAME,
+                                   H5_INDEX_NAME, H5_ITER_DEC, NULL, attr_iter_callback1, &link_counter,
+                                   H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name by index type name in decreasing order failed\n");
+                PART_ERROR(H5Aiterate_by_name_name_decreasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name sentinel value is unchanged; supplied callback function "
+                         "must not have been called!\n");
+                PART_ERROR(H5Aiterate_by_name_name_decreasing);
+            }
+
+            PASSED();
+#else
+            /* Connectors without decreasing-alphabetical iteration support skip this part */
+            SKIPPED();
+            PART_EMPTY(H5Aiterate_by_name_name_decreasing);
+#endif
+        }
+        PART_END(H5Aiterate_by_name_name_decreasing);
+
+        PART_BEGIN(H5Aiterate_by_name_creation_increasing)
+        {
+            TESTING_2("H5Aiterate_by_name by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+            if (H5Aiterate_by_name(file_id,
+                                   "/" ATTRIBUTE_TEST_GROUP_NAME "/" ATTRIBUTE_ITERATE_TEST_DSET_SUBGROUP_NAME
+                                   "/" ATTRIBUTE_ITERATE_TEST_DSET_NAME,
+                                   H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, attr_iter_callback1, &link_counter,
+                                   H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Aiterate_by_name_creation_increasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name sentinel value is unchanged; supplied callback function "
+                         "must not have been called!\n");
+                PART_ERROR(H5Aiterate_by_name_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_by_name_creation_increasing);
+
+        PART_BEGIN(H5Aiterate_by_name_creation_decreasing)
+        {
+            TESTING_2("H5Aiterate_by_name by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+            if (H5Aiterate_by_name(file_id,
+                                   "/" ATTRIBUTE_TEST_GROUP_NAME "/" ATTRIBUTE_ITERATE_TEST_DSET_SUBGROUP_NAME
+                                   "/" ATTRIBUTE_ITERATE_TEST_DSET_NAME,
+                                   H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, attr_iter_callback1, &link_counter,
+                                   H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Aiterate_by_name_creation_decreasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name sentinel value is unchanged; supplied callback function "
+                         "must not have been called!\n");
+                PART_ERROR(H5Aiterate_by_name_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_by_name_creation_decreasing);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    /* Release every ID opened during setup; failure of any close is a test failure */
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(attr_space_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(dset_space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of whatever was opened; HDF5 error reporting is
+     * suppressed so invalid-ID closes don't spam the error stack */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(dcpl_id);
+        H5Sclose(attr_space_id);
+        H5Sclose(dset_space_id);
+        H5Tclose(attr_dtype);
+        H5Tclose(dset_dtype);
+        H5Aclose(attr_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check the functionality of attribute
+ * iteration using H5Aiterate(_by_name) on a committed
+ * datatype. Iteration is done in increasing and
+ * decreasing order of both attribute name and attribute
+ * creation order.
+ *
+ * Setup: commits a datatype whose TCPL enables attribute creation
+ * order tracking, attaches ATTRIBUTE_ITERATE_TEST_NUM_ATTRS
+ * attributes with a reverse-ordered naming scheme, then runs
+ * eight iteration passes (H5Aiterate2 and H5Aiterate_by_name,
+ * each over the name index and the creation-order index, in
+ * increasing and decreasing order).
+ *
+ * Returns 0 on success, 1 on failure; all IDs opened here are
+ * released on both paths.
+ */
+static int
+test_attribute_iterate_datatype(void)
+{
+    size_t link_counter; /* Per-pass counter/sentinel handed to attr_iter_callback1 */
+    size_t i;
+    htri_t attr_exists;
+    hid_t  file_id = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  type_id = H5I_INVALID_HID;
+    hid_t  attr_id = H5I_INVALID_HID;
+    hid_t  attr_dtype = H5I_INVALID_HID;
+    hid_t  attr_space_id = H5I_INVALID_HID;
+    hid_t  tcpl_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("attribute iteration on a committed datatype");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, stored datatype, attribute, iterate, or creation "
+                 "order aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_ITERATE_TEST_DTYPE_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n", ATTRIBUTE_ITERATE_TEST_DTYPE_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Creation-order iteration requires the object to track attribute
+     * creation order, so set that up on the datatype's TCPL before
+     * committing the datatype. */
+    if ((tcpl_id = H5Pcreate(H5P_DATATYPE_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create TCPL for attribute creation order tracking\n");
+        goto error;
+    }
+
+    if (H5Pset_attr_creation_order(tcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set attribute creation order tracking\n");
+        goto error;
+    }
+
+    if ((type_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_space_id =
+             generate_random_dataspace(ATTRIBUTE_ITERATE_TEST_ATTR_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if (H5Tcommit2(group_id, ATTRIBUTE_ITERATE_TEST_DTYPE_NAME, type_id, H5P_DEFAULT, tcpl_id, H5P_DEFAULT) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't commit datatype '%s'\n", ATTRIBUTE_ITERATE_TEST_DTYPE_NAME);
+        goto error;
+    }
+
+    /* Create some attributes with a reverse-ordering naming scheme to test creation order:
+     * the first attribute created gets the numerically highest name suffix, so iterating
+     * by name and iterating by creation order visit the attributes in opposite orders. */
+    for (i = 0; i < ATTRIBUTE_ITERATE_TEST_NUM_ATTRS; i++) {
+        char attr_name[ATTRIBUTE_ITERATE_TEST_ATTR_NAME_BUF_SIZE];
+
+        HDsnprintf(attr_name, ATTRIBUTE_ITERATE_TEST_ATTR_NAME_BUF_SIZE,
+                   ATTRIBUTE_ITERATE_TEST_ATTR_NAME "%d", (int)(ATTRIBUTE_ITERATE_TEST_NUM_ATTRS - i - 1));
+
+        if ((attr_id = H5Acreate2(type_id, attr_name, attr_dtype, attr_space_id, H5P_DEFAULT, H5P_DEFAULT)) <
+            0) {
+            H5_FAILED();
+            HDprintf("    couldn't create attribute '%s'\n", attr_name);
+            goto error;
+        }
+
+        /* Verify the attribute has been created */
+        if ((attr_exists = H5Aexists(type_id, attr_name)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if attribute '%s' exists\n", attr_name);
+            goto error;
+        }
+
+        if (!attr_exists) {
+            H5_FAILED();
+            HDprintf("    attribute '%s' did not exist\n", attr_name);
+            goto error;
+        }
+
+        if (H5Aclose(attr_id) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to close attribute '%s'\n", attr_name);
+            goto error;
+        }
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /*
+         * NOTE: A counter is passed to the iteration callback to try to match up the
+         * expected attributes with a given step throughout all of the following
+         * iterations. Since the only information we can count on in the attribute
+         * iteration callback is the attribute's name, we need some other way of
+         * ensuring that the attributes are coming back in the correct order.
+         *
+         * Each pass seeds the counter at a distinct multiple of
+         * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS (0, N, 2N, 3N) so the callback can
+         * presumably infer which ordering is being exercised from the counter's
+         * range alone.
+         */
+
+        PART_BEGIN(H5Aiterate2_name_increasing)
+        {
+            TESTING_2("H5Aiterate by attribute name in increasing order");
+
+            link_counter = 0;
+
+            /* Test basic attribute iteration capability using both index types and both index orders */
+            if (H5Aiterate2(type_id, H5_INDEX_NAME, H5_ITER_INC, NULL, attr_iter_callback1, &link_counter) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Aiterate2_name_increasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate sentinel value is unchanged; supplied callback function must not "
+                         "have been called!\n");
+                PART_ERROR(H5Aiterate2_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate2_name_increasing);
+
+        PART_BEGIN(H5Aiterate2_name_decreasing)
+        {
+            TESTING_2("H5Aiterate by attribute name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+            if (H5Aiterate2(type_id, H5_INDEX_NAME, H5_ITER_DEC, NULL, attr_iter_callback1, &link_counter) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Aiterate2_name_decreasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate sentinel value is unchanged; supplied callback function must not "
+                         "have been called!\n");
+                PART_ERROR(H5Aiterate2_name_decreasing);
+            }
+
+            PASSED();
+#else
+            /* Connectors without decreasing-alphabetical iteration support skip this part */
+            SKIPPED();
+            PART_EMPTY(H5Aiterate2_name_decreasing);
+#endif
+        }
+        PART_END(H5Aiterate2_name_decreasing);
+
+        PART_BEGIN(H5Aiterate2_creation_increasing)
+        {
+            TESTING_2("H5Aiterate by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+            if (H5Aiterate2(type_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, attr_iter_callback1,
+                            &link_counter) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Aiterate2_creation_increasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate sentinel value is unchanged; supplied callback function must not "
+                         "have been called!\n");
+                PART_ERROR(H5Aiterate2_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate2_creation_increasing);
+
+        PART_BEGIN(H5Aiterate2_creation_decreasing)
+        {
+            TESTING_2("H5Aiterate by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+            if (H5Aiterate2(type_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, attr_iter_callback1,
+                            &link_counter) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Aiterate2_creation_decreasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate sentinel value is unchanged; supplied callback function must not "
+                         "have been called!\n");
+                PART_ERROR(H5Aiterate2_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate2_creation_decreasing);
+
+        /* The remaining four passes repeat the same matrix through
+         * H5Aiterate_by_name, addressing the committed datatype by its
+         * full path from the file rather than through its open ID. */
+        PART_BEGIN(H5Aiterate_by_name_name_increasing)
+        {
+            TESTING_2("H5Aiterate_by_name by attribute name in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = 0;
+
+            if (H5Aiterate_by_name(
+                    file_id,
+                    "/" ATTRIBUTE_TEST_GROUP_NAME "/" ATTRIBUTE_ITERATE_TEST_DTYPE_SUBGROUP_NAME
+                    "/" ATTRIBUTE_ITERATE_TEST_DTYPE_NAME,
+                    H5_INDEX_NAME, H5_ITER_INC, NULL, attr_iter_callback1, &link_counter, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name by index type name in increasing order failed\n");
+                PART_ERROR(H5Aiterate_by_name_name_increasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name sentinel value is unchanged; supplied callback function "
+                         "must not have been called!\n");
+                PART_ERROR(H5Aiterate_by_name_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_by_name_name_increasing);
+
+        PART_BEGIN(H5Aiterate_by_name_name_decreasing)
+        {
+            TESTING_2("H5Aiterate_by_name by attribute name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+            if (H5Aiterate_by_name(
+                    file_id,
+                    "/" ATTRIBUTE_TEST_GROUP_NAME "/" ATTRIBUTE_ITERATE_TEST_DTYPE_SUBGROUP_NAME
+                    "/" ATTRIBUTE_ITERATE_TEST_DTYPE_NAME,
+                    H5_INDEX_NAME, H5_ITER_DEC, NULL, attr_iter_callback1, &link_counter, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name by index type name in decreasing order failed\n");
+                PART_ERROR(H5Aiterate_by_name_name_decreasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name sentinel value is unchanged; supplied callback function "
+                         "must not have been called!\n");
+                PART_ERROR(H5Aiterate_by_name_name_decreasing);
+            }
+
+            PASSED();
+#else
+            /* Connectors without decreasing-alphabetical iteration support skip this part */
+            SKIPPED();
+            PART_EMPTY(H5Aiterate_by_name_name_decreasing);
+#endif
+        }
+        PART_END(H5Aiterate_by_name_name_decreasing);
+
+        PART_BEGIN(H5Aiterate_by_name_creation_increasing)
+        {
+            TESTING_2("H5Aiterate_by_name by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+            if (H5Aiterate_by_name(file_id,
+                                   "/" ATTRIBUTE_TEST_GROUP_NAME
+                                   "/" ATTRIBUTE_ITERATE_TEST_DTYPE_SUBGROUP_NAME
+                                   "/" ATTRIBUTE_ITERATE_TEST_DTYPE_NAME,
+                                   H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, attr_iter_callback1, &link_counter,
+                                   H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Aiterate_by_name_creation_increasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == 2 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name sentinel value is unchanged; supplied callback function "
+                         "must not have been called!\n");
+                PART_ERROR(H5Aiterate_by_name_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_by_name_creation_increasing);
+
+        PART_BEGIN(H5Aiterate_by_name_creation_decreasing)
+        {
+            TESTING_2("H5Aiterate_by_name by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            link_counter = 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS;
+
+            if (H5Aiterate_by_name(file_id,
+                                   "/" ATTRIBUTE_TEST_GROUP_NAME
+                                   "/" ATTRIBUTE_ITERATE_TEST_DTYPE_SUBGROUP_NAME
+                                   "/" ATTRIBUTE_ITERATE_TEST_DTYPE_NAME,
+                                   H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, attr_iter_callback1, &link_counter,
+                                   H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Aiterate_by_name_creation_decreasing);
+            }
+
+            /* Make sure that the attribute iteration callback was actually called */
+            if (link_counter == 3 * ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name sentinel value is unchanged; supplied callback function "
+                         "must not have been called!\n");
+                PART_ERROR(H5Aiterate_by_name_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_by_name_creation_decreasing);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    /* Release every ID opened during setup; failure of any close is a test failure */
+    if (H5Pclose(tcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(attr_space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype) < 0)
+        TEST_ERROR;
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of whatever was opened; HDF5 error reporting is
+     * suppressed so invalid-ID closes don't spam the error stack */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(tcpl_id);
+        H5Sclose(attr_space_id);
+        H5Tclose(attr_dtype);
+        H5Tclose(type_id);
+        H5Aclose(attr_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check the functionality of attribute
+ * iteration index saving using H5Aiterate(_by_name).
+ * Iteration is done in increasing and decreasing
+ * order of both attribute name and attribute
+ * creation order.
+ */
+static int
+test_attribute_iterate_index_saving(void)
+{
+    /* Placeholder: index-saving behavior of H5Aiterate(_by_name) is not yet
+     * exercised, so the test only registers itself and reports as skipped.
+     */
+    TESTING("attribute iteration index saving capability");
+
+    SKIPPED();
+
+    /* NOTE(review): every other skipped test in this file returns 0; returning
+     * 1 here counts this stub as a failure in the harness totals -- presumably
+     * intentional to flag the missing coverage, but confirm against the test
+     * driver's expectations.
+     */
+    return 1;
+}
+
+/*
+ * A test to check that an object's attributes can't
+ * be iterated over when H5Aiterate(_by_name) is
+ * passed invalid parameters.
+ */
+static int
+test_attribute_iterate_invalid_params(void)
+{
+    herr_t err_ret = -1;
+    htri_t attr_exists;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t attr_id = H5I_INVALID_HID, attr_id2 = H5I_INVALID_HID, attr_id3 = H5I_INVALID_HID,
+          attr_id4 = H5I_INVALID_HID;
+    hid_t attr_dtype = H5I_INVALID_HID;
+    hid_t attr_space_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("attribute iteration with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, attribute, or iterate aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file\n");
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_SUBGROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup\n");
+        goto error;
+    }
+
+    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_space_id = generate_random_dataspace(ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_SPACE_RANK,
+                                                   NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    /* Create four attributes on the subgroup so the invalid-parameter calls
+     * below would have entries to visit if they (incorrectly) succeeded.
+     */
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_NAME, attr_dtype,
+                              attr_space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create attribute\n");
+        goto error;
+    }
+
+    if ((attr_id2 = H5Acreate2(group_id, ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_NAME2, attr_dtype,
+                               attr_space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create attribute\n");
+        goto error;
+    }
+
+    if ((attr_id3 = H5Acreate2(group_id, ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_NAME3, attr_dtype,
+                               attr_space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create attribute\n");
+        goto error;
+    }
+
+    if ((attr_id4 = H5Acreate2(group_id, ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_NAME4, attr_dtype,
+                               attr_space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create attribute\n");
+        goto error;
+    }
+
+    /* Verify the attributes have been created */
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_NAME)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf("    attribute did not exist\n");
+        goto error;
+    }
+
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_NAME2)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf("    attribute did not exist\n");
+        goto error;
+    }
+
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_NAME3)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf("    attribute did not exist\n");
+        goto error;
+    }
+
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_NAME4)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if attribute exists\n");
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf("    attribute did not exist\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Aiterate_invalid_loc_id)
+        {
+            TESTING_2("H5Aiterate with an invalid loc_id");
+
+            /* Each negative test wraps the call in H5E_BEGIN_TRY/H5E_END_TRY to
+             * suppress the expected HDF5 error stack output; the call must
+             * return a negative value for the part to pass.
+             */
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Aiterate2(H5I_INVALID_HID, H5_INDEX_NAME, H5_ITER_INC, NULL, attr_iter_callback2, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    iterated over attributes using H5Aiterate with an invalid loc_id!\n");
+                PART_ERROR(H5Aiterate_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_invalid_loc_id);
+
+        PART_BEGIN(H5Aiterate_invalid_index_type)
+        {
+            TESTING_2("H5Aiterate with an invalid index type");
+
+            /* Both out-of-range index types are tried: the sentinel below the
+             * valid range and the one-past-the-end value.
+             */
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Aiterate2(group_id, H5_INDEX_UNKNOWN, H5_ITER_INC, NULL, attr_iter_callback2, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    iterated over attributes using H5Aiterate with invalid index type "
+                         "H5_INDEX_UNKNOWN!\n");
+                PART_ERROR(H5Aiterate_invalid_index_type);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aiterate2(group_id, H5_INDEX_N, H5_ITER_INC, NULL, attr_iter_callback2, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(
+                    "    iterated over attributes using H5Aiterate with invalid index type H5_INDEX_N!\n");
+                PART_ERROR(H5Aiterate_invalid_index_type);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_invalid_index_type);
+
+        PART_BEGIN(H5Aiterate_invalid_index_order)
+        {
+            TESTING_2("H5Aiterate with an invalid index ordering");
+
+            /* Same pattern as the index-type part, for the iteration-order
+             * sentinels H5_ITER_UNKNOWN and H5_ITER_N.
+             */
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Aiterate2(group_id, H5_INDEX_NAME, H5_ITER_UNKNOWN, NULL, attr_iter_callback2, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    iterated over attributes using H5Aiterate with invalid index ordering "
+                         "H5_ITER_UNKNOWN!\n");
+                PART_ERROR(H5Aiterate_invalid_index_order);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aiterate2(group_id, H5_INDEX_NAME, H5_ITER_N, NULL, attr_iter_callback2, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(
+                    "    iterated over attributes using H5Aiterate with invalid index ordering H5_ITER_N!\n");
+                PART_ERROR(H5Aiterate_invalid_index_order);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_invalid_index_order);
+
+        PART_BEGIN(H5Aiterate_by_name_invalid_loc_id)
+        {
+            TESTING_2("H5Aiterate_by_name with an invalid loc_id");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aiterate_by_name(H5I_INVALID_HID, ".", H5_INDEX_NAME, H5_ITER_INC, NULL,
+                                             attr_iter_callback2, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    iterated over attributes using H5Aiterate_by_name with an invalid loc_id!\n");
+                PART_ERROR(H5Aiterate_by_name_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_by_name_invalid_loc_id);
+
+        PART_BEGIN(H5Aiterate_by_name_invalid_obj_name)
+        {
+            TESTING_2("H5Aiterate_by_name with an invalid object name");
+
+            /* Both a NULL object name and an empty string must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aiterate_by_name(group_id, NULL, H5_INDEX_NAME, H5_ITER_INC, NULL,
+                                             attr_iter_callback2, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    iterated over attributes using H5Aiterate_by_name with a NULL object name!\n");
+                PART_ERROR(H5Aiterate_by_name_invalid_obj_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aiterate_by_name(group_id, "", H5_INDEX_NAME, H5_ITER_INC, NULL,
+                                             attr_iter_callback2, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    iterated over attributes using H5Aiterate_by_name with an invalid object name "
+                         "of ''!\n");
+                PART_ERROR(H5Aiterate_by_name_invalid_obj_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_by_name_invalid_obj_name);
+
+        PART_BEGIN(H5Aiterate_by_name_invalid_index_type)
+        {
+            TESTING_2("H5Aiterate_by_name with an invalid index type");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aiterate_by_name(group_id, ".", H5_INDEX_UNKNOWN, H5_ITER_INC, NULL,
+                                             attr_iter_callback2, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    iterated over attributes using H5Aiterate_by_name with invalid index type "
+                         "H5_INDEX_UNKNOWN!\n");
+                PART_ERROR(H5Aiterate_by_name_invalid_index_type);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aiterate_by_name(group_id, ".", H5_INDEX_N, H5_ITER_INC, NULL,
+                                             attr_iter_callback2, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    iterated over attributes using H5Aiterate_by_name with invalid index type "
+                         "H5_INDEX_N!\n");
+                PART_ERROR(H5Aiterate_by_name_invalid_index_type);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_by_name_invalid_index_type);
+
+        PART_BEGIN(H5Aiterate_by_name_invalid_index_order)
+        {
+            TESTING_2("H5Aiterate_by_name with an invalid index ordering");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aiterate_by_name(group_id, ".", H5_INDEX_NAME, H5_ITER_UNKNOWN, NULL,
+                                             attr_iter_callback2, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    iterated over attributes using H5Aiterate_by_name with invalid index ordering "
+                         "H5_ITER_UNKNOWN!\n");
+                PART_ERROR(H5Aiterate_by_name_invalid_index_order);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aiterate_by_name(group_id, ".", H5_INDEX_NAME, H5_ITER_N, NULL,
+                                             attr_iter_callback2, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    iterated over attributes using H5Aiterate_by_name with invalid index ordering "
+                         "H5_ITER_N!\n");
+                PART_ERROR(H5Aiterate_by_name_invalid_index_order);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_by_name_invalid_index_order);
+
+        PART_BEGIN(H5Aiterate_by_name_invalid_lapl)
+        {
+            TESTING_2("H5Aiterate_by_name with an invalid LAPL");
+
+            /* The link access property list is the final parameter of
+             * H5Aiterate_by_name; an invalid hid_t there must also fail.
+             */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Aiterate_by_name(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, NULL,
+                                             attr_iter_callback2, NULL, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    iterated over attributes using H5Aiterate_by_name with an invalid LAPL!\n");
+                PART_ERROR(H5Aiterate_by_name_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_by_name_invalid_lapl);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(attr_space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(attr_dtype) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id2) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id3) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id4) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort release of every ID that may have been created; close
+     * errors are intentionally suppressed on this path.
+     */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(attr_space_id);
+        H5Tclose(attr_dtype);
+        H5Aclose(attr_id);
+        H5Aclose(attr_id2);
+        H5Aclose(attr_id3);
+        H5Aclose(attr_id4);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that attribute iteration performed
+ * on an object with no attributes attached to it is
+ * not problematic.
+ */
+static int
+test_attribute_iterate_0_attributes(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t dset_dtype = H5I_INVALID_HID;
+    hid_t dset_space_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("attribute iteration on object with 0 attributes");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, attribute, or iterate aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file\n");
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_ITERATE_TEST_0_ATTRIBUTES_SUBGROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup\n");
+        goto error;
+    }
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_space_id = generate_random_dataspace(ATTRIBUTE_ITERATE_TEST_0_ATTRIBUTES_DSET_SPACE_RANK, NULL,
+                                                   NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    /* Create a dataset and deliberately attach no attributes to it; every
+     * iteration below should succeed while visiting nothing.
+     */
+    if ((dset_id = H5Dcreate2(group_id, ATTRIBUTE_ITERATE_TEST_0_ATTRIBUTES_DSET_NAME, dset_dtype,
+                              dset_space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Aiterate_0_attributes_native)
+        {
+            TESTING_2("H5Aiterate (native order)");
+
+            /* attr_iter_callback2 is defined elsewhere in this file;
+             * presumably it would fail the test if invoked here -- with zero
+             * attributes the iteration itself must simply return success.
+             */
+            if (H5Aiterate2(dset_id, H5_INDEX_NAME, H5_ITER_NATIVE, NULL, attr_iter_callback2, NULL) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate2 on object with 0 attributes failed\n");
+                PART_ERROR(H5Aiterate_0_attributes_native);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_0_attributes_native);
+
+        PART_BEGIN(H5Aiterate_0_attributes_inc)
+        {
+            TESTING_2("H5Aiterate (increasing order)");
+
+            if (H5Aiterate2(dset_id, H5_INDEX_NAME, H5_ITER_INC, NULL, attr_iter_callback2, NULL) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate2 on object with 0 attributes failed\n");
+                PART_ERROR(H5Aiterate_0_attributes_inc);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_0_attributes_inc);
+
+        PART_BEGIN(H5Aiterate_0_attributes_dec)
+        {
+            TESTING_2("H5Aiterate (decreasing order)");
+            /* Compile-time escape hatch for connectors that cannot iterate in
+             * decreasing alphabetical order; the part is reported empty then.
+             */
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            if (H5Aiterate2(dset_id, H5_INDEX_NAME, H5_ITER_DEC, NULL, attr_iter_callback2, NULL) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate2 on object with 0 attributes failed\n");
+                PART_ERROR(H5Aiterate_0_attributes_dec);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Aiterate_0_attributes_dec);
+#endif
+        }
+        PART_END(H5Aiterate_0_attributes_dec);
+
+        PART_BEGIN(H5Aiterate_by_name_0_attributes_native)
+        {
+            TESTING_2("H5Aiterate_by_name (native order)");
+
+            /* Same checks through the by-name variant, addressing the dataset
+             * relative to its parent group.
+             */
+            if (H5Aiterate_by_name(group_id, ATTRIBUTE_ITERATE_TEST_0_ATTRIBUTES_DSET_NAME, H5_INDEX_NAME,
+                                   H5_ITER_NATIVE, NULL, attr_iter_callback2, NULL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name on object with 0 attributes failed\n");
+                PART_ERROR(H5Aiterate_by_name_0_attributes_native);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_by_name_0_attributes_native);
+
+        PART_BEGIN(H5Aiterate_by_name_0_attributes_inc)
+        {
+            TESTING_2("H5Aiterate_by_name (increasing order)");
+
+            if (H5Aiterate_by_name(group_id, ATTRIBUTE_ITERATE_TEST_0_ATTRIBUTES_DSET_NAME, H5_INDEX_NAME,
+                                   H5_ITER_INC, NULL, attr_iter_callback2, NULL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name on object with 0 attributes failed\n");
+                PART_ERROR(H5Aiterate_by_name_0_attributes_inc);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Aiterate_by_name_0_attributes_inc);
+
+        PART_BEGIN(H5Aiterate_by_name_0_attributes_dec)
+        {
+            TESTING_2("H5Aiterate_by_name (decreasing order)");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            if (H5Aiterate_by_name(group_id, ATTRIBUTE_ITERATE_TEST_0_ATTRIBUTES_DSET_NAME, H5_INDEX_NAME,
+                                   H5_ITER_DEC, NULL, attr_iter_callback2, NULL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Aiterate_by_name on object with 0 attributes failed\n");
+                PART_ERROR(H5Aiterate_by_name_0_attributes_dec);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Aiterate_by_name_0_attributes_dec);
+#endif
+        }
+        PART_END(H5Aiterate_by_name_0_attributes_dec);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(dset_space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort release of every ID that may have been created; close
+     * errors are intentionally suppressed on this path.
+     */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(dset_space_id);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that an attribute can be deleted
+ * using H5Adelete(_by_idx).
+ */
+static int
+test_delete_attribute(void)
+{
+ htri_t attr_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t attr_dtype = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("attribute deletion");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, attribute, or creation order aren't supported "
+ "with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create GCPL for attribute creation order tracking\n");
+ goto error;
+ }
+
+ if (H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set attribute creation order tracking\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n", ATTRIBUTE_DELETION_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id = generate_random_dataspace(ATTRIBUTE_DELETION_TEST_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Adelete)
+ {
+ TESTING_2("H5Adelete");
+
+ /* Test H5Adelete */
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete);
+ }
+
+ /* Verify the attribute has been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete);
+ }
+
+ /* Delete the attribute */
+ if (H5Adelete(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete);
+ }
+
+ /* Verify the attribute has been deleted */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete);
+
+ H5E_BEGIN_TRY
+ {
+ H5Aclose(attr_id);
+ attr_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Adelete_by_name)
+ {
+ TESTING_2("H5Adelete_by_name");
+
+ /* Test H5Adelete_by_name */
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_name);
+ }
+
+ /* Verify the attribute has been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_name);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_name);
+ }
+
+ /* Delete the attribute */
+ if (H5Adelete_by_name(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME,
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_name);
+ }
+
+ /* Verify the attribute has been deleted */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_name);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_name);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_by_name);
+
+ H5E_BEGIN_TRY
+ {
+ H5Aclose(attr_id);
+ attr_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Adelete_by_idx_crt_order_increasing)
+ {
+ TESTING_2("H5Adelete_by_idx by creation order in increasing order");
+
+ /* Create several attributes */
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ /* Verify the attributes have been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ /* Delete an attribute */
+ if (H5Adelete_by_idx(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+ H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute using H5Adelete_by_idx by creation order in "
+ "increasing order\n");
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ /* Ensure that the attribute is gone and others remain */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' doesn't exist after deletion of a different attribute!\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' doesn't exist after deletion of a different attribute!\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ /* Repeat until all attributes have been deleted */
+ if (H5Adelete_by_idx(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+ H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute using H5Adelete_by_idx by creation order in "
+ "increasing order\n");
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' doesn't exist after deletion of a different attribute!\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (H5Adelete_by_idx(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+ H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute using H5Adelete_by_idx by creation order in "
+ "increasing order\n");
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_by_idx_crt_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Aclose(attr_id);
+ attr_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Adelete_by_idx_crt_order_decreasing)
+ {
+ TESTING_2("H5Adelete_by_idx by creation order in decreasing order");
+
+ /* Create several attributes */
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ /* Verify the attributes have been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ /* Delete an attribute */
+ if (H5Adelete_by_idx(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+ H5_ITER_DEC, 2, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute using H5Adelete_by_idx by creation order in "
+ "decreasing order\n");
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ /* Ensure that the attribute is gone and others remain */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' doesn't exist after deletion of a different attribute!\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' doesn't exist after deletion of a different attribute!\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ /* Repeat until all attributes have been deleted */
+ if (H5Adelete_by_idx(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+ H5_ITER_DEC, 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute using H5Adelete_by_idx by creation order in "
+ "decreasing order\n");
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' doesn't exist after deletion of a different attribute!\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (H5Adelete_by_idx(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+ H5_ITER_DEC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute using H5Adelete_by_idx by creation order in "
+ "decreasing order\n");
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_crt_order_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_by_idx_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Aclose(attr_id);
+ attr_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Adelete_by_idx_name_order_increasing)
+ {
+ TESTING_2("H5Adelete_by_idx by alphabetical order in increasing order");
+
+ /* Create several attributes */
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ /* Verify the attributes have been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ /* Delete an attribute */
+ if (H5Adelete_by_idx(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5_INDEX_NAME,
+ H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute using H5Adelete_by_idx by alphabetical order in "
+ "increasing order\n");
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ /* Ensure that the attribute is gone and others remain */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' doesn't exist after deletion of a different attribute!\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' doesn't exist after deletion of a different attribute!\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ /* Repeat until all attributes have been deleted */
+ if (H5Adelete_by_idx(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5_INDEX_NAME,
+ H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute using H5Adelete_by_idx by alphabetical order in "
+ "increasing order\n");
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' doesn't exist after deletion of a different attribute!\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (H5Adelete_by_idx(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5_INDEX_NAME,
+ H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute using H5Adelete_by_idx by alphabetical order in "
+ "increasing order\n");
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_by_idx_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Aclose(attr_id);
+ attr_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Adelete_by_idx_name_order_decreasing)
+ {
+ TESTING_2("H5Adelete_by_idx by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Create several attributes */
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close attribute '%s'\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ /* Verify the attributes have been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' didn't exist before deletion\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ /* Delete an attribute */
+ if (H5Adelete_by_idx(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5_INDEX_NAME,
+ H5_ITER_DEC, 2, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute using H5Adelete_by_idx by alphabetical order in "
+ "decreasing order\n");
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ /* Ensure that the attribute is gone and others remain */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' doesn't exist after deletion of a different attribute!\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' doesn't exist after deletion of a different attribute!\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ /* Repeat until all attributes have been deleted */
+ if (H5Adelete_by_idx(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5_INDEX_NAME,
+ H5_ITER_DEC, 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute using H5Adelete_by_idx by alphabetical order in "
+ "decreasing order\n");
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' doesn't exist after deletion of a different attribute!\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (H5Adelete_by_idx(container_group, ATTRIBUTE_DELETION_TEST_GROUP_NAME, H5_INDEX_NAME,
+ H5_ITER_DEC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to delete attribute using H5Adelete_by_idx by alphabetical order in "
+ "decreasing order\n");
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME2);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_TEST_ATTR_NAME3)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute '%s' exists\n",
+ ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ if (attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute '%s' exists after deletion!\n", ATTRIBUTE_DELETION_TEST_ATTR_NAME3);
+ PART_ERROR(H5Adelete_by_idx_name_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Adelete_by_idx_name_order_decreasing);
+#endif
+ }
+ PART_END(H5Adelete_by_idx_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Aclose(attr_id);
+ attr_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(attr_dtype) < 0)
+ TEST_ERROR;
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(space_id);
+ H5Tclose(attr_dtype);
+ H5Aclose(attr_id);
+ H5Pclose(gcpl_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that an attribute can't be deleted
+ * when H5Adelete(_by_name/_by_idx) is passed invalid
+ * parameters.
+ */
+static int
+test_delete_attribute_invalid_params(void)
+{
+ herr_t err_ret = -1;
+ htri_t attr_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t attr_dtype = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("attribute deletion with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or attribute aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_DELETION_INVALID_PARAMS_TEST_GROUP_NAME,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n",
+ ATTRIBUTE_DELETION_INVALID_PARAMS_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id = generate_random_dataspace(ATTRIBUTE_DELETION_INVALID_PARAMS_TEST_SPACE_RANK, NULL, NULL,
+ TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DELETION_INVALID_PARAMS_TEST_ATTR_NAME, attr_dtype,
+ space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute\n");
+ goto error;
+ }
+
+ /* Verify the attribute has been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DELETION_INVALID_PARAMS_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute exists\n");
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute didn't exists\n");
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Adelete_invalid_loc_id)
+ {
+ TESTING_2("H5Adelete with an invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete(H5I_INVALID_HID, ATTRIBUTE_DELETION_INVALID_PARAMS_TEST_ATTR_NAME);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete with an invalid loc_id!\n");
+ PART_ERROR(H5Adelete_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_invalid_loc_id);
+
+ PART_BEGIN(H5Adelete_invalid_attr_name)
+ {
+ TESTING_2("H5Adelete with an invalid attribute name");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete(group_id, NULL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete with a NULL attribute name!\n");
+ PART_ERROR(H5Adelete_invalid_attr_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete(group_id, "");
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete with an invalid attribute name of ''!\n");
+ PART_ERROR(H5Adelete_invalid_attr_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_invalid_attr_name);
+
+ PART_BEGIN(H5Adelete_by_name_invalid_loc_id)
+ {
+ TESTING_2("H5Adelete_by_name with an invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_name(H5I_INVALID_HID, ".",
+ ATTRIBUTE_DELETION_INVALID_PARAMS_TEST_ATTR_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete_by_name with an invalid loc_id!\n");
+ PART_ERROR(H5Adelete_by_name_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_by_name_invalid_loc_id);
+
+ PART_BEGIN(H5Adelete_by_name_invalid_obj_name)
+ {
+ TESTING_2("H5Adelete_by_name with an invalid object name");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_name(group_id, NULL, ATTRIBUTE_DELETION_INVALID_PARAMS_TEST_ATTR_NAME,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete_by_name with a NULL object name!\n");
+ PART_ERROR(H5Adelete_by_name_invalid_obj_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_name(group_id, "", ATTRIBUTE_DELETION_INVALID_PARAMS_TEST_ATTR_NAME,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(
+ " deleted an attribute using H5Adelete_by_name with an invalid object name of ''!\n");
+ PART_ERROR(H5Adelete_by_name_invalid_obj_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_by_name_invalid_obj_name);
+
+ PART_BEGIN(H5Adelete_by_name_invalid_attr_name)
+ {
+ TESTING_2("H5Adelete_by_name with an invalid attribute name");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_name(group_id, ".", NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete_by_name with a NULL attribute name!\n");
+ PART_ERROR(H5Adelete_by_name_invalid_attr_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_name(group_id, ".", "", H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete_by_name with an invalid attribute name of "
+ "''!\n");
+ PART_ERROR(H5Adelete_by_name_invalid_attr_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_by_name_invalid_attr_name);
+
+ PART_BEGIN(H5Adelete_by_name_invalid_lapl)
+ {
+ TESTING_2("H5Adelete_by_name with an invalid LAPL");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_name(group_id, ".", ATTRIBUTE_DELETION_INVALID_PARAMS_TEST_ATTR_NAME,
+ H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete_by_name with an invalid LAPL!\n");
+ PART_ERROR(H5Adelete_by_name_invalid_lapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_by_name_invalid_lapl);
+
+ PART_BEGIN(H5Adelete_by_idx_invalid_loc_id)
+ {
+ TESTING_2("H5Adelete_by_idx with an invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_idx(H5I_INVALID_HID, ".", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete_by_idx with an invalid loc_id!\n");
+ PART_ERROR(H5Adelete_by_idx_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_by_idx_invalid_loc_id);
+
+ PART_BEGIN(H5Adelete_by_idx_invalid_obj_name)
+ {
+ TESTING_2("H5Adelete_by_idx with an invalid object name");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_idx(group_id, NULL, H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete_by_idx with a NULL object name!\n");
+ PART_ERROR(H5Adelete_by_idx_invalid_obj_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_idx(group_id, "", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(
+ " deleted an attribute using H5Adelete_by_idx with an invalid object name of ''!\n");
+ PART_ERROR(H5Adelete_by_idx_invalid_obj_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_by_idx_invalid_obj_name);
+
+ PART_BEGIN(H5Adelete_by_idx_invalid_index_type)
+ {
+ TESTING_2("H5Adelete_by_idx with an invalid index type");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_idx(group_id, ".", H5_INDEX_UNKNOWN, H5_ITER_INC, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete_by_idx with invalid index type "
+ "H5_INDEX_UNKNOWN!\n");
+ PART_ERROR(H5Adelete_by_idx_invalid_index_type);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_idx(group_id, ".", H5_INDEX_N, H5_ITER_INC, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(
+ " deleted an attribute using H5Adelete_by_idx with invalid index type H5_INDEX_N!\n");
+ PART_ERROR(H5Adelete_by_idx_invalid_index_type);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_by_idx_invalid_index_type);
+
+ PART_BEGIN(H5Adelete_by_idx_invalid_index_order)
+ {
+ TESTING_2("H5Adelete_by_idx with an invalid index ordering");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_UNKNOWN, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete_by_idx with invalid index ordering "
+ "H5_ITER_UNKNOWN!\n");
+ PART_ERROR(H5Adelete_by_idx_invalid_index_order);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_N, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete_by_idx with invalid index ordering "
+ "H5_ITER_N!\n");
+ PART_ERROR(H5Adelete_by_idx_invalid_index_order);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_by_idx_invalid_index_order);
+
+ PART_BEGIN(H5Adelete_by_idx_invalid_lapl)
+ {
+ TESTING_2("H5Adelete_by_idx with an invalid LAPL");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Adelete_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" deleted an attribute using H5Adelete_by_idx with an invalid LAPL!\n");
+ PART_ERROR(H5Adelete_by_idx_invalid_lapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Adelete_by_idx_invalid_lapl);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(attr_dtype) < 0)
+ TEST_ERROR;
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(space_id);
+ H5Tclose(attr_dtype);
+ H5Aclose(attr_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
/*
 * A test for H5Aexists and H5Aexists_by_name.
 *
 * Creates a dedicated subgroup containing a single attribute, then
 * verifies that the attribute is reported as present both directly on
 * the group (H5Aexists) and by addressing the group through its parent
 * container group (H5Aexists_by_name).
 *
 * Returns 0 on success (or when the connector lacks the required
 * capabilities), 1 on failure.
 */
static int
test_attribute_exists(void)
{
    htri_t attr_exists;
    hid_t  file_id         = H5I_INVALID_HID;
    hid_t  container_group = H5I_INVALID_HID;
    hid_t  group_id        = H5I_INVALID_HID;
    hid_t  attr_id         = H5I_INVALID_HID;
    hid_t  attr_dtype      = H5I_INVALID_HID;
    hid_t  space_id        = H5I_INVALID_HID;

    TESTING_MULTIPART("attribute existence");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
        SKIPPED();
        HDprintf(
            " API functions for basic file, group, or attribute aren't supported with this connector\n");
        return 0;
    }

    TESTING_2("test setup");

    /* Open the shared test file and the top-level attribute test group */
    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open container group\n");
        goto error;
    }

    /* Create the subgroup this test attaches its attribute to */
    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_EXISTS_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
                               H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create container group '%s'\n", ATTRIBUTE_EXISTS_TEST_GROUP_NAME);
        goto error;
    }

    if ((space_id = generate_random_dataspace(ATTRIBUTE_EXISTS_TEST_SPACE_RANK, NULL, NULL, TRUE)) < 0)
        TEST_ERROR;

    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
        TEST_ERROR;

    /* Create the attribute that the H5Aexists(_by_name) calls below will look for */
    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_EXISTS_TEST_ATTR_NAME, attr_dtype, space_id, H5P_DEFAULT,
                              H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create attribute\n");
        goto error;
    }

    PASSED();

    BEGIN_MULTIPART
    {
        PART_BEGIN(H5Aexists)
        {
            TESTING_2("H5Aexists");

            /* Check for the attribute directly on its parent group */
            if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_EXISTS_TEST_ATTR_NAME)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't determine if attribute exists\n");
                PART_ERROR(H5Aexists);
            }

            if (!attr_exists) {
                H5_FAILED();
                HDprintf(" attribute '%s' did not exist\n", ATTRIBUTE_EXISTS_TEST_ATTR_NAME);
                PART_ERROR(H5Aexists);
            }

            PASSED();
        }
        PART_END(H5Aexists);

        PART_BEGIN(H5Aexists_by_name)
        {
            TESTING_2("H5Aexists_by_name");

            /* Check for the same attribute, but address the parent group by
             * name relative to the container group */
            if ((attr_exists = H5Aexists_by_name(container_group, ATTRIBUTE_EXISTS_TEST_GROUP_NAME,
                                                 ATTRIBUTE_EXISTS_TEST_ATTR_NAME, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't determine if attribute exists by name\n");
                PART_ERROR(H5Aexists_by_name);
            }

            if (!attr_exists) {
                H5_FAILED();
                HDprintf(" attribute '%s' did not exist by name\n", ATTRIBUTE_EXISTS_TEST_ATTR_NAME);
                PART_ERROR(H5Aexists_by_name);
            }

            PASSED();
        }
        PART_END(H5Aexists_by_name);
    }
    END_MULTIPART;

    TESTING_2("test cleanup");

    if (H5Aclose(attr_id) < 0)
        TEST_ERROR;
    if (H5Sclose(space_id) < 0)
        TEST_ERROR;
    if (H5Tclose(attr_dtype) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup of any IDs created before the failure */
    H5E_BEGIN_TRY
    {
        H5Sclose(space_id);
        H5Tclose(attr_dtype);
        H5Aclose(attr_id);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
+/*
+ * A test to ensure that H5Aexists(_by_name) will fail when
+ * given invalid parameters.
+ */
+static int
+test_attribute_exists_invalid_params(void)
+{
+ herr_t err_ret = -1;
+ htri_t attr_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t attr_dtype = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("attribute existence with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or attribute aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", ATTRIBUTE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n",
+ ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id = generate_random_dataspace(ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_SPACE_RANK, NULL, NULL,
+ TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_ATTR_NAME, attr_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute\n");
+ goto error;
+ }
+
+ /* Verify the attribute has been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute exists\n");
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute didn't exists\n");
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Aexists_invalid_loc_id)
+ {
+ TESTING_2("H5Aexists with an invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Aexists(H5I_INVALID_HID, ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_ATTR_NAME);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Aexists with an invalid loc_id succeeded!\n");
+ PART_ERROR(H5Aexists_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aexists_invalid_loc_id);
+
+ PART_BEGIN(H5Aexists_invalid_attr_name)
+ {
+ TESTING_2("H5Aexists with invalid attribute name");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Aexists(group_id, NULL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Aexists with a NULL attribute name succeeded!\n");
+ PART_ERROR(H5Aexists_invalid_attr_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Aexists(group_id, "");
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Aexists with an invalid attribute name of '' succeeded!\n");
+ PART_ERROR(H5Aexists_invalid_attr_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aexists_invalid_attr_name);
+
+ PART_BEGIN(H5Aexists_by_name_invalid_loc_id)
+ {
+ TESTING_2("H5Aexists_by_name with an invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Aexists_by_name(H5I_INVALID_HID, ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_GROUP_NAME,
+ ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_ATTR_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Aexists_by_name with an invalid loc_id succeeded!\n");
+ PART_ERROR(H5Aexists_by_name_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aexists_by_name_invalid_loc_id);
+
+ PART_BEGIN(H5Aexists_by_name_invalid_obj_name)
+ {
+ TESTING_2("H5Aexists_by_name with invalid object name");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Aexists_by_name(file_id, NULL, ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_ATTR_NAME,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Aexists_by_name with a NULL object name succeeded!\n");
+ PART_ERROR(H5Aexists_by_name_invalid_obj_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Aexists_by_name(file_id, "", ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_ATTR_NAME,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Aexists_by_name with an invalid object name of '' succeeded!\n");
+ PART_ERROR(H5Aexists_by_name_invalid_obj_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aexists_by_name_invalid_obj_name);
+
+ PART_BEGIN(H5Aexists_by_name_invalid_attr_name)
+ {
+ TESTING_2("H5Aexists_by_name with invalid attribute name");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Aexists_by_name(file_id, ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_GROUP_NAME, NULL,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Aexists_by_name with a NULL attribute name succeeded!\n");
+ PART_ERROR(H5Aexists_by_name_invalid_attr_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Aexists_by_name(file_id, ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_GROUP_NAME, "",
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Aexists_by_name with an invalid attribute name of '' succeeded!\n");
+ PART_ERROR(H5Aexists_by_name_invalid_attr_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aexists_by_name_invalid_attr_name);
+
+ PART_BEGIN(H5Aexists_by_name_invalid_lapl)
+ {
+ TESTING_2("H5Aexists_by_name with an invalid link access property list");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Aexists_by_name(file_id, ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_GROUP_NAME,
+ ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_ATTR_NAME, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Aexists_by_name with an invalid link access property list succeeded!\n");
+ PART_ERROR(H5Aexists_by_name_invalid_lapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Aexists_by_name_invalid_lapl);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(attr_dtype) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(space_id);
+ H5Tclose(attr_dtype);
+ H5Aclose(attr_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to make sure many attributes can be written
+ * to the file
+ */
+static int
+test_attribute_many(void)
+{
+ unsigned u;
+ htri_t attr_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t attr_dtype = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ char attrname[ATTRIBUTE_MANY_NAME_BUF_SIZE]; /* Name of attribute */
+
+ TESTING("creating many attributes");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or attribute aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file\n");
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_MANY_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create the group '%s'\n", ATTRIBUTE_MANY_GROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id = generate_random_dataspace(ATTRIBUTE_MANY_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+ TEST_ERROR;
+
+ /* Create many attributes */
+ for (u = 0; u < ATTRIBUTE_MANY_NUMB; u++) {
+ sprintf(attrname, "many-%06u", u);
+
+ if ((attr_id = H5Acreate2(group_id, attrname, attr_dtype, space_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute\n");
+ goto error;
+ }
+
+ /* Verify the attribute has been created */
+ if ((attr_exists = H5Aexists(group_id, attrname)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute exists\n");
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute did not exist\n");
+ goto error;
+ }
+
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+ }
+
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(attr_dtype) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(space_id);
+ H5Tclose(attr_dtype);
+ H5Aclose(attr_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
/*
 * A test to make sure an attribute can be opened for
 * a second time
 *
 * Creates an attribute, then calls H5Aopen on it while the original
 * creation ID (attr_id) is still open, so that two valid IDs
 * (attr_id, attr_id2) refer to the same attribute simultaneously.
 *
 * Returns 0 on success (or when the connector lacks the required
 * capabilities), 1 on failure.
 */
static int
test_attribute_duplicate_id(void)
{
    htri_t attr_exists;
    hid_t  file_id         = H5I_INVALID_HID;
    hid_t  container_group = H5I_INVALID_HID;
    hid_t  group_id        = H5I_INVALID_HID;
    hid_t  attr_id = H5I_INVALID_HID, attr_id2 = H5I_INVALID_HID;
    hid_t  attr_dtype = H5I_INVALID_HID;
    hid_t  space_id   = H5I_INVALID_HID;

    TESTING("duplicated IDs for an attribute");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
        SKIPPED();
        HDprintf(
            " API functions for basic file, group, or attribute aren't supported with this connector\n");
        return 0;
    }

    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open file\n");
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open container group\n");
        goto error;
    }

    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_DUPLICATE_ID_GRP_NAME, H5P_DEFAULT, H5P_DEFAULT,
                               H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create the group '%s'\n", ATTRIBUTE_DUPLICATE_ID_GRP_NAME);
        goto error;
    }

    if ((space_id = generate_random_dataspace(ATTRIBUTE_DUPLICATE_ID_SPACE_RANK, NULL, NULL, TRUE)) < 0)
        TEST_ERROR;

    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
        TEST_ERROR;

    /* Create the attribute; attr_id stays open for the duration of the test */
    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_DUPLICATE_ID_ATTR_NAME, attr_dtype, space_id, H5P_DEFAULT,
                              H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create attribute\n");
        goto error;
    }

    /* Verify the attribute has been created */
    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_DUPLICATE_ID_ATTR_NAME)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't determine if attribute exists\n");
        goto error;
    }

    if (!attr_exists) {
        H5_FAILED();
        HDprintf(" attribute did not exist\n");
        goto error;
    }

    /* Open the attribute just created and get a second ID */
    if ((attr_id2 = H5Aopen(group_id, ATTRIBUTE_DUPLICATE_ID_ATTR_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" attribute can't be opened for a second time\n");
        goto error;
    }

    /* Both IDs must close cleanly */
    if (H5Aclose(attr_id) < 0)
        TEST_ERROR;
    if (H5Aclose(attr_id2) < 0)
        TEST_ERROR;
    if (H5Sclose(space_id) < 0)
        TEST_ERROR;
    if (H5Tclose(attr_dtype) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup of any IDs created before the failure */
    H5E_BEGIN_TRY
    {
        H5Sclose(space_id);
        H5Tclose(attr_dtype);
        H5Aclose(attr_id);
        H5Aclose(attr_id2);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
/*
 * A test to check that the number of attributes attached
 * to an object (group, dataset, datatype) can be retrieved.
 *
 * Attaches a single attribute to a group, then checks that
 * obj_info.num_attrs == 1 through each of the three object-info
 * retrieval routines: H5Oget_info3, H5Oget_info_by_name3 and
 * H5Oget_info_by_idx3.
 *
 * Returns 0 on success (or when the connector lacks the required
 * capabilities), 1 on failure.
 *
 * XXX: Cover all of the cases and move to H5O tests.
 */
static int
test_get_number_attributes(void)
{
    H5O_info2_t obj_info;
    htri_t      attr_exists;
    hid_t       file_id         = H5I_INVALID_HID;
    hid_t       container_group = H5I_INVALID_HID;
    hid_t       group_id        = H5I_INVALID_HID;
    hid_t       attr_id         = H5I_INVALID_HID;
    hid_t       attr_dtype      = H5I_INVALID_HID;
    hid_t       space_id        = H5I_INVALID_HID;

    TESTING_MULTIPART("retrieval of the number of attributes on an object");

    /* Make sure the connector supports the API functions being tested;
     * H5Oget_info* additionally requires the OBJECT_MORE capability */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE)) {
        SKIPPED();
        HDprintf(" API functions for basic file, group, attribute, or object aren't supported with this "
                 "connector\n");
        return 0;
    }

    TESTING_2("test setup");

    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open file\n");
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open container group\n");
        goto error;
    }

    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_GET_NUM_ATTRS_TEST_GRP_NAME, H5P_DEFAULT,
                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create the group '%s'\n", ATTRIBUTE_GET_NUM_ATTRS_TEST_GRP_NAME);
        goto error;
    }

    if ((space_id = generate_random_dataspace(ATTRIBUTE_GET_NUM_ATTRS_TEST_SPACE_RANK, NULL, NULL, TRUE)) < 0)
        TEST_ERROR;

    if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
        TEST_ERROR;

    /* Attach exactly one attribute, so each part below expects num_attrs == 1 */
    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_GET_NUM_ATTRS_TEST_ATTR_NAME, attr_dtype, space_id,
                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create attribute\n");
        goto error;
    }

    /* Verify the attribute has been created */
    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_GET_NUM_ATTRS_TEST_ATTR_NAME)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't determine if attribute exists\n");
        goto error;
    }

    if (!attr_exists) {
        H5_FAILED();
        HDprintf(" attribute did not exist\n");
        goto error;
    }

    PASSED();

    BEGIN_MULTIPART
    {
        PART_BEGIN(H5Oget_info)
        {
            TESTING_2("H5Oget_info");

            /* Now get the number of attributes from the group */
            if (H5Oget_info3(group_id, &obj_info, H5O_INFO_ALL) < 0) {
                H5_FAILED();
                HDprintf(" couldn't retrieve group info using H5Oget_info3\n");
                PART_ERROR(H5Oget_info);
            }

            if (obj_info.num_attrs != 1) {
                H5_FAILED();
                HDprintf(" invalid number of attributes received\n");
                PART_ERROR(H5Oget_info);
            }

            PASSED();
        }
        PART_END(H5Oget_info);

        PART_BEGIN(H5Oget_info_by_name)
        {
            TESTING_2("H5Oget_info_by_name");

            /* Same check, addressing the group by name from its parent */
            if (H5Oget_info_by_name3(container_group, ATTRIBUTE_GET_NUM_ATTRS_TEST_GRP_NAME, &obj_info,
                                     H5O_INFO_ALL, H5P_DEFAULT) < 0) {
                H5_FAILED();
                HDprintf(" couldn't retrieve group info using H5Oget_info_by_name3\n");
                PART_ERROR(H5Oget_info_by_name);
            }

            if (obj_info.num_attrs != 1) {
                H5_FAILED();
                HDprintf(" invalid number of attributes received\n");
                PART_ERROR(H5Oget_info_by_name);
            }

            PASSED();
        }
        PART_END(H5Oget_info_by_name);

        PART_BEGIN(H5Oget_info_by_idx)
        {
            TESTING_2("H5Oget_info_by_idx");

            /* Same check, addressing the group by creation index within the
             * container group */
            if (H5Oget_info_by_idx3(container_group, ".", H5_INDEX_NAME, H5_ITER_INC, 0, &obj_info,
                                    H5O_INFO_ALL, H5P_DEFAULT) < 0) {
                H5_FAILED();
                HDprintf(" couldn't retrieve group info using H5Oget_info_by_idx3\n");
                PART_ERROR(H5Oget_info_by_idx);
            }

            if (obj_info.num_attrs != 1) {
                H5_FAILED();
                HDprintf(" invalid number of attributes received\n");
                PART_ERROR(H5Oget_info_by_idx);
            }

            PASSED();
        }
        PART_END(H5Oget_info_by_idx);
    }
    END_MULTIPART;

    TESTING_2("test cleanup");

    if (H5Sclose(space_id) < 0)
        TEST_ERROR;
    if (H5Tclose(attr_dtype) < 0)
        TEST_ERROR;
    if (H5Aclose(attr_id) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup of any IDs created before the failure */
    H5E_BEGIN_TRY
    {
        H5Sclose(space_id);
        H5Tclose(attr_dtype);
        H5Aclose(attr_id);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
+/*
+ * A test to check that the reference count of a named datatype used by
+ * attribute and a dataset is correct.
+ *
+ * XXX: May move to H5O tests.
+ */
+static int
+test_attr_shared_dtype(void)
+{
+#ifndef NO_SHARED_DATATYPES
+ H5O_info2_t obj_info;
+ htri_t attr_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t attr_dtype = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+#endif
+
+ TESTING("shared datatype for attributes");
+
+#ifndef NO_SHARED_DATATYPES
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, attribute, stored datatype, or object aren't "
+ "supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file\n");
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, ATTRIBUTE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_SHARED_DTYPE_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create the group '%s'\n", ATTRIBUTE_SHARED_DTYPE_GROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id = generate_random_dataspace(ATTRIBUTE_SHARED_DTYPE_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+ TEST_ERROR;
+
+ /* Commit datatype to file */
+ if (H5Tcommit2(group_id, ATTRIBUTE_SHARED_DTYPE_NAME, attr_dtype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't commit datatype\n");
+ goto error;
+ }
+
+ if (H5Oget_info_by_name3(group_id, ATTRIBUTE_SHARED_DTYPE_NAME, &obj_info, H5O_INFO_ALL, H5P_DEFAULT) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve root group info using H5Oget_info_by_name3\n");
+ goto error;
+ }
+
+ if (obj_info.rc != 1) {
+ H5_FAILED();
+ HDprintf(" reference count of the named datatype is wrong: %u\n", obj_info.rc);
+ goto error;
+ }
+
+ if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_SHARED_DTYPE_ATTR_NAME, attr_dtype, space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute\n");
+ goto error;
+ }
+
+ /* Verify the attribute has been created */
+ if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_SHARED_DTYPE_ATTR_NAME)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if attribute exists\n");
+ goto error;
+ }
+
+ if (!attr_exists) {
+ H5_FAILED();
+ HDprintf(" attribute did not exist\n");
+ goto error;
+ }
+
+ if (H5Oget_info_by_name3(group_id, ATTRIBUTE_SHARED_DTYPE_NAME, &obj_info, H5O_INFO_ALL, H5P_DEFAULT) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve root group info using H5Oget_info_by_name3\n");
+ goto error;
+ }
+
+ if (obj_info.rc != 2) {
+ H5_FAILED();
+ HDprintf(" reference count of the named datatype is wrong: %u\n", obj_info.rc);
+ goto error;
+ }
+
+ if ((dset_id = H5Dcreate2(group_id, ATTRIBUTE_SHARED_DTYPE_DSET_NAME, attr_dtype, space_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset\n");
+ goto error;
+ }
+
+ if (H5Oget_info_by_name3(group_id, ATTRIBUTE_SHARED_DTYPE_NAME, &obj_info, H5O_INFO_ALL, H5P_DEFAULT) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve root group info using H5Oget_info_by_name3\n");
+ goto error;
+ }
+
+ if (obj_info.rc != 3) {
+ H5_FAILED();
+ HDprintf(" reference count of the named datatype is wrong: %u\n", obj_info.rc);
+ goto error;
+ }
+
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(attr_dtype) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Tclose(attr_dtype);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+#else
+ SKIPPED();
+ return 0;
+#endif
+}
+
+static herr_t
+attr_iter_callback1(hid_t location_id, const char *attr_name, const H5A_info_t *ainfo, void *op_data)
+{
+ size_t *i = (size_t *)op_data;
+ size_t counter_val = *((size_t *)op_data);
+ size_t test_iteration;
+ char expected_attr_name[ATTRIBUTE_ITERATE_TEST_ATTR_NAME_BUF_SIZE];
+ herr_t ret_val = H5_ITER_CONT;
+
+ UNUSED(location_id);
+ UNUSED(ainfo);
+
+ /*
+ * Four tests are run in the following order per attribute iteration API call:
+ *
+ * - iteration by attribute name in increasing order
+ * - iteration by attribute name in decreasing order
+ * - iteration by attribute creation order in increasing order
+ * - iteration by attribute creation order in decreasing order
+ *
+ * Based on how the test is written, this will mean that the attribute names
+ * will run in increasing order on the first and fourth tests and decreasing
+ * order on the second and third tests.
+ */
+ test_iteration = (counter_val / ATTRIBUTE_ITERATE_TEST_NUM_ATTRS);
+ if (test_iteration == 0 || test_iteration == 3) {
+ HDsnprintf(expected_attr_name, ATTRIBUTE_ITERATE_TEST_ATTR_NAME_BUF_SIZE,
+ ATTRIBUTE_ITERATE_TEST_ATTR_NAME "%d",
+ (int)(counter_val % ATTRIBUTE_ITERATE_TEST_NUM_ATTRS));
+ }
+ else {
+ HDsnprintf(
+ expected_attr_name, ATTRIBUTE_ITERATE_TEST_ATTR_NAME_BUF_SIZE,
+ ATTRIBUTE_ITERATE_TEST_ATTR_NAME "%d",
+ (int)(ATTRIBUTE_ITERATE_TEST_NUM_ATTRS - (counter_val % ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) - 1));
+ }
+
+ if (HDstrncmp(attr_name, expected_attr_name, ATTRIBUTE_ITERATE_TEST_ATTR_NAME_BUF_SIZE)) {
+ HDprintf(" attribute name '%s' didn't match expected name '%s'\n", attr_name, expected_attr_name);
+ ret_val = H5_ITER_ERROR;
+ goto done;
+ }
+
+ /*
+ * If the attribute's creation order is marked as valid, make sure
+ * that it corresponds to what is expected based on the order that
+ * the attributes were created in.
+ */
+ if (ainfo->corder_valid) {
+ H5O_msg_crt_idx_t expected_crt_order;
+
+ /*
+ * As the attributes are created with a reverse-ordering naming
+ * scheme to test creation order, their creation order values will
+ * be listed in reverse ordering on the first and fourth tests and
+ * in normal ordering on the second and third tests.
+ */
+ if (test_iteration == 0 || test_iteration == 3)
+ expected_crt_order = (H5O_msg_crt_idx_t)(ATTRIBUTE_ITERATE_TEST_NUM_ATTRS -
+ (counter_val % ATTRIBUTE_ITERATE_TEST_NUM_ATTRS) - 1);
+ else
+ expected_crt_order = (H5O_msg_crt_idx_t)(counter_val % ATTRIBUTE_ITERATE_TEST_NUM_ATTRS);
+
+ if (ainfo->corder != expected_crt_order) {
+ H5_FAILED();
+ HDprintf(" attribute's creation order value of %lld didn't match expected value of %lld\n",
+ (long long)ainfo->corder, (long long)expected_crt_order);
+ ret_val = H5_ITER_ERROR;
+ goto done;
+ }
+ }
+
+done:
+ (*i)++;
+
+ return ret_val;
+}
+
+static herr_t
+attr_iter_callback2(hid_t location_id, const char *attr_name, const H5A_info_t *ainfo, void *op_data)
+{
+ UNUSED(location_id);
+ UNUSED(attr_name);
+ UNUSED(ainfo);
+ UNUSED(op_data);
+
+ return 0;
+}
+
+int
+H5_api_attribute_test(void)
+{
+ size_t i;
+ int nerrors;
+
+ HDprintf("**********************************************\n");
+ HDprintf("* *\n");
+ HDprintf("* API Attribute Tests *\n");
+ HDprintf("* *\n");
+ HDprintf("**********************************************\n\n");
+
+ for (i = 0, nerrors = 0; i < ARRAY_LENGTH(attribute_tests); i++) {
+ nerrors += (*attribute_tests[i])() ? 1 : 0;
+ }
+
+ HDprintf("\n");
+
+ return nerrors;
+}
diff --git a/test/API/H5_api_attribute_test.h b/test/API/H5_api_attribute_test.h
new file mode 100644
index 0000000..7656263
--- /dev/null
+++ b/test/API/H5_api_attribute_test.h
@@ -0,0 +1,203 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_ATTRIBUTE_TEST_H
+#define H5_API_ATTRIBUTE_TEST_H
+
+#include "H5_api_test.h"
+
+int H5_api_attribute_test(void);
+
+/**************************************************
+ * *
+ * API Attribute test defines *
+ * *
+ **************************************************/
+
+#define ATTRIBUTE_CREATE_ON_ROOT_SPACE_RANK 1
+#define ATTRIBUTE_CREATE_ON_ROOT_ATTR_NAME "attr_on_root"
+#define ATTRIBUTE_CREATE_ON_ROOT_ATTR_NAME2 "attr_on_root2"
+
+#define ATTRIBUTE_CREATE_ON_DATASET_DSET_SPACE_RANK 2
+#define ATTRIBUTE_CREATE_ON_DATASET_ATTR_SPACE_RANK 1
+#define ATTRIBUTE_CREATE_ON_DATASET_GROUP_NAME "attr_on_dataset_test"
+#define ATTRIBUTE_CREATE_ON_DATASET_DSET_NAME "dataset_with_attr"
+#define ATTRIBUTE_CREATE_ON_DATASET_ATTR_NAME "attr_on_dataset"
+#define ATTRIBUTE_CREATE_ON_DATASET_ATTR_NAME2 "attr_on_dataset2"
+
+#define ATTRIBUTE_CREATE_ON_DATATYPE_SPACE_RANK 1
+#define ATTRIBUTE_CREATE_ON_DATATYPE_DTYPE_NAME "datatype_with_attr"
+#define ATTRIBUTE_CREATE_ON_DATATYPE_GROUP_NAME "attr_on_datatype_test"
+#define ATTRIBUTE_CREATE_ON_DATATYPE_ATTR_NAME "attr_on_datatype"
+#define ATTRIBUTE_CREATE_ON_DATATYPE_ATTR_NAME2 "attr_on_datatype2"
+
+#define ATTRIBUTE_CREATE_NULL_DATASPACE_TEST_SUBGROUP_NAME "attr_with_null_space_test"
+#define ATTRIBUTE_CREATE_NULL_DATASPACE_TEST_ATTR_NAME "attr_with_null_space"
+
+#define ATTRIBUTE_CREATE_SCALAR_DATASPACE_TEST_SUBGROUP_NAME "attr_with_scalar_space_test"
+#define ATTRIBUTE_CREATE_SCALAR_DATASPACE_TEST_ATTR_NAME "attr_with_scalar_space"
+
+#define ATTRIBUTE_CREATE_WITH_SPACE_IN_NAME_SPACE_RANK 1
+#define ATTRIBUTE_CREATE_WITH_SPACE_IN_NAME_GROUP_NAME "attr_with_space_in_name_test"
+#define ATTRIBUTE_CREATE_WITH_SPACE_IN_NAME_ATTR_NAME "attr with space in name"
+
+#define ATTRIBUTE_CREATE_INVALID_PARAMS_SPACE_RANK 1
+#define ATTRIBUTE_CREATE_INVALID_PARAMS_GROUP_NAME "attribute_create_invalid_params_test"
+#define ATTRIBUTE_CREATE_INVALID_PARAMS_ATTR_NAME "invalid_params_attr"
+
+#define ATTRIBUTE_OPEN_TEST_SPACE_RANK 1
+#define ATTRIBUTE_OPEN_TEST_GROUP_NAME "attribute_open_test"
+#define ATTRIBUTE_OPEN_TEST_ATTR_NAME "attribute_open_test_attr"
+#define ATTRIBUTE_OPEN_TEST_ATTR_NAME2 ATTRIBUTE_OPEN_TEST_ATTR_NAME "2"
+#define ATTRIBUTE_OPEN_TEST_ATTR_NAME3 ATTRIBUTE_OPEN_TEST_ATTR_NAME "3"
+
+#define ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME "attribute_open_invalid_params_test"
+#define ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_SPACE_RANK 1
+#define ATTRIBUTE_OPEN_INVALID_PARAMS_TEST_ATTR_NAME "attribute_open_invalid_params_attr"
+
+#define ATTRIBUTE_WRITE_TEST_ATTR_DTYPE_SIZE sizeof(int)
+#define ATTRIBUTE_WRITE_TEST_ATTR_DTYPE H5T_NATIVE_INT
+#define ATTRIBUTE_WRITE_TEST_SPACE_RANK 1
+#define ATTRIBUTE_WRITE_TEST_GROUP_NAME "attr_write_test"
+#define ATTRIBUTE_WRITE_TEST_ATTR_NAME "write_test_attr"
+
+#define ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_ATTR_DTYPE_SIZE sizeof(int)
+#define ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_ATTR_DTYPE H5T_NATIVE_INT
+#define ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_SPACE_RANK 1
+#define ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_GROUP_NAME "attr_write_invalid_params_test"
+#define ATTRIBUTE_WRITE_INVALID_PARAMS_TEST_ATTR_NAME "invalid_params_write_test_attr"
+
+#define ATTRIBUTE_READ_TEST_ATTR_DTYPE_SIZE sizeof(int)
+#define ATTRIBUTE_READ_TEST_ATTR_DTYPE H5T_NATIVE_INT
+#define ATTRIBUTE_READ_TEST_SPACE_RANK 1
+#define ATTRIBUTE_READ_TEST_GROUP_NAME "attr_read_test"
+#define ATTRIBUTE_READ_TEST_ATTR_NAME "read_test_attr"
+
+#define ATTRIBUTE_READ_INVALID_PARAMS_TEST_ATTR_DTYPE_SIZE sizeof(int)
+#define ATTRIBUTE_READ_INVALID_PARAMS_TEST_ATTR_DTYPE H5T_NATIVE_INT
+#define ATTRIBUTE_READ_INVALID_PARAMS_TEST_SPACE_RANK 1
+#define ATTRIBUTE_READ_INVALID_PARAMS_TEST_GROUP_NAME "attr_read_invalid_params_test"
+#define ATTRIBUTE_READ_INVALID_PARAMS_TEST_ATTR_NAME "invalid_params_read_test_attr"
+
+#define ATTRIBUTE_READ_EMPTY_SPACE_RANK 1
+#define ATTRIBUTE_READ_EMPTY_ATTR_GROUP_NAME "read_empty_attr_test"
+#define ATTRIBUTE_READ_EMPTY_ATTR_NAME "read_empty_attr"
+#define ATTRIBUTE_READ_EMPTY_DTYPE H5T_NATIVE_INT
+#define ATTRIBUTE_READ_EMPTY_DTYPE_SIZE sizeof(int)
+
+#define ATTRIBUTE_GET_SPACE_TYPE_TEST_SPACE_RANK 1
+#define ATTRIBUTE_GET_SPACE_TYPE_TEST_GROUP_NAME "get_attr_space_type_test"
+#define ATTRIBUTE_GET_SPACE_TYPE_TEST_ATTR_NAME "get_space_type_test_attr"
+
+#define ATTRIBUTE_GET_SPACE_TYPE_INVALID_PARAMS_TEST_SPACE_RANK 1
+#define ATTRIBUTE_GET_SPACE_TYPE_INVALID_PARAMS_TEST_GROUP_NAME "get_attr_space_type_invalid_params_test"
+#define ATTRIBUTE_GET_SPACE_TYPE_INVALID_PARAMS_TEST_ATTR_NAME "get_space_type_invalid_params_test_attr"
+
+#define ATTRIBUTE_PROPERTY_LIST_TEST_ATTRIBUTE_NAME1 "property_list_test_attribute1"
+#define ATTRIBUTE_PROPERTY_LIST_TEST_ATTRIBUTE_NAME2 "property_list_test_attribute2"
+#define ATTRIBUTE_PROPERTY_LIST_TEST_SUBGROUP_NAME "attribute_property_list_test_group"
+#define ATTRIBUTE_PROPERTY_LIST_TEST_SPACE_RANK 1
+
+#define ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME "attr_name_retrieval_attr"
+#define ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME2 ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME "2"
+#define ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME3 ATTRIBUTE_GET_NAME_TEST_ATTRIBUTE_NAME "3"
+#define ATTRIBUTE_GET_NAME_TEST_SPACE_RANK 1
+#define ATTRIBUTE_GET_NAME_TEST_GROUP_NAME "retrieve_attr_name_test"
+
+#define ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_ATTRIBUTE_NAME "invalid_params_attr_name_retrieval_attr"
+#define ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_SPACE_RANK 1
+#define ATTRIBUTE_GET_NAME_INVALID_PARAMS_TEST_GROUP_NAME "retrieve_attr_name_invalid_params_test"
+
+#define ATTRIBUTE_GET_INFO_TEST_SPACE_RANK 1
+#define ATTRIBUTE_GET_INFO_TEST_GROUP_NAME "attr_get_info_test"
+#define ATTRIBUTE_GET_INFO_TEST_ATTR_NAME "get_info_test_attr"
+#define ATTRIBUTE_GET_INFO_TEST_ATTR_NAME2 ATTRIBUTE_GET_INFO_TEST_ATTR_NAME "2"
+#define ATTRIBUTE_GET_INFO_TEST_ATTR_NAME3 ATTRIBUTE_GET_INFO_TEST_ATTR_NAME "3"
+
+#define ATTRIBUTE_GET_INFO_INVALID_PARAMS_TEST_SPACE_RANK 1
+#define ATTRIBUTE_GET_INFO_INVALID_PARAMS_TEST_GROUP_NAME "attr_get_info_invalid_params_test"
+#define ATTRIBUTE_GET_INFO_INVALID_PARAMS_TEST_ATTR_NAME "invalid_params_get_info_test_attr"
+
+#define ATTRIBUTE_RENAME_TEST_SPACE_RANK 1
+#define ATTRIBUTE_RENAME_TEST_GROUP_NAME "attr_rename_test"
+#define ATTRIBUTE_RENAME_TEST_ATTR_NAME "rename_test_attr"
+#define ATTRIBUTE_RENAME_TEST_ATTR_NAME2 "rename_test_attr2"
+#define ATTRIBUTE_RENAME_TEST_NEW_NAME "renamed_attr"
+#define ATTRIBUTE_RENAME_TEST_NEW_NAME2 "renamed_attr2"
+
+#define ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_SPACE_RANK 1
+#define ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_GROUP_NAME "attr_rename_invalid_params_test"
+#define ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME "invalid_params_rename_test_attr"
+#define ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_ATTR_NAME2 "invalid_params_rename_test_attr2"
+#define ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_NEW_NAME "invalid_params_renamed_attr"
+#define ATTRIBUTE_RENAME_INVALID_PARAMS_TEST_NEW_NAME2 "invalid_params_renamed_attr2"
+
+#define ATTRIBUTE_ITERATE_TEST_ATTR_NAME_BUF_SIZE 256
+#define ATTRIBUTE_ITERATE_TEST_DSET_SPACE_RANK 2
+#define ATTRIBUTE_ITERATE_TEST_ATTR_SPACE_RANK 1
+#define ATTRIBUTE_ITERATE_TEST_GRP_SUBGROUP_NAME "attribute_iterate_group_test"
+#define ATTRIBUTE_ITERATE_TEST_DSET_SUBGROUP_NAME "attribute_iterate_dset_test"
+#define ATTRIBUTE_ITERATE_TEST_DTYPE_SUBGROUP_NAME "attribute_iterate_datatype_test"
+#define ATTRIBUTE_ITERATE_TEST_DSET_NAME "attribute_iterate_dset"
+#define ATTRIBUTE_ITERATE_TEST_DTYPE_NAME "attribute_iterate_dtype"
+#define ATTRIBUTE_ITERATE_TEST_ATTR_NAME "iter_attr"
+#define ATTRIBUTE_ITERATE_TEST_NUM_ATTRS 4
+
+#define ATTRIBUTE_ITERATE_TEST_0_ATTRIBUTES_DSET_SPACE_RANK 2
+#define ATTRIBUTE_ITERATE_TEST_0_ATTRIBUTES_SUBGROUP_NAME "attribute_iterate_test_0_attributes"
+#define ATTRIBUTE_ITERATE_TEST_0_ATTRIBUTES_DSET_NAME "attribute_iterate_dset"
+
+#define ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_SPACE_RANK 1
+#define ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_SUBGROUP_NAME "attribute_iterate_invalid_params_test"
+#define ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_NAME "invalid_params_iter_attr1"
+#define ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_NAME2 "invalid_params_iter_attr2"
+#define ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_NAME3 "invalid_params_iter_attr3"
+#define ATTRIBUTE_ITERATE_INVALID_PARAMS_TEST_ATTR_NAME4 "invalid_params_iter_attr4"
+
+#define ATTRIBUTE_DELETION_TEST_SPACE_RANK 1
+#define ATTRIBUTE_DELETION_TEST_GROUP_NAME "attr_deletion_test"
+#define ATTRIBUTE_DELETION_TEST_ATTR_NAME "attr_to_be_deleted"
+#define ATTRIBUTE_DELETION_TEST_ATTR_NAME2 ATTRIBUTE_DELETION_TEST_ATTR_NAME "2"
+#define ATTRIBUTE_DELETION_TEST_ATTR_NAME3 ATTRIBUTE_DELETION_TEST_ATTR_NAME "3"
+
+#define ATTRIBUTE_DELETION_INVALID_PARAMS_TEST_SPACE_RANK 1
+#define ATTRIBUTE_DELETION_INVALID_PARAMS_TEST_GROUP_NAME "attr_deletion_invalid_params_test"
+#define ATTRIBUTE_DELETION_INVALID_PARAMS_TEST_ATTR_NAME "invalid_params_attr_to_be_deleted"
+
+#define ATTRIBUTE_EXISTS_TEST_GROUP_NAME "attr_exists_test"
+#define ATTRIBUTE_EXISTS_TEST_SPACE_RANK 1
+#define ATTRIBUTE_EXISTS_TEST_ATTR_NAME "attr_exists"
+
+#define ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_SPACE_RANK 1
+#define ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_GROUP_NAME "attr_exists_invalid_params_test"
+#define ATTRIBUTE_EXISTS_INVALID_PARAMS_TEST_ATTR_NAME "invalid_params_attr_exists"
+
+#define ATTRIBUTE_MANY_GROUP_NAME "group_for_many_attributes"
+#define ATTRIBUTE_MANY_NAME_BUF_SIZE 32U
+#define ATTRIBUTE_MANY_NUMB 64U
+#define ATTRIBUTE_MANY_SPACE_RANK 1
+
+#define ATTRIBUTE_DUPLICATE_ID_GRP_NAME "attr_duplicate_open_test"
+#define ATTRIBUTE_DUPLICATE_ID_ATTR_NAME "attr_duplicated_id"
+#define ATTRIBUTE_DUPLICATE_ID_SPACE_RANK 1
+
+#define ATTRIBUTE_GET_NUM_ATTRS_TEST_GRP_NAME "get_num_attrs_test"
+#define ATTRIBUTE_GET_NUM_ATTRS_TEST_ATTR_NAME "get_num_attrs_test_attribute"
+#define ATTRIBUTE_GET_NUM_ATTRS_TEST_SPACE_RANK 1
+
+#define ATTRIBUTE_SHARED_DTYPE_NAME "Datatype"
+#define ATTRIBUTE_SHARED_DTYPE_GROUP_NAME "shared_dtype_group"
+#define ATTRIBUTE_SHARED_DTYPE_ATTR_NAME "shared_dtype_attr"
+#define ATTRIBUTE_SHARED_DTYPE_DSET_NAME "shared_dtype_dset"
+#define ATTRIBUTE_SHARED_DTYPE_SPACE_RANK 1
+
+#endif
diff --git a/test/API/H5_api_dataset_test.c b/test/API/H5_api_dataset_test.c
new file mode 100644
index 0000000..35a19f3
--- /dev/null
+++ b/test/API/H5_api_dataset_test.c
@@ -0,0 +1,11683 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_dataset_test.h"
+
+/*
+ * XXX: H5Dread_chunk/H5Dwrite_chunk, H5Dfill/scatter/gather
+ */
+
+static int test_create_dataset_under_root(void);
+static int test_create_dataset_under_existing_group(void);
+static int test_create_dataset_invalid_params(void);
+static int test_create_anonymous_dataset(void);
+static int test_create_anonymous_dataset_invalid_params(void);
+static int test_create_dataset_null_space(void);
+static int test_create_dataset_scalar_space(void);
+static int test_create_zero_dim_dset(void);
+static int test_create_dataset_random_shapes(void);
+static int test_create_dataset_predefined_types(void);
+static int test_create_dataset_string_types(void);
+static int test_create_dataset_compound_types(void);
+static int test_create_dataset_enum_types(void);
+static int test_create_dataset_array_types(void);
+static int test_create_dataset_creation_properties(void);
+static int test_create_many_dataset(void);
+static int test_open_dataset(void);
+static int test_open_dataset_invalid_params(void);
+static int test_close_dataset_invalid_params(void);
+static int test_get_dataset_space_and_type(void);
+static int test_get_dataset_space_and_type_invalid_params(void);
+static int test_get_dataset_space_status(void);
+static int test_get_dataset_space_status_invalid_params(void);
+static int test_dataset_property_lists(void);
+static int test_get_dataset_storage_size(void);
+static int test_get_dataset_storage_size_invalid_params(void);
+static int test_get_dataset_chunk_storage_size(void);
+static int test_get_dataset_chunk_storage_size_invalid_params(void);
+static int test_get_dataset_offset(void);
+static int test_get_dataset_offset_invalid_params(void);
+static int test_read_dataset_small_all(void);
+static int test_read_dataset_small_hyperslab(void);
+static int test_read_dataset_small_point_selection(void);
+static int test_dataset_io_point_selections(void);
+#ifndef NO_LARGE_TESTS
+static int test_read_dataset_large_all(void);
+static int test_read_dataset_large_hyperslab(void);
+static int test_read_dataset_large_point_selection(void);
+#endif
+static int test_read_dataset_invalid_params(void);
+static int test_write_dataset_small_all(void);
+static int test_write_dataset_small_hyperslab(void);
+static int test_write_dataset_small_point_selection(void);
+#ifndef NO_LARGE_TESTS
+static int test_write_dataset_large_all(void);
+static int test_write_dataset_large_hyperslab(void);
+static int test_write_dataset_large_point_selection(void);
+#endif
+static int test_write_dataset_data_verification(void);
+static int test_write_dataset_invalid_params(void);
+static int test_dataset_builtin_type_conversion(void);
+static int test_dataset_compound_partial_io(void);
+static int test_dataset_set_extent_chunked_unlimited(void);
+static int test_dataset_set_extent_chunked_fixed(void);
+static int test_dataset_set_extent_data(void);
+static int test_dataset_set_extent_double_handles(void);
+static int test_dataset_set_extent_invalid_params(void);
+static int test_flush_dataset(void);
+static int test_flush_dataset_invalid_params(void);
+static int test_refresh_dataset(void);
+static int test_refresh_dataset_invalid_params(void);
+
+/*
+ * Chunking tests
+ */
+static int test_create_single_chunk_dataset(void);
+static int test_write_single_chunk_dataset(void);
+static int test_create_multi_chunk_dataset(void);
+static int test_write_multi_chunk_dataset_same_shape_read(void);
+static int test_write_multi_chunk_dataset_diff_shape_read(void);
+static int test_overwrite_multi_chunk_dataset_same_shape_read(void);
+static int test_overwrite_multi_chunk_dataset_diff_shape_read(void);
+static int test_read_partial_chunk_all_selection(void);
+static int test_read_partial_chunk_hyperslab_selection(void);
+static int test_read_partial_chunk_point_selection(void);
+
+static int test_get_vlen_buf_size(void);
+
+/*
+ * The array of dataset tests to be performed.
+ */
+static int (*dataset_tests[])(void) = {
+ test_create_dataset_under_root,
+ test_create_dataset_under_existing_group,
+ test_create_dataset_invalid_params,
+ test_create_anonymous_dataset,
+ test_create_anonymous_dataset_invalid_params,
+ test_create_dataset_null_space,
+ test_create_dataset_scalar_space,
+ test_create_zero_dim_dset,
+ test_create_dataset_random_shapes,
+ test_create_dataset_predefined_types,
+ test_create_dataset_string_types,
+ test_create_dataset_compound_types,
+ test_create_dataset_enum_types,
+ test_create_dataset_array_types,
+ test_create_dataset_creation_properties,
+ test_create_many_dataset,
+ test_open_dataset,
+ test_open_dataset_invalid_params,
+ test_close_dataset_invalid_params,
+ test_get_dataset_space_and_type,
+ test_get_dataset_space_and_type_invalid_params,
+ test_get_dataset_space_status,
+ test_get_dataset_space_status_invalid_params,
+ test_dataset_property_lists,
+ test_get_dataset_storage_size,
+ test_get_dataset_storage_size_invalid_params,
+ test_get_dataset_chunk_storage_size,
+ test_get_dataset_chunk_storage_size_invalid_params,
+ test_get_dataset_offset,
+ test_get_dataset_offset_invalid_params,
+ test_read_dataset_small_all,
+ test_read_dataset_small_hyperslab,
+ test_read_dataset_small_point_selection,
+ test_dataset_io_point_selections,
+#ifndef NO_LARGE_TESTS
+ test_read_dataset_large_all,
+ test_read_dataset_large_hyperslab,
+ test_read_dataset_large_point_selection,
+#endif
+ test_read_dataset_invalid_params,
+ test_write_dataset_small_all,
+ test_write_dataset_small_hyperslab,
+ test_write_dataset_small_point_selection,
+#ifndef NO_LARGE_TESTS
+ test_write_dataset_large_all,
+ test_write_dataset_large_hyperslab,
+ test_write_dataset_large_point_selection,
+#endif
+ test_write_dataset_data_verification,
+ test_write_dataset_invalid_params,
+ test_dataset_builtin_type_conversion,
+ test_dataset_compound_partial_io,
+ test_dataset_set_extent_chunked_unlimited,
+ test_dataset_set_extent_chunked_fixed,
+ test_dataset_set_extent_data,
+ test_dataset_set_extent_double_handles,
+ test_dataset_set_extent_invalid_params,
+ test_flush_dataset,
+ test_flush_dataset_invalid_params,
+ test_refresh_dataset,
+ test_refresh_dataset_invalid_params,
+ test_create_single_chunk_dataset,
+ test_write_single_chunk_dataset,
+ test_create_multi_chunk_dataset,
+ test_write_multi_chunk_dataset_same_shape_read,
+ test_write_multi_chunk_dataset_diff_shape_read,
+ test_overwrite_multi_chunk_dataset_same_shape_read,
+ test_overwrite_multi_chunk_dataset_diff_shape_read,
+ test_read_partial_chunk_all_selection,
+ test_read_partial_chunk_hyperslab_selection,
+ test_read_partial_chunk_point_selection,
+ test_get_vlen_buf_size,
+};
+
+/*
+ * A test to check that a dataset can be
+ * created under the root group.
+ */
+static int
+test_create_dataset_under_root(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dset_dtype = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+
+ TESTING("dataset creation under root group");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((fspace_id = generate_random_dataspace(DATASET_CREATE_UNDER_ROOT_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+ TEST_ERROR;
+
+ /* Create the Dataset under the root group of the file */
+ if ((dset_id = H5Dcreate2(file_id, DATASET_CREATE_UNDER_ROOT_DSET_NAME, dset_dtype, fspace_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_CREATE_UNDER_ROOT_DSET_NAME);
+ goto error;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dset_dtype) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ H5Tclose(dset_dtype);
+ H5Dclose(dset_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a dataset can be created
+ * under a group that is not the root group.
+ */
+static int
+test_create_dataset_under_existing_group(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dset_dtype = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+
+ TESTING("dataset creation under an existing group");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_CREATE_UNDER_EXISTING_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n", DATASET_CREATE_UNDER_EXISTING_GROUP_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = generate_random_dataspace(DATASET_CREATE_UNDER_EXISTING_SPACE_RANK, NULL, NULL, FALSE)) <
+ 0)
+ TEST_ERROR;
+
+ if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_CREATE_UNDER_EXISTING_DSET_NAME, dset_dtype, fspace_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_CREATE_UNDER_EXISTING_DSET_NAME);
+ goto error;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dset_dtype) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ H5Tclose(dset_dtype);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a dataset can't be created
+ * when H5Dcreate is passed invalid parameters.
+ */
+static int
+test_create_dataset_invalid_params(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dset_dtype = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("H5Dcreate with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_CREATE_INVALID_PARAMS_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n", DATASET_CREATE_INVALID_PARAMS_GROUP_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = generate_random_dataspace(DATASET_CREATE_INVALID_PARAMS_SPACE_RANK, NULL, NULL, FALSE)) <
+ 0)
+ TEST_ERROR;
+
+ if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Dcreate_invalid_loc_id)
+ {
+ TESTING_2("H5Dcreate with an invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ dset_id = H5Dcreate2(H5I_INVALID_HID, DATASET_CREATE_INVALID_PARAMS_DSET_NAME, dset_dtype,
+ fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (dset_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created dataset using H5Dcreate with an invalid loc_id!\n");
+ H5Dclose(dset_id);
+ PART_ERROR(H5Dcreate_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dcreate_invalid_loc_id);
+
+ PART_BEGIN(H5Dcreate_invalid_dataset_name)
+ {
+ TESTING_2("H5Dcreate with an invalid dataset name");
+
+ H5E_BEGIN_TRY
+ {
+ dset_id =
+ H5Dcreate2(group_id, NULL, dset_dtype, fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (dset_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created dataset using H5Dcreate with a NULL dataset name!\n");
+ H5Dclose(dset_id);
+ PART_ERROR(H5Dcreate_invalid_dataset_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ dset_id =
+ H5Dcreate2(group_id, "", dset_dtype, fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (dset_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created dataset using H5Dcreate with an invalid dataset name of ''!\n");
+ H5Dclose(dset_id);
+ PART_ERROR(H5Dcreate_invalid_dataset_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dcreate_invalid_dataset_name);
+
+ PART_BEGIN(H5Dcreate_invalid_datatype)
+ {
+ TESTING_2("H5Dcreate with an invalid datatype");
+
+ H5E_BEGIN_TRY
+ {
+ dset_id = H5Dcreate2(group_id, DATASET_CREATE_INVALID_PARAMS_DSET_NAME, H5I_INVALID_HID,
+ fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (dset_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created dataset using H5Dcreate with an invalid datatype!\n");
+ H5Dclose(dset_id);
+ PART_ERROR(H5Dcreate_invalid_datatype);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dcreate_invalid_datatype);
+
+ PART_BEGIN(H5Dcreate_invalid_dataspace)
+ {
+ TESTING_2("H5Dcreate with an invalid dataspace");
+
+ H5E_BEGIN_TRY
+ {
+ dset_id = H5Dcreate2(group_id, DATASET_CREATE_INVALID_PARAMS_DSET_NAME, dset_dtype,
+ H5I_INVALID_HID, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (dset_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created dataset using H5Dcreate with an invalid dataspace!\n");
+ H5Dclose(dset_id);
+ PART_ERROR(H5Dcreate_invalid_dataspace);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dcreate_invalid_dataspace);
+
+ PART_BEGIN(H5Dcreate_invalid_lcpl)
+ {
+ TESTING_2("H5Dcreate with an invalid LCPL");
+
+ H5E_BEGIN_TRY
+ {
+ dset_id = H5Dcreate2(group_id, DATASET_CREATE_INVALID_PARAMS_DSET_NAME, dset_dtype, fspace_id,
+ H5I_INVALID_HID, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (dset_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created dataset using H5Dcreate with an invalid LCPL!\n");
+ H5Dclose(dset_id);
+ PART_ERROR(H5Dcreate_invalid_lcpl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dcreate_invalid_lcpl);
+
+ PART_BEGIN(H5Dcreate_invalid_dcpl)
+ {
+ TESTING_2("H5Dcreate with an invalid DCPL");
+
+ H5E_BEGIN_TRY
+ {
+ dset_id = H5Dcreate2(group_id, DATASET_CREATE_INVALID_PARAMS_DSET_NAME, dset_dtype, fspace_id,
+ H5P_DEFAULT, H5I_INVALID_HID, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (dset_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created dataset using H5Dcreate with an invalid DCPL!\n");
+ H5Dclose(dset_id);
+ PART_ERROR(H5Dcreate_invalid_dcpl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dcreate_invalid_dcpl);
+
+ PART_BEGIN(H5Dcreate_invalid_dapl)
+ {
+ TESTING_2("H5Dcreate with an invalid DAPL");
+
+ H5E_BEGIN_TRY
+ {
+ dset_id = H5Dcreate2(group_id, DATASET_CREATE_INVALID_PARAMS_DSET_NAME, dset_dtype, fspace_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (dset_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created dataset using H5Dcreate with an invalid DAPL!\n");
+ H5Dclose(dset_id);
+ PART_ERROR(H5Dcreate_invalid_dapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dcreate_invalid_dapl);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dset_dtype) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ H5Tclose(dset_dtype);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that an anonymous dataset can be created.
+ */
+static int
+test_create_anonymous_dataset(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dset_dtype = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+
+ TESTING("anonymous dataset creation");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_CREATE_ANONYMOUS_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n", DATASET_CREATE_ANONYMOUS_GROUP_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = generate_random_dataspace(DATASET_CREATE_ANONYMOUS_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate_anon(group_id, dset_dtype, fspace_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create anonymous dataset\n");
+ goto error;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dset_dtype) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ H5Tclose(dset_dtype);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that an anonymous dataset can't
+ * be created when H5Dcreate_anon is passed invalid
+ * parameters.
+ */
+static int
+test_create_anonymous_dataset_invalid_params(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t dset_dtype = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("anonymous dataset creation with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_CREATE_ANONYMOUS_INVALID_PARAMS_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container group '%s'\n",
+                 DATASET_CREATE_ANONYMOUS_INVALID_PARAMS_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = generate_random_dataspace(DATASET_CREATE_ANONYMOUS_INVALID_PARAMS_SPACE_RANK, NULL, NULL,
+                                               FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    /*
+     * Each part below passes exactly one invalid argument to H5Dcreate_anon
+     * (wrapped in H5E_BEGIN_TRY to suppress the expected error stack) and
+     * fails the part if a valid dataset ID is nevertheless returned.
+     */
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Dcreate_anon_invalid_loc_id)
+        {
+            TESTING_2("H5Dcreate_anon with an invalid loc_id");
+
+            H5E_BEGIN_TRY
+            {
+                dset_id = H5Dcreate_anon(H5I_INVALID_HID, dset_dtype, fspace_id, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (dset_id >= 0) {
+                H5_FAILED();
+                HDprintf("    created anonymous dataset using an invalid loc_id!\n");
+                H5Dclose(dset_id);
+                PART_ERROR(H5Dcreate_anon_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dcreate_anon_invalid_loc_id);
+
+        PART_BEGIN(H5Dcreate_anon_invalid_datatype)
+        {
+            TESTING_2("H5Dcreate_anon with an invalid dataset datatype");
+
+            H5E_BEGIN_TRY
+            {
+                dset_id = H5Dcreate_anon(group_id, H5I_INVALID_HID, fspace_id, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (dset_id >= 0) {
+                H5_FAILED();
+                HDprintf("    created anonymous dataset using an invalid dataset datatype!\n");
+                H5Dclose(dset_id);
+                PART_ERROR(H5Dcreate_anon_invalid_datatype);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dcreate_anon_invalid_datatype);
+
+        PART_BEGIN(H5Dcreate_anon_invalid_dataspace)
+        {
+            TESTING_2("H5Dcreate_anon with an invalid dataset dataspace");
+
+            H5E_BEGIN_TRY
+            {
+                dset_id = H5Dcreate_anon(group_id, dset_dtype, H5I_INVALID_HID, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (dset_id >= 0) {
+                H5_FAILED();
+                HDprintf("    created anonymous dataset using an invalid dataset dataspace!\n");
+                H5Dclose(dset_id);
+                PART_ERROR(H5Dcreate_anon_invalid_dataspace);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dcreate_anon_invalid_dataspace);
+
+        PART_BEGIN(H5Dcreate_anon_invalid_dcpl)
+        {
+            TESTING_2("H5Dcreate_anon with an invalid DCPL");
+
+            H5E_BEGIN_TRY
+            {
+                dset_id = H5Dcreate_anon(group_id, dset_dtype, fspace_id, H5I_INVALID_HID, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (dset_id >= 0) {
+                H5_FAILED();
+                HDprintf("    created anonymous dataset using an invalid DCPL!\n");
+                H5Dclose(dset_id);
+                PART_ERROR(H5Dcreate_anon_invalid_dcpl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dcreate_anon_invalid_dcpl);
+
+        PART_BEGIN(H5Dcreate_anon_invalid_dapl)
+        {
+            TESTING_2("H5Dcreate_anon with an invalid DAPL");
+
+            H5E_BEGIN_TRY
+            {
+                dset_id = H5Dcreate_anon(group_id, dset_dtype, fspace_id, H5P_DEFAULT, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (dset_id >= 0) {
+                H5_FAILED();
+                HDprintf("    created anonymous dataset using an invalid DAPL!\n");
+                H5Dclose(dset_id);
+                PART_ERROR(H5Dcreate_anon_invalid_dapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dcreate_anon_invalid_dapl);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; H5E_BEGIN_TRY silences errors from IDs that were never opened */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(fspace_id);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that creating a dataset with a NULL
+ * dataspace is not problematic.
+ */
+static int
+test_create_dataset_null_space(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t dset_dtype = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+
+    TESTING("dataset creation with a NULL dataspace");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_CREATE_NULL_DATASPACE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 DATASET_CREATE_NULL_DATASPACE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* H5S_NULL dataspace: no extent and no elements */
+    if ((fspace_id = H5Screate(H5S_NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_CREATE_NULL_DATASPACE_TEST_DSET_NAME, dset_dtype, fspace_id,
+                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_CREATE_NULL_DATASPACE_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+
+    /* Close and re-open to verify the NULL-dataspace dataset persists correctly */
+    if ((dset_id = H5Dopen2(group_id, DATASET_CREATE_NULL_DATASPACE_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n", DATASET_CREATE_NULL_DATASPACE_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors from invalid IDs are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(fspace_id);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that creating a dataset with a scalar
+ * dataspace is not problematic.
+ */
+static int
+test_create_dataset_scalar_space(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t dset_dtype = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+
+    TESTING("dataset creation with a SCALAR dataspace");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_CREATE_SCALAR_DATASPACE_TEST_SUBGROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 DATASET_CREATE_SCALAR_DATASPACE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* H5S_SCALAR dataspace: exactly one element, rank 0 */
+    if ((fspace_id = H5Screate(H5S_SCALAR)) < 0)
+        TEST_ERROR;
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_CREATE_SCALAR_DATASPACE_TEST_DSET_NAME, dset_dtype, fspace_id,
+                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_CREATE_SCALAR_DATASPACE_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+
+    /* Close and re-open to verify the scalar-dataspace dataset persists correctly */
+    if ((dset_id = H5Dopen2(group_id, DATASET_CREATE_SCALAR_DATASPACE_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n", DATASET_CREATE_SCALAR_DATASPACE_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors from invalid IDs are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(fspace_id);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that creating a dataset with a dataspace
+ * which contains a 0-sized dimension is not problematic.
+ */
+static int
+test_create_zero_dim_dset(void)
+{
+    hsize_t dims[ZERO_DIM_DSET_TEST_SPACE_RANK] = {0};
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID;
+    hid_t group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+    int data[1];
+
+    TESTING("creation of 0-sized dataset");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    /*
+     * This is a dataset test, so place the subgroup under the dataset test
+     * container group (previously this mistakenly opened the datatype test
+     * group, DATATYPE_TEST_GROUP_NAME, a copy-paste error).
+     */
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ZERO_DIM_DSET_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", ZERO_DIM_DSET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Rank-1 dataspace with a single dimension of size 0 -> zero elements */
+    if ((fspace_id = H5Screate_simple(1, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, ZERO_DIM_DSET_TEST_DSET_NAME, H5T_NATIVE_INT, fspace_id, H5P_DEFAULT,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to create 0-sized dataset\n");
+        goto error;
+    }
+
+    if (H5Sselect_none(fspace_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to set none selection in dataset's file dataspace\n");
+        goto error;
+    }
+
+    /* Attempt to write 0 elements to dataset */
+    if (H5Dwrite(dset_id, H5T_NATIVE_INT, fspace_id, fspace_id, H5P_DEFAULT, data) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to write 0 elements to 0-sized dataset\n");
+        goto error;
+    }
+
+    /* Attempt to read 0 elements from dataset */
+    if (H5Dread(dset_id, H5T_NATIVE_INT, fspace_id, fspace_id, H5P_DEFAULT, data) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to read 0 elements from 0-sized dataset\n");
+        goto error;
+    }
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors from invalid IDs are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a dataset can be created with
+ * a variety of different dataspace shapes.
+ */
+static int
+test_create_dataset_random_shapes(void)
+{
+    size_t i;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID, space_id = H5I_INVALID_HID;
+    hid_t dset_dtype = H5I_INVALID_HID;
+
+    TESTING("dataset creation with random dimension sizes");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_SHAPE_TEST_SUBGROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group\n");
+        goto error;
+    }
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    /* Create several datasets, each with a random rank and random dimension sizes */
+    for (i = 0; i < DATASET_SHAPE_TEST_NUM_ITERATIONS; i++) {
+        char name[100];
+        int ndims = rand() % DATASET_SHAPE_TEST_MAX_DIMS + 1;
+
+        if ((space_id = generate_random_dataspace(ndims, NULL, NULL, FALSE)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create dataspace\n");
+            goto error;
+        }
+
+        /* Use bounded formatting (matches HDsnprintf usage elsewhere in this file)
+         * instead of the unbounded HDsprintf */
+        HDsnprintf(name, sizeof(name), "%s%zu", DATASET_SHAPE_TEST_DSET_BASE_NAME, i + 1);
+
+        if ((dset_id = H5Dcreate2(group_id, name, dset_dtype, space_id, H5P_DEFAULT, H5P_DEFAULT,
+                                  H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create dataset\n");
+            goto error;
+        }
+
+        if (H5Sclose(space_id) < 0)
+            TEST_ERROR;
+        if (H5Dclose(dset_id) < 0)
+            TEST_ERROR;
+    }
+
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors from invalid IDs are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a dataset can be created using
+ * each of the predefined integer and floating-point
+ * datatypes.
+ */
+static int
+test_create_dataset_predefined_types(void)
+{
+    size_t i;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    /* One dataset is created per predefined integer/floating-point type below */
+    hid_t predefined_type_test_table[] = {H5T_STD_U8LE, H5T_STD_U8BE, H5T_STD_I8LE, H5T_STD_I8BE,
+                                          H5T_STD_U16LE, H5T_STD_U16BE, H5T_STD_I16LE, H5T_STD_I16BE,
+                                          H5T_STD_U32LE, H5T_STD_U32BE, H5T_STD_I32LE, H5T_STD_I32BE,
+                                          H5T_STD_U64LE, H5T_STD_U64BE, H5T_STD_I64LE, H5T_STD_I64BE,
+                                          H5T_IEEE_F32LE, H5T_IEEE_F32BE, H5T_IEEE_F64LE, H5T_IEEE_F64BE};
+
+    TESTING("dataset creation with predefined datatypes");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_PREDEFINED_TYPE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create sub-container group '%s'\n",
+                 DATASET_PREDEFINED_TYPE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    for (i = 0; i < ARRAY_LENGTH(predefined_type_test_table); i++) {
+        char name[100];
+
+        if ((fspace_id =
+                 generate_random_dataspace(DATASET_PREDEFINED_TYPE_TEST_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+            TEST_ERROR;
+
+        /* Use bounded formatting (matches HDsnprintf usage elsewhere in this file)
+         * instead of the unbounded HDsprintf */
+        HDsnprintf(name, sizeof(name), "%s%zu", DATASET_PREDEFINED_TYPE_TEST_BASE_NAME, i);
+
+        if ((dset_id = H5Dcreate2(group_id, name, predefined_type_test_table[i], fspace_id, H5P_DEFAULT,
+                                  H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create dataset '%s'\n", name);
+            goto error;
+        }
+
+        if (H5Sclose(fspace_id) < 0)
+            TEST_ERROR;
+        if (H5Dclose(dset_id) < 0)
+            TEST_ERROR;
+
+        /* Re-open each dataset to verify it persisted correctly */
+        if ((dset_id = H5Dopen2(group_id, name, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to open dataset '%s'\n", name);
+            goto error;
+        }
+
+        if (H5Dclose(dset_id) < 0)
+            TEST_ERROR;
+    }
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors from invalid IDs are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a dataset can be created using
+ * string datatypes.
+ */
+static int
+test_create_dataset_string_types(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id_fixed = H5I_INVALID_HID, dset_id_variable = H5I_INVALID_HID;
+    hid_t type_id_fixed = H5I_INVALID_HID, type_id_variable = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("dataset creation with string types");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_STRING_TYPE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATASET_STRING_TYPE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Fixed-length string type: every element occupies exactly this many bytes */
+    if ((type_id_fixed = H5Tcreate(H5T_STRING, DATASET_STRING_TYPE_TEST_STRING_LENGTH)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create fixed-length string type\n");
+        goto error;
+    }
+
+    /* Variable-length string type: element size is H5T_VARIABLE */
+    if ((type_id_variable = H5Tcreate(H5T_STRING, H5T_VARIABLE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create variable-length string type\n");
+        goto error;
+    }
+
+    if ((fspace_id = generate_random_dataspace(DATASET_STRING_TYPE_TEST_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Dcreate_fixed_string_type)
+        {
+            TESTING_2("creation of fixed-size string dataset");
+
+            if ((dset_id_fixed = H5Dcreate2(group_id, DATASET_STRING_TYPE_TEST_DSET_NAME1, type_id_fixed,
+                                            fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create fixed-length string dataset '%s'\n",
+                         DATASET_STRING_TYPE_TEST_DSET_NAME1);
+                PART_ERROR(H5Dcreate_fixed_string_type);
+            }
+
+            /* Close the freshly-created dataset (silently) so it can be re-opened below */
+            if (dset_id_fixed >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id_fixed);
+                }
+                H5E_END_TRY;
+                dset_id_fixed = H5I_INVALID_HID;
+            }
+
+            if ((dset_id_fixed = H5Dopen2(group_id, DATASET_STRING_TYPE_TEST_DSET_NAME1, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to open dataset '%s'\n", DATASET_STRING_TYPE_TEST_DSET_NAME1);
+                PART_ERROR(H5Dcreate_fixed_string_type);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dcreate_fixed_string_type);
+
+        PART_BEGIN(H5Dcreate_variable_string_type)
+        {
+            TESTING_2("creation of variable-length string dataset");
+
+            if ((dset_id_variable =
+                     H5Dcreate2(group_id, DATASET_STRING_TYPE_TEST_DSET_NAME2, type_id_variable, fspace_id,
+                                H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create variable-length string dataset '%s'\n",
+                         DATASET_STRING_TYPE_TEST_DSET_NAME2);
+                PART_ERROR(H5Dcreate_variable_string_type);
+            }
+
+            /* Close the freshly-created dataset (silently) so it can be re-opened below */
+            if (dset_id_variable >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id_variable);
+                }
+                H5E_END_TRY;
+                dset_id_variable = H5I_INVALID_HID;
+            }
+
+            if ((dset_id_variable = H5Dopen2(group_id, DATASET_STRING_TYPE_TEST_DSET_NAME2, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    failed to open dataset '%s'\n", DATASET_STRING_TYPE_TEST_DSET_NAME2);
+                PART_ERROR(H5Dcreate_variable_string_type);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dcreate_variable_string_type);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Tclose(type_id_fixed) < 0)
+        TEST_ERROR;
+    if (H5Tclose(type_id_variable) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id_fixed) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id_variable) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors from invalid IDs are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id_fixed);
+        H5Tclose(type_id_variable);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id_fixed);
+        H5Dclose(dset_id_variable);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a dataset can be created using
+ * a variety of compound datatypes.
+ */
+static int
+test_create_dataset_compound_types(void)
+{
+    size_t i, j;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t compound_type = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+    hid_t type_pool[DATASET_COMPOUND_TYPE_TEST_MAX_SUBTYPES];
+    int num_passes;
+
+    TESTING("dataset creation with compound datatypes");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    /*
+     * Make sure to pre-initialize all the compound field IDs
+     * so we don't try to close an uninitialized ID value;
+     * memory checkers will likely complain.
+     */
+    for (j = 0; j < DATASET_COMPOUND_TYPE_TEST_MAX_SUBTYPES; j++)
+        type_pool[j] = H5I_INVALID_HID;
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_COMPOUND_TYPE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATASET_COMPOUND_TYPE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = generate_random_dataspace(DATASET_COMPOUND_TYPE_TEST_DSET_RANK, NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    /* Each pass builds one randomly-shaped compound type and creates a dataset with it */
+    num_passes = (rand() % DATASET_COMPOUND_TYPE_TEST_MAX_PASSES) + 1;
+
+    for (i = 0; i < (size_t)num_passes; i++) {
+        size_t num_subtypes;
+        size_t compound_size = 0;
+        size_t next_offset = 0;
+        char dset_name[256];
+
+        /*
+         * Also pre-initialize all of the compound field IDs at the
+         * beginning of each loop so that we don't try to close an
+         * invalid ID.
+         */
+        for (j = 0; j < DATASET_COMPOUND_TYPE_TEST_MAX_SUBTYPES; j++)
+            type_pool[j] = H5I_INVALID_HID;
+
+        num_subtypes = (size_t)(rand() % DATASET_COMPOUND_TYPE_TEST_MAX_SUBTYPES) + 1;
+
+        /* Created with a minimal (1-byte) size; grown via H5Tset_size as members are added */
+        if ((compound_type = H5Tcreate(H5T_COMPOUND, 1)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create compound datatype\n");
+            goto error;
+        }
+
+        /* Start adding subtypes to the compound type */
+        for (j = 0; j < num_subtypes; j++) {
+            size_t member_size;
+            char member_name[256];
+
+            HDsnprintf(member_name, 256, "member%zu", j);
+
+            if ((type_pool[j] = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create compound datatype member %zu\n", j);
+                goto error;
+            }
+
+            if (!(member_size = H5Tget_size(type_pool[j]))) {
+                H5_FAILED();
+                HDprintf("    couldn't get compound member %zu size\n", j);
+                goto error;
+            }
+
+            compound_size += member_size;
+
+            /* Grow the compound before inserting so the new member fits at next_offset */
+            if (H5Tset_size(compound_type, compound_size) < 0)
+                TEST_ERROR;
+
+            if (H5Tinsert(compound_type, member_name, next_offset, type_pool[j]) < 0)
+                TEST_ERROR;
+
+            next_offset += member_size;
+        }
+
+        /* Remove any padding between members */
+        if (H5Tpack(compound_type) < 0)
+            TEST_ERROR;
+
+        HDsnprintf(dset_name, sizeof(dset_name), "%s%zu", DATASET_COMPOUND_TYPE_TEST_DSET_NAME, i);
+
+        if ((dset_id = H5Dcreate2(group_id, dset_name, compound_type, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
+                                  H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create dataset '%s'\n", dset_name);
+            goto error;
+        }
+
+        if (H5Dclose(dset_id) < 0)
+            TEST_ERROR;
+
+        /* Re-open the dataset to verify it persisted correctly */
+        if ((dset_id = H5Dopen2(group_id, dset_name, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to open dataset '%s'\n", dset_name);
+            goto error;
+        }
+
+        for (j = 0; j < num_subtypes; j++)
+            if (type_pool[j] >= 0 && H5Tclose(type_pool[j]) < 0)
+                TEST_ERROR;
+        if (H5Tclose(compound_type) < 0)
+            TEST_ERROR;
+        if (H5Dclose(dset_id) < 0)
+            TEST_ERROR;
+    }
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors from invalid IDs are suppressed */
+    H5E_BEGIN_TRY
+    {
+        for (i = 0; i < DATASET_COMPOUND_TYPE_TEST_MAX_SUBTYPES; i++)
+            H5Tclose(type_pool[i]);
+        H5Tclose(compound_type);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a dataset can be created with
+ * enum datatypes.
+ */
+static int
+test_create_dataset_enum_types(void)
+{
+    size_t i;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id_native = H5I_INVALID_HID, dset_id_non_native = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+    hid_t enum_native = H5I_INVALID_HID, enum_non_native = H5I_INVALID_HID;
+    const char *enum_type_test_table[] = {"RED", "GREEN", "BLUE", "BLACK", "WHITE",
+                                          "PURPLE", "ORANGE", "YELLOW", "BROWN"};
+
+    TESTING("dataset creation with enum types");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_ENUM_TYPE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATASET_ENUM_TYPE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    if ((enum_native = H5Tcreate(H5T_ENUM, sizeof(int))) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create native enum type\n");
+        goto error;
+    }
+
+    for (i = 0; i < ARRAY_LENGTH(enum_type_test_table); i++) {
+        /* Pass a value of the enum's base type (int). Passing &i directly (a
+         * size_t) made H5Tenum_insert read only sizeof(int) bytes of a larger
+         * integer, which yields wrong member values on big-endian platforms.
+         */
+        int enum_val = (int)i;
+
+        if (H5Tenum_insert(enum_native, enum_type_test_table[i], &enum_val) < 0)
+            TEST_ERROR;
+    }
+
+    if ((enum_non_native = H5Tenum_create(H5T_STD_U32LE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create non-native enum type\n");
+        goto error;
+    }
+
+    for (i = 0; i < DATASET_ENUM_TYPE_TEST_NUM_MEMBERS; i++) {
+        char val_name[15];
+        /* 32-bit value matching the u32 base type; see note above about &i.
+         * NOTE(review): value is supplied in native byte order — confirm whether
+         * H5Tenum_insert expects the base type's (LE) byte order here.
+         */
+        unsigned int enum_val = (unsigned int)i;
+
+        /* Bounded formatting instead of unbounded HDsprintf into a 15-byte buffer */
+        HDsnprintf(val_name, sizeof(val_name), "%s%zu", DATASET_ENUM_TYPE_TEST_VAL_BASE_NAME, i);
+
+        if (H5Tenum_insert(enum_non_native, val_name, &enum_val) < 0)
+            TEST_ERROR;
+    }
+
+    if ((fspace_id = generate_random_dataspace(DATASET_ENUM_TYPE_TEST_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id_native = H5Dcreate2(group_id, DATASET_ENUM_TYPE_TEST_DSET_NAME1, enum_native, fspace_id,
+                                     H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create native enum dataset '%s'\n", DATASET_ENUM_TYPE_TEST_DSET_NAME1);
+        goto error;
+    }
+
+    if ((dset_id_non_native = H5Dcreate2(group_id, DATASET_ENUM_TYPE_TEST_DSET_NAME2, enum_non_native,
+                                         fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create non-native enum dataset '%s'\n", DATASET_ENUM_TYPE_TEST_DSET_NAME2);
+        goto error;
+    }
+
+    if (H5Dclose(dset_id_native) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id_non_native) < 0)
+        TEST_ERROR;
+
+    /* Re-open both datasets to verify they persisted correctly */
+    if ((dset_id_native = H5Dopen2(group_id, DATASET_ENUM_TYPE_TEST_DSET_NAME1, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to open dataset '%s'\n", DATASET_ENUM_TYPE_TEST_DSET_NAME1);
+        goto error;
+    }
+
+    if ((dset_id_non_native = H5Dopen2(group_id, DATASET_ENUM_TYPE_TEST_DSET_NAME2, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to open dataset '%s'\n", DATASET_ENUM_TYPE_TEST_DSET_NAME2);
+        goto error;
+    }
+
+    if (H5Tclose(enum_native) < 0)
+        TEST_ERROR;
+    if (H5Tclose(enum_non_native) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id_native) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id_non_native) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors from invalid IDs are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(enum_native);
+        H5Tclose(enum_non_native);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id_native);
+        H5Dclose(dset_id_non_native);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a dataset can be created using
+ * array datatypes.
+ *
+ * Creates three datasets under a fresh sub-group: two using
+ * randomly-generated array datatypes of ranks RANK1 and RANK2, and a
+ * third using a nested array type (an array whose base type is itself
+ * an array).  Each dataset is then closed and re-opened to verify that
+ * creation actually persisted.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_create_dataset_array_types(void)
+{
+    hsize_t array_dims1[DATASET_ARRAY_TYPE_TEST_RANK1];
+    hsize_t array_dims2[DATASET_ARRAY_TYPE_TEST_RANK2];
+    hsize_t array_dims3[DATASET_ARRAY_TYPE_TEST_RANK3];
+    size_t  i;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id1 = H5I_INVALID_HID, dset_id2 = H5I_INVALID_HID, dset_id3 = H5I_INVALID_HID;
+    hid_t   fspace_id      = H5I_INVALID_HID;
+    hid_t   array_type_id1 = H5I_INVALID_HID, array_type_id2 = H5I_INVALID_HID,
+          array_type_id3 = H5I_INVALID_HID;
+    hid_t array_base_type_id1 = H5I_INVALID_HID, array_base_type_id2 = H5I_INVALID_HID,
+          array_base_type_id3 = H5I_INVALID_HID;
+    hid_t nested_type_id      = H5I_INVALID_HID;
+
+    TESTING("dataset creation with array types");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_ARRAY_TYPE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATASET_ARRAY_TYPE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Test creation of array with some different types */
+    /* NOTE(review): rand() is used unseeded here; reproducibility depends
+     * on the test harness seeding the PRNG before the tests run.
+     */
+    for (i = 0; i < DATASET_ARRAY_TYPE_TEST_RANK1; i++)
+        array_dims1[i] = (hsize_t)(rand() % MAX_DIM_SIZE + 1);
+
+    if ((array_base_type_id1 = generate_random_datatype(H5T_ARRAY, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((array_type_id1 = H5Tarray_create2(array_base_type_id1, DATASET_ARRAY_TYPE_TEST_RANK1, array_dims1)) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't create first array type\n");
+        goto error;
+    }
+
+    for (i = 0; i < DATASET_ARRAY_TYPE_TEST_RANK2; i++)
+        array_dims2[i] = (hsize_t)(rand() % MAX_DIM_SIZE + 1);
+
+    if ((array_base_type_id2 = generate_random_datatype(H5T_ARRAY, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((array_type_id2 = H5Tarray_create2(array_base_type_id2, DATASET_ARRAY_TYPE_TEST_RANK2, array_dims2)) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't create second array type\n");
+        goto error;
+    }
+
+    /* Test nested arrays */
+    for (i = 0; i < DATASET_ARRAY_TYPE_TEST_RANK3; i++)
+        array_dims3[i] = (hsize_t)(rand() % MAX_DIM_SIZE + 1);
+
+    if ((array_base_type_id3 = generate_random_datatype(H5T_ARRAY, FALSE)) < 0)
+        TEST_ERROR;
+
+    /* The nested type uses the same rank and dims for both levels */
+    if ((nested_type_id = H5Tarray_create2(array_base_type_id3, DATASET_ARRAY_TYPE_TEST_RANK3, array_dims3)) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't create nested array base type\n");
+        goto error;
+    }
+
+    if ((array_type_id3 = H5Tarray_create2(nested_type_id, DATASET_ARRAY_TYPE_TEST_RANK3, array_dims3)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create nested array type\n");
+        goto error;
+    }
+
+    if ((fspace_id = generate_random_dataspace(DATASET_ARRAY_TYPE_TEST_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id1 = H5Dcreate2(group_id, DATASET_ARRAY_TYPE_TEST_DSET_NAME1, array_type_id1, fspace_id,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create array type dataset '%s'\n", DATASET_ARRAY_TYPE_TEST_DSET_NAME1);
+        goto error;
+    }
+
+    if ((dset_id2 = H5Dcreate2(group_id, DATASET_ARRAY_TYPE_TEST_DSET_NAME2, array_type_id2, fspace_id,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create array type dataset '%s'\n", DATASET_ARRAY_TYPE_TEST_DSET_NAME2);
+        goto error;
+    }
+
+    if ((dset_id3 = H5Dcreate2(group_id, DATASET_ARRAY_TYPE_TEST_DSET_NAME3, array_type_id3, fspace_id,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create nested array type dataset '%s'\n", DATASET_ARRAY_TYPE_TEST_DSET_NAME3);
+        goto error;
+    }
+
+    /* Close and re-open each dataset to verify that creation persisted */
+    if (H5Dclose(dset_id1) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id2) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id3) < 0)
+        TEST_ERROR;
+
+    if ((dset_id1 = H5Dopen2(group_id, DATASET_ARRAY_TYPE_TEST_DSET_NAME1, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to open dataset '%s'\n", DATASET_ARRAY_TYPE_TEST_DSET_NAME1);
+        goto error;
+    }
+
+    if ((dset_id2 = H5Dopen2(group_id, DATASET_ARRAY_TYPE_TEST_DSET_NAME2, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to open dataset '%s'\n", DATASET_ARRAY_TYPE_TEST_DSET_NAME2);
+        goto error;
+    }
+
+    if ((dset_id3 = H5Dopen2(group_id, DATASET_ARRAY_TYPE_TEST_DSET_NAME3, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to open dataset '%s'\n", DATASET_ARRAY_TYPE_TEST_DSET_NAME3);
+        goto error;
+    }
+
+    if (H5Tclose(array_base_type_id1) < 0)
+        TEST_ERROR;
+    if (H5Tclose(array_base_type_id2) < 0)
+        TEST_ERROR;
+    if (H5Tclose(array_base_type_id3) < 0)
+        TEST_ERROR;
+    if (H5Tclose(nested_type_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(array_type_id1) < 0)
+        TEST_ERROR;
+    if (H5Tclose(array_type_id2) < 0)
+        TEST_ERROR;
+    if (H5Tclose(array_type_id3) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id1) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id2) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id3) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; error stack is suppressed since some IDs
+     * may legitimately be invalid depending on where the failure hit.
+     */
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(array_base_type_id1);
+        H5Tclose(array_base_type_id2);
+        H5Tclose(array_base_type_id3);
+        H5Tclose(nested_type_id);
+        H5Tclose(array_type_id1);
+        H5Tclose(array_type_id2);
+        H5Tclose(array_type_id3);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id1);
+        H5Dclose(dset_id2);
+        H5Dclose(dset_id3);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check the functionality of the different
+ * dataset creation properties.
+ *
+ * Each multipart sub-test exercises one DCPL property (storage space
+ * allocation time, attribute creation order, attribute phase change,
+ * fill time, the filter pipeline, storage layout and object time
+ * tracking) by creating a dataset with the property set on its DCPL
+ * and then re-opening the dataset to verify creation persisted.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_create_dataset_creation_properties(void)
+{
+    hsize_t dims[DATASET_CREATION_PROPERTIES_TEST_SHAPE_RANK];
+    hsize_t chunk_dims[DATASET_CREATION_PROPERTIES_TEST_SHAPE_RANK];
+    size_t  i;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id = H5I_INVALID_HID, dcpl_id = H5I_INVALID_HID;
+    hid_t   dset_dtype = H5I_INVALID_HID, compact_dtype = H5I_INVALID_HID;
+    hid_t   fspace_id = H5I_INVALID_HID, compact_fspace_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("dataset creation properties");
+
+    /* Make sure the connector supports the API functions being tested.
+     * Fixed: the first clause previously re-tested H5VL_CAP_FLAG_FILTERS
+     * (already tested by the last clause) instead of the basic file
+     * support that the skip message below refers to.
+     */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) || !(vol_cap_flags_g & H5VL_CAP_FLAG_TRACK_TIMES) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_FILTERS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, creation order, track time, or filter "
+                 "pipeline aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_CREATION_PROPERTIES_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", DATASET_CREATION_PROPERTIES_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* One regular and one compact-sized dataspace/datatype pair; the
+     * compact pair is needed for the H5D_COMPACT layout sub-test.
+     */
+    if ((fspace_id =
+             generate_random_dataspace(DATASET_CREATION_PROPERTIES_TEST_SHAPE_RANK, NULL, dims, FALSE)) < 0)
+        TEST_ERROR;
+    if ((compact_fspace_id =
+             generate_random_dataspace(DATASET_CREATION_PROPERTIES_TEST_SHAPE_RANK, NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    /* Set chunk dims to be size of dataset - for filters test */
+    for (i = 0; i < DATASET_CREATION_PROPERTIES_TEST_SHAPE_RANK; i++)
+        chunk_dims[i] = dims[i];
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+    if ((compact_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /* Test the alloc time property */
+        PART_BEGIN(DCPL_alloc_time_test)
+        {
+            H5D_alloc_time_t alloc_times[] = {H5D_ALLOC_TIME_DEFAULT, H5D_ALLOC_TIME_EARLY,
+                                              H5D_ALLOC_TIME_INCR, H5D_ALLOC_TIME_LATE};
+
+            TESTING_2("dataset storage space allocation time property");
+
+            if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create DCPL\n");
+                PART_ERROR(DCPL_alloc_time_test);
+            }
+
+            for (i = 0; i < ARRAY_LENGTH(alloc_times); i++) {
+                char name[100];
+
+                if (H5Pset_alloc_time(dcpl_id, alloc_times[i]) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't set alloc time property value\n");
+                    PART_ERROR(DCPL_alloc_time_test);
+                }
+
+                /* Bounded formatting - HDsprintf had no size limit */
+                HDsnprintf(name, sizeof(name), "%s%zu", DATASET_CREATION_PROPERTIES_TEST_ALLOC_TIMES_BASE_NAME, i);
+
+                if ((dset_id = H5Dcreate2(group_id, name, dset_dtype, fspace_id, H5P_DEFAULT, dcpl_id,
+                                          H5P_DEFAULT)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't create dataset '%s'\n", name);
+                    PART_ERROR(DCPL_alloc_time_test);
+                }
+
+                if (dset_id >= 0) {
+                    H5E_BEGIN_TRY
+                    {
+                        H5Dclose(dset_id);
+                    }
+                    H5E_END_TRY;
+                    dset_id = H5I_INVALID_HID;
+                }
+
+                if ((dset_id = H5Dopen2(group_id, name, H5P_DEFAULT)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't open dataset '%s'\n", name);
+                    PART_ERROR(DCPL_alloc_time_test);
+                }
+
+                if (dset_id >= 0) {
+                    H5E_BEGIN_TRY
+                    {
+                        H5Dclose(dset_id);
+                    }
+                    H5E_END_TRY;
+                    dset_id = H5I_INVALID_HID;
+                }
+            }
+
+            if (dcpl_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Pclose(dcpl_id);
+                }
+                H5E_END_TRY;
+                dcpl_id = H5I_INVALID_HID;
+            }
+
+            PASSED();
+        }
+        PART_END(DCPL_alloc_time_test);
+
+        /* Test the attribute creation order property */
+        PART_BEGIN(DCPL_attr_crt_order_test)
+        {
+            unsigned creation_orders[] = {H5P_CRT_ORDER_TRACKED,
+                                          H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED};
+
+            TESTING_2("attribute creation order property for DCPL");
+
+            if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create DCPL\n");
+                PART_ERROR(DCPL_attr_crt_order_test);
+            }
+
+            for (i = 0; i < ARRAY_LENGTH(creation_orders); i++) {
+                char name[100];
+
+                if (H5Pset_attr_creation_order(dcpl_id, creation_orders[i]) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't set attribute creation order property\n");
+                    PART_ERROR(DCPL_attr_crt_order_test);
+                }
+
+                /* Bounded formatting - HDsprintf had no size limit */
+                HDsnprintf(name, sizeof(name), "%s%zu", DATASET_CREATION_PROPERTIES_TEST_CRT_ORDER_BASE_NAME, i);
+
+                if ((dset_id = H5Dcreate2(group_id, name, dset_dtype, fspace_id, H5P_DEFAULT, dcpl_id,
+                                          H5P_DEFAULT)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't create dataset '%s'\n", name);
+                    PART_ERROR(DCPL_attr_crt_order_test);
+                }
+
+                if (dset_id >= 0) {
+                    H5E_BEGIN_TRY
+                    {
+                        H5Dclose(dset_id);
+                    }
+                    H5E_END_TRY;
+                    dset_id = H5I_INVALID_HID;
+                }
+
+                if ((dset_id = H5Dopen2(group_id, name, H5P_DEFAULT)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't open dataset '%s'\n", name);
+                    PART_ERROR(DCPL_attr_crt_order_test);
+                }
+
+                if (dset_id >= 0) {
+                    H5E_BEGIN_TRY
+                    {
+                        H5Dclose(dset_id);
+                    }
+                    H5E_END_TRY;
+                    dset_id = H5I_INVALID_HID;
+                }
+            }
+
+            if (dcpl_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Pclose(dcpl_id);
+                }
+                H5E_END_TRY;
+                dcpl_id = H5I_INVALID_HID;
+            }
+
+            PASSED();
+        }
+        PART_END(DCPL_attr_crt_order_test);
+
+        /* Test the attribute phase change property */
+        PART_BEGIN(DCPL_attr_phase_change_test)
+        {
+            TESTING_2("attribute phase change property for DCPL");
+
+            if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create DCPL\n");
+                PART_ERROR(DCPL_attr_phase_change_test);
+            }
+
+            if (H5Pset_attr_phase_change(dcpl_id, DATASET_CREATION_PROPERTIES_TEST_MAX_COMPACT,
+                                         DATASET_CREATION_PROPERTIES_TEST_MIN_DENSE) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't set attribute phase change property\n");
+                PART_ERROR(DCPL_attr_phase_change_test);
+            }
+
+            if ((dset_id = H5Dcreate2(group_id, DATASET_CREATION_PROPERTIES_TEST_PHASE_CHANGE_DSET_NAME,
+                                      dset_dtype, fspace_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create dataset '%s'\n",
+                         DATASET_CREATION_PROPERTIES_TEST_PHASE_CHANGE_DSET_NAME);
+                PART_ERROR(DCPL_attr_phase_change_test);
+            }
+
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            if ((dset_id = H5Dopen2(group_id, DATASET_CREATION_PROPERTIES_TEST_PHASE_CHANGE_DSET_NAME,
+                                    H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open dataset '%s'\n",
+                         DATASET_CREATION_PROPERTIES_TEST_PHASE_CHANGE_DSET_NAME);
+                PART_ERROR(DCPL_attr_phase_change_test);
+            }
+
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+            if (dcpl_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Pclose(dcpl_id);
+                }
+                H5E_END_TRY;
+                dcpl_id = H5I_INVALID_HID;
+            }
+
+            PASSED();
+        }
+        PART_END(DCPL_attr_phase_change_test);
+
+        /* Test the fill time property */
+        PART_BEGIN(DCPL_fill_time_property_test)
+        {
+            H5D_fill_time_t fill_times[] = {H5D_FILL_TIME_IFSET, H5D_FILL_TIME_ALLOC, H5D_FILL_TIME_NEVER};
+
+            TESTING_2("dataset fill time property");
+
+            if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create DCPL\n");
+                PART_ERROR(DCPL_fill_time_property_test);
+            }
+
+            for (i = 0; i < ARRAY_LENGTH(fill_times); i++) {
+                char name[100];
+
+                if (H5Pset_fill_time(dcpl_id, fill_times[i]) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't set dataset fill time property\n");
+                    PART_ERROR(DCPL_fill_time_property_test);
+                }
+
+                /* Bounded formatting - HDsprintf had no size limit */
+                HDsnprintf(name, sizeof(name), "%s%zu", DATASET_CREATION_PROPERTIES_TEST_FILL_TIMES_BASE_NAME, i);
+
+                if ((dset_id = H5Dcreate2(group_id, name, dset_dtype, fspace_id, H5P_DEFAULT, dcpl_id,
+                                          H5P_DEFAULT)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't create dataset '%s'\n", name);
+                    PART_ERROR(DCPL_fill_time_property_test);
+                }
+
+                if (dset_id >= 0) {
+                    H5E_BEGIN_TRY
+                    {
+                        H5Dclose(dset_id);
+                    }
+                    H5E_END_TRY;
+                    dset_id = H5I_INVALID_HID;
+                }
+
+                if ((dset_id = H5Dopen2(group_id, name, H5P_DEFAULT)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't open dataset '%s'\n", name);
+                    PART_ERROR(DCPL_fill_time_property_test);
+                }
+
+                if (dset_id >= 0) {
+                    H5E_BEGIN_TRY
+                    {
+                        H5Dclose(dset_id);
+                    }
+                    H5E_END_TRY;
+                    dset_id = H5I_INVALID_HID;
+                }
+            }
+
+            if (dcpl_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Pclose(dcpl_id);
+                }
+                H5E_END_TRY;
+                dcpl_id = H5I_INVALID_HID;
+            }
+
+            PASSED();
+        }
+        PART_END(DCPL_fill_time_property_test);
+
+        /* TODO: Test the fill value property */
+
+        /* Test filters */
+        PART_BEGIN(DCPL_filters_test)
+        {
+            TESTING_2("dataset filters");
+
+            if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create DCPL\n");
+                PART_ERROR(DCPL_filters_test);
+            }
+
+            /* Filters require a chunked layout */
+            if (H5Pset_chunk(dcpl_id, DATASET_CREATION_PROPERTIES_TEST_SHAPE_RANK, chunk_dims) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't set chunking on DCPL\n");
+                PART_ERROR(DCPL_filters_test);
+            }
+
+            /* Set all of the available filters on the DCPL */
+            if (H5Pset_deflate(dcpl_id, 7) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't set deflate filter on DCPL\n");
+                PART_ERROR(DCPL_filters_test);
+            }
+            if (H5Pset_shuffle(dcpl_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't set shuffle filter on DCPL\n");
+                PART_ERROR(DCPL_filters_test);
+            }
+            if (H5Pset_fletcher32(dcpl_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't set fletcher32 filter on DCPL\n");
+                PART_ERROR(DCPL_filters_test);
+            }
+            if (H5Pset_nbit(dcpl_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't set nbit filter on DCPL\n");
+                PART_ERROR(DCPL_filters_test);
+            }
+            /* NOTE(review): H5Z_SO_FLOAT_ESCALE is a floating-point scale
+             * factor; the dataset below uses H5T_NATIVE_INT - confirm this
+             * combination is intended (creation succeeds either way).
+             */
+            if (H5Pset_scaleoffset(dcpl_id, H5Z_SO_FLOAT_ESCALE, 2) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't set scaleoffset filter on DCPL\n");
+                PART_ERROR(DCPL_filters_test);
+            }
+
+            /*
+             * Use a simple datatype, as not all filters support all datatypes.
+             */
+            if ((dset_id = H5Dcreate2(group_id, DATASET_CREATION_PROPERTIES_TEST_FILTERS_DSET_NAME,
+                                      H5T_NATIVE_INT, fspace_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create dataset '%s'\n",
+                         DATASET_CREATION_PROPERTIES_TEST_FILTERS_DSET_NAME);
+                PART_ERROR(DCPL_filters_test);
+            }
+
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            if ((dset_id = H5Dopen2(group_id, DATASET_CREATION_PROPERTIES_TEST_FILTERS_DSET_NAME,
+                                    H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open dataset '%s'\n",
+                         DATASET_CREATION_PROPERTIES_TEST_FILTERS_DSET_NAME);
+                PART_ERROR(DCPL_filters_test);
+            }
+
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            if (dcpl_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Pclose(dcpl_id);
+                }
+                H5E_END_TRY;
+                dcpl_id = H5I_INVALID_HID;
+            }
+
+            PASSED();
+        }
+        PART_END(DCPL_filters_test);
+
+        /* Test the dataset storage layout property */
+        PART_BEGIN(DCPL_storage_layout_test)
+        {
+            H5D_layout_t layouts[] = {H5D_COMPACT, H5D_CONTIGUOUS, H5D_CHUNKED};
+
+            TESTING_2("dataset storage layouts");
+
+            if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create DCPL\n");
+                PART_ERROR(DCPL_storage_layout_test);
+            }
+
+            for (i = 0; i < ARRAY_LENGTH(layouts); i++) {
+                char name[100];
+
+                if (H5Pset_layout(dcpl_id, layouts[i]) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't set storage layout property\n");
+                    PART_ERROR(DCPL_storage_layout_test);
+                }
+
+                if (H5D_CHUNKED == layouts[i]) {
+                    hsize_t local_chunk_dims[DATASET_CREATION_PROPERTIES_TEST_CHUNK_DIM_RANK];
+                    size_t  j;
+
+                    /* Random chunk dims no larger than the dataset dims */
+                    for (j = 0; j < DATASET_CREATION_PROPERTIES_TEST_CHUNK_DIM_RANK; j++)
+                        local_chunk_dims[j] = (hsize_t)(rand() % (int)dims[j] + 1);
+
+                    if (H5Pset_chunk(dcpl_id, DATASET_CREATION_PROPERTIES_TEST_CHUNK_DIM_RANK,
+                                     local_chunk_dims) < 0) {
+                        H5_FAILED();
+                        HDprintf("    couldn't set chunk dimensionality\n");
+                        PART_ERROR(DCPL_storage_layout_test);
+                    }
+                }
+
+                /* Bounded formatting - HDsprintf had no size limit */
+                HDsnprintf(name, sizeof(name), "%s%zu", DATASET_CREATION_PROPERTIES_TEST_LAYOUTS_BASE_NAME, i);
+
+                if ((dset_id =
+                         H5Dcreate2(group_id, name, (H5D_COMPACT == layouts[i]) ? compact_dtype : dset_dtype,
+                                    (H5D_COMPACT == layouts[i]) ? compact_fspace_id : fspace_id, H5P_DEFAULT,
+                                    dcpl_id, H5P_DEFAULT)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't create dataset '%s'\n", name);
+                    PART_ERROR(DCPL_storage_layout_test);
+                }
+
+                if (dset_id >= 0) {
+                    H5E_BEGIN_TRY
+                    {
+                        H5Dclose(dset_id);
+                    }
+                    H5E_END_TRY;
+                    dset_id = H5I_INVALID_HID;
+                }
+
+                if ((dset_id = H5Dopen2(group_id, name, H5P_DEFAULT)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't open dataset '%s'\n", name);
+                    PART_ERROR(DCPL_storage_layout_test);
+                }
+
+                if (dset_id >= 0) {
+                    H5E_BEGIN_TRY
+                    {
+                        H5Dclose(dset_id);
+                    }
+                    H5E_END_TRY;
+                    dset_id = H5I_INVALID_HID;
+                }
+            }
+
+            if (dcpl_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Pclose(dcpl_id);
+                }
+                H5E_END_TRY;
+                dcpl_id = H5I_INVALID_HID;
+            }
+
+            PASSED();
+        }
+        PART_END(DCPL_storage_layout_test);
+
+        /* Test the "track object times" property */
+        PART_BEGIN(DCPL_track_obj_times_test)
+        {
+            TESTING_2("object time tracking property for DCPL");
+
+            if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create DCPL\n");
+                PART_ERROR(DCPL_track_obj_times_test);
+            }
+
+            if (H5Pset_obj_track_times(dcpl_id, true) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't set object time tracking property\n");
+                PART_ERROR(DCPL_track_obj_times_test);
+            }
+
+            if ((dset_id = H5Dcreate2(group_id, DATASET_CREATION_PROPERTIES_TEST_TRACK_TIMES_YES_DSET_NAME,
+                                      dset_dtype, fspace_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create dataset '%s'\n",
+                         DATASET_CREATION_PROPERTIES_TEST_TRACK_TIMES_YES_DSET_NAME);
+                PART_ERROR(DCPL_track_obj_times_test);
+            }
+
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            if ((dset_id = H5Dopen2(group_id, DATASET_CREATION_PROPERTIES_TEST_TRACK_TIMES_YES_DSET_NAME,
+                                    H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open dataset '%s'\n",
+                         DATASET_CREATION_PROPERTIES_TEST_TRACK_TIMES_YES_DSET_NAME);
+                PART_ERROR(DCPL_track_obj_times_test);
+            }
+
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            /* Now with time tracking disabled */
+            if (H5Pset_obj_track_times(dcpl_id, false) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't set object time tracking property\n");
+                PART_ERROR(DCPL_track_obj_times_test);
+            }
+
+            if ((dset_id = H5Dcreate2(group_id, DATASET_CREATION_PROPERTIES_TEST_TRACK_TIMES_NO_DSET_NAME,
+                                      dset_dtype, fspace_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create dataset '%s'\n",
+                         DATASET_CREATION_PROPERTIES_TEST_TRACK_TIMES_NO_DSET_NAME);
+                PART_ERROR(DCPL_track_obj_times_test);
+            }
+
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            if ((dset_id = H5Dopen2(group_id, DATASET_CREATION_PROPERTIES_TEST_TRACK_TIMES_NO_DSET_NAME,
+                                    H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open dataset '%s'\n",
+                         DATASET_CREATION_PROPERTIES_TEST_TRACK_TIMES_NO_DSET_NAME);
+                PART_ERROR(DCPL_track_obj_times_test);
+            }
+
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+            if (dcpl_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Pclose(dcpl_id);
+                }
+                H5E_END_TRY;
+                dcpl_id = H5I_INVALID_HID;
+            }
+
+            PASSED();
+        }
+        PART_END(DCPL_track_obj_times_test);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(compact_fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(compact_dtype) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(compact_fspace_id);
+        H5Sclose(fspace_id);
+        H5Tclose(compact_dtype);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Pclose(dcpl_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to create many small datasets (100,000)
+ *
+ * Creates DATASET_NUMB scalar unsigned-char datasets in a fresh
+ * sub-group, writing one byte to each, to stress dataset creation.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_create_many_dataset(void)
+{
+    hid_t         file_id         = H5I_INVALID_HID;
+    hid_t         container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t         dset_id      = H5I_INVALID_HID;
+    hid_t         dataspace_id = H5I_INVALID_HID;
+    char          dset_name[DSET_NAME_BUF_SIZE];
+    unsigned char data;
+    unsigned int  i;
+
+    TESTING("creating many datasets");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_MANY_CREATE_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container group '%s'\n", DATASET_MANY_CREATE_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dataspace_id = H5Screate(H5S_SCALAR)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create scalar data space\n");
+        goto error;
+    }
+
+    HDprintf("\n");
+    for (i = 0; i < DATASET_NUMB; i++) {
+        /* Progress indicator; \r keeps it on one console line */
+        HDprintf("\r %u/%u", i + 1, DATASET_NUMB);
+
+        /* Use the bounded HD wrapper instead of a bare, unbounded sprintf
+         * for consistency with the rest of the file and to guard
+         * dset_name against overflow.
+         */
+        HDsnprintf(dset_name, sizeof(dset_name), "dset_%02u", i);
+        data = (unsigned char)(i % 256);
+
+        if ((dset_id = H5Dcreate2(group_id, dset_name, H5T_NATIVE_UCHAR, dataspace_id, H5P_DEFAULT,
+                                  H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create dataset '%s'\n", dset_name);
+            goto error;
+        }
+
+        if (H5Dwrite(dset_id, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, &data) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't write to dataset '%s'\n", dset_name);
+            goto error;
+        }
+
+        if (H5Dclose(dset_id) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't close dataset '%s'\n", dset_name);
+            goto error;
+        }
+    }
+
+    if (H5Sclose(dataspace_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Dclose(dset_id);
+        H5Sclose(dataspace_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that re-opening a dataset with
+ * H5Dopen succeeds.
+ *
+ * NOTE(review): currently a stub -- it reports SKIPPED unconditionally
+ * and performs no H5Dopen call; the actual test remains to be written.
+ */
+static int
+test_open_dataset(void)
+{
+    TESTING("H5Dopen");
+
+    /* Unimplemented; always skipped for now */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to check that H5Dopen fails when it is
+ * passed invalid parameters.
+ *
+ * Creates a valid dataset first, then attempts to re-open it with an
+ * invalid location ID, a NULL/empty dataset name, and an invalid DAPL,
+ * expecting each attempt to fail.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_open_dataset_invalid_params(void)
+{
+    hid_t file_id         = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id         = H5I_INVALID_HID;
+    hid_t dset_dtype      = H5I_INVALID_HID;
+    hid_t fspace_id       = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("H5Dopen with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_OPEN_INVALID_PARAMS_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container group '%s'\n", DATASET_OPEN_INVALID_PARAMS_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = generate_random_dataspace(DATASET_OPEN_INVALID_PARAMS_SPACE_RANK, NULL, NULL, FALSE)) <
+        0)
+        TEST_ERROR;
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    /* Create a valid dataset to be the target of the invalid opens */
+    if ((dset_id = H5Dcreate2(group_id, DATASET_OPEN_INVALID_PARAMS_DSET_NAME, dset_dtype, fspace_id,
+                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_OPEN_INVALID_PARAMS_DSET_NAME);
+        goto error;
+    }
+
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Dopen_invalid_loc_id)
+        {
+            TESTING_2("H5Dopen with an invalid loc_id");
+
+            /* The expected failure is suppressed from the error stack */
+            H5E_BEGIN_TRY
+            {
+                dset_id = H5Dopen2(H5I_INVALID_HID, DATASET_OPEN_INVALID_PARAMS_DSET_NAME, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (dset_id >= 0) {
+                H5_FAILED();
+                HDprintf("    opened dataset using H5Dopen2 with an invalid loc_id!\n");
+                H5Dclose(dset_id);
+                PART_ERROR(H5Dopen_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dopen_invalid_loc_id);
+
+        PART_BEGIN(H5Dopen_invalid_dataset_name)
+        {
+            TESTING_2("H5Dopen with an invalid dataset name");
+
+            H5E_BEGIN_TRY
+            {
+                dset_id = H5Dopen2(group_id, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (dset_id >= 0) {
+                H5_FAILED();
+                HDprintf("    opened dataset using H5Dopen2 with a NULL dataset name!\n");
+                H5Dclose(dset_id);
+                PART_ERROR(H5Dopen_invalid_dataset_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                dset_id = H5Dopen2(group_id, "", H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (dset_id >= 0) {
+                H5_FAILED();
+                HDprintf("    opened dataset using H5Dopen2 with an invalid dataset name of ''!\n");
+                H5Dclose(dset_id);
+                PART_ERROR(H5Dopen_invalid_dataset_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dopen_invalid_dataset_name);
+
+        PART_BEGIN(H5Dopen_invalid_dapl)
+        {
+            TESTING_2("H5Dopen with an invalid DAPL");
+
+            H5E_BEGIN_TRY
+            {
+                dset_id = H5Dopen2(group_id, DATASET_OPEN_INVALID_PARAMS_DSET_NAME, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (dset_id >= 0) {
+                H5_FAILED();
+                HDprintf("    opened dataset using H5Dopen2 with an invalid DAPL!\n");
+                H5Dclose(dset_id);
+                PART_ERROR(H5Dopen_invalid_dapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dopen_invalid_dapl);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(fspace_id);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that H5Dclose fails when it is
+ * passed an invalid dataset ID.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_close_dataset_invalid_params(void)
+{
+    hid_t  file_id   = H5I_INVALID_HID;
+    herr_t close_ret = -1;
+
+    TESTING("H5Dclose with an invalid dataset ID");
+
+    /* Skip when the connector lacks the required basic support */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    /* Attempt the invalid close; the expected failure is suppressed
+     * from the error stack.
+     */
+    H5E_BEGIN_TRY
+    {
+        close_ret = H5Dclose(H5I_INVALID_HID);
+    }
+    H5E_END_TRY;
+
+    if (close_ret >= 0) {
+        H5_FAILED();
+        HDprintf("    H5Dclose succeeded with an invalid dataset ID!\n");
+        goto error;
+    }
+
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that valid copies of a dataset's dataspace
+ * and datatype can be retrieved with H5Dget_space and
+ * H5Dget_type, respectively.
+ */
+static int
+test_get_dataset_space_and_type(void)
+{
+ hsize_t dset_dims[DATASET_GET_SPACE_TYPE_TEST_SPACE_RANK];
+ size_t i;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dset_dtype = H5I_INVALID_HID;
+ hid_t dset_space_id = H5I_INVALID_HID;
+ hid_t tmp_type_id = H5I_INVALID_HID;
+ hid_t tmp_space_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("retrieval of a dataset's dataspace and datatype");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_GET_SPACE_TYPE_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container group '%s'\n", DATASET_GET_SPACE_TYPE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((dset_space_id =
+ generate_random_dataspace(DATASET_GET_SPACE_TYPE_TEST_SPACE_RANK, NULL, dset_dims, FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_GET_SPACE_TYPE_TEST_DSET_NAME, dset_dtype, dset_space_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_GET_SPACE_TYPE_TEST_DSET_NAME);
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ /* Retrieve the dataset's datatype and dataspace and verify them */
+ PART_BEGIN(H5Dget_type)
+ {
+ TESTING_2("H5Dget_type");
+
+ if ((tmp_type_id = H5Dget_type(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve dataset's datatype\n");
+ PART_ERROR(H5Dget_type);
+ }
+
+ {
+ htri_t types_equal = H5Tequal(tmp_type_id, dset_dtype);
+
+ if (types_equal < 0) {
+ H5_FAILED();
+ HDprintf(" datatype was invalid\n");
+ PART_ERROR(H5Dget_type);
+ }
+
+ if (!types_equal) {
+ H5_FAILED();
+ HDprintf(" dataset's datatype did not match\n");
+ PART_ERROR(H5Dget_type);
+ }
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dget_type);
+
+ PART_BEGIN(H5Dget_space)
+ {
+ TESTING_2("H5Dget_space");
+
+ if ((tmp_space_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve dataset's dataspace\n");
+ PART_ERROR(H5Dget_space);
+ }
+
+ {
+ hsize_t space_dims[DATASET_GET_SPACE_TYPE_TEST_SPACE_RANK];
+
+ if (H5Sget_simple_extent_dims(tmp_space_id, space_dims, NULL) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve dataspace dimensions\n");
+ PART_ERROR(H5Dget_space);
+ }
+
+ for (i = 0; i < DATASET_GET_SPACE_TYPE_TEST_SPACE_RANK; i++)
+ if (space_dims[i] != dset_dims[i]) {
+ H5_FAILED();
+ HDprintf(" dataset's dataspace dims didn't match\n");
+ PART_ERROR(H5Dget_space);
+ }
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dget_space);
+
+        /* Now close the dataset and verify that the datatype and dataspace
+         * can still be retrieved after re-opening the dataset instead of
+         * creating it.
+         */
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+ if (tmp_type_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Tclose(tmp_type_id);
+ }
+ H5E_END_TRY;
+ tmp_type_id = H5I_INVALID_HID;
+ }
+ if (tmp_space_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(tmp_space_id);
+ }
+ H5E_END_TRY;
+ tmp_space_id = H5I_INVALID_HID;
+ }
+
+ PART_BEGIN(H5Dget_type_reopened)
+ {
+ TESTING_2("H5Dget_type after re-opening a dataset");
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_GET_SPACE_TYPE_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_GET_SPACE_TYPE_TEST_DSET_NAME);
+ PART_ERROR(H5Dget_type_reopened);
+ }
+
+ if ((tmp_type_id = H5Dget_type(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve dataset's datatype\n");
+ PART_ERROR(H5Dget_type_reopened);
+ }
+
+ {
+ htri_t types_equal = H5Tequal(tmp_type_id, dset_dtype);
+
+ if (types_equal < 0) {
+ H5_FAILED();
+ HDprintf(" datatype was invalid\n");
+ PART_ERROR(H5Dget_type_reopened);
+ }
+
+ if (!types_equal) {
+ H5_FAILED();
+ HDprintf(" dataset's datatype did not match\n");
+ PART_ERROR(H5Dget_type_reopened);
+ }
+ }
+
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dget_type_reopened);
+
+ PART_BEGIN(H5Dget_space_reopened)
+ {
+ TESTING_2("H5Dget_space after re-opening a dataset");
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_GET_SPACE_TYPE_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_GET_SPACE_TYPE_TEST_DSET_NAME);
+ PART_ERROR(H5Dget_space_reopened);
+ }
+
+ if ((tmp_space_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve dataset's dataspace\n");
+ PART_ERROR(H5Dget_space_reopened);
+ }
+
+ {
+ hsize_t space_dims[DATASET_GET_SPACE_TYPE_TEST_SPACE_RANK];
+
+ if (H5Sget_simple_extent_dims(tmp_space_id, space_dims, NULL) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve dataspace dimensions\n");
+ PART_ERROR(H5Dget_space_reopened);
+ }
+
+ for (i = 0; i < DATASET_GET_SPACE_TYPE_TEST_SPACE_RANK; i++) {
+ if (space_dims[i] != dset_dims[i]) {
+ H5_FAILED();
+ HDprintf(" dataset's dataspace dims didn't match!\n");
+ PART_ERROR(H5Dget_space_reopened);
+ }
+ }
+ }
+
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dget_space_reopened);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(tmp_space_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(dset_space_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(tmp_type_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dset_dtype) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(tmp_space_id);
+ H5Sclose(dset_space_id);
+ H5Tclose(tmp_type_id);
+ H5Tclose(dset_dtype);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a dataset's dataspace and datatype
+ * can't be retrieved when H5Dget_space and H5Dget_type are passed
+ * invalid parameters, respectively.
+ */
+static int
+test_get_dataset_space_and_type_invalid_params(void)
+{
+    hid_t file_id         = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID;
+    hid_t group_id        = H5I_INVALID_HID;
+    hid_t dset_id         = H5I_INVALID_HID;
+    hid_t dset_dtype      = H5I_INVALID_HID;
+    hid_t dset_space_id   = H5I_INVALID_HID;
+    hid_t tmp_type_id     = H5I_INVALID_HID;
+    hid_t tmp_space_id    = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("H5Dget_type/H5Dget_space with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    /* This is a dataset test, so create the test group under the dataset
+     * container group (was mistakenly ATTRIBUTE_TEST_GROUP_NAME, a copy-paste
+     * error from the attribute tests).
+     */
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_GET_SPACE_TYPE_INVALID_PARAMS_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container group '%s'\n",
+                 DATASET_GET_SPACE_TYPE_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_space_id = generate_random_dataspace(DATASET_GET_SPACE_TYPE_INVALID_PARAMS_TEST_SPACE_RANK,
+                                                   NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_GET_SPACE_TYPE_INVALID_PARAMS_TEST_DSET_NAME, dset_dtype,
+                              dset_space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_GET_SPACE_TYPE_INVALID_PARAMS_TEST_DSET_NAME);
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /* Each part passes an invalid dataset ID and expects a negative ID back */
+        PART_BEGIN(H5Dget_type_invalid_dset_id)
+        {
+            TESTING_2("H5Dget_type with an invalid dset_id");
+
+            H5E_BEGIN_TRY
+            {
+                tmp_type_id = H5Dget_type(H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (tmp_type_id >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved copy of dataset's datatype using an invalid dataset ID!\n");
+                PART_ERROR(H5Dget_type_invalid_dset_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dget_type_invalid_dset_id);
+
+        PART_BEGIN(H5Dget_space_invalid_dset_id)
+        {
+            TESTING_2("H5Dget_space with an invalid dset_id");
+
+            H5E_BEGIN_TRY
+            {
+                tmp_space_id = H5Dget_space(H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (tmp_space_id >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved copy of dataset's dataspace using an invalid dataset ID!\n");
+                PART_ERROR(H5Dget_space_invalid_dset_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dget_space_invalid_dset_id);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    /* tmp_type_id/tmp_space_id are expected to still be invalid here; only the
+     * objects created during setup need to be released on the success path.
+     */
+    if (H5Sclose(dset_space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(tmp_space_id);
+        H5Sclose(dset_space_id);
+        H5Tclose(tmp_type_id);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test for H5Dget_space_status.
+ */
+static int
+test_get_dataset_space_status(void)
+{
+    TESTING("H5Dget_space_status");
+
+    /* Coverage for H5Dget_space_status is not implemented yet; skip unconditionally */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to check that a dataset's dataspace allocation
+ * status can't be retrieved with H5Dget_space_status when
+ * it is passed invalid parameters.
+ */
+static int
+test_get_dataset_space_status_invalid_params(void)
+{
+    TESTING("H5Dget_space_status with invalid parameters");
+
+    /* Invalid-parameter coverage for H5Dget_space_status is not implemented yet; skip */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to check that a DCPL used for dataset creation
+ * can be persisted and that a valid copy of that DCPL can
+ * be retrieved later with a call to H5Dget_create_plist.
+ * Also tests that a valid copy of a DAPL used for dataset
+ * access can be retrieved with a call to H5Dget_access_plist.
+ */
+static int
+test_dataset_property_lists(void)
+{
+    const char *path_prefix = "/test_prefix";
+    hsize_t     dims[DATASET_PROPERTY_LIST_TEST_SPACE_RANK];
+    hsize_t     chunk_dims[DATASET_PROPERTY_LIST_TEST_SPACE_RANK];
+    size_t      i;
+    herr_t      err_ret = -1;
+    hid_t       file_id = H5I_INVALID_HID;
+    hid_t       container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t       dset_id1 = H5I_INVALID_HID, dset_id2 = H5I_INVALID_HID, dset_id3 = H5I_INVALID_HID,
+          dset_id4 = H5I_INVALID_HID;
+    hid_t dcpl_id1 = H5I_INVALID_HID, dcpl_id2 = H5I_INVALID_HID;
+    hid_t dapl_id1 = H5I_INVALID_HID, dapl_id2 = H5I_INVALID_HID;
+    hid_t dset_dtype1 = H5I_INVALID_HID, dset_dtype2 = H5I_INVALID_HID, dset_dtype3 = H5I_INVALID_HID,
+          dset_dtype4 = H5I_INVALID_HID;
+    hid_t space_id = H5I_INVALID_HID;
+    char *tmp_prefix = NULL;
+    /* Only used to detect the DAOS connector; 5 bytes fits "daos" + NUL */
+    char vol_name[5];
+
+    TESTING_MULTIPART("dataset property list operations");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or get property list aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    /** for DAOS VOL, this test is problematic since auto chunking can be selected, so skip for now */
+    if (H5VLget_connector_name(file_id, vol_name, 5) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get VOL connector name\n");
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_PROPERTY_LIST_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATASET_PROPERTY_LIST_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    if ((space_id = generate_random_dataspace(DATASET_PROPERTY_LIST_TEST_SPACE_RANK, NULL, dims, FALSE)) < 0)
+        TEST_ERROR;
+
+    /* Pick random chunk dimensions, each between 1 and the corresponding dataset dim */
+    for (i = 0; i < DATASET_PROPERTY_LIST_TEST_SPACE_RANK; i++)
+        chunk_dims[i] = (hsize_t)(rand() % (int)dims[i] + 1);
+
+    if ((dset_dtype1 = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+    if ((dset_dtype2 = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+    if ((dset_dtype3 = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+    if ((dset_dtype4 = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dcpl_id1 = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create DCPL\n");
+        goto error;
+    }
+
+    /* Set a chunked layout so the DCPL retrieved later has a verifiable, non-default property */
+    if (H5Pset_chunk(dcpl_id1, DATASET_PROPERTY_LIST_TEST_SPACE_RANK, chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set DCPL property\n");
+        goto error;
+    }
+
+    if ((dset_id1 = H5Dcreate2(group_id, DATASET_PROPERTY_LIST_TEST_DSET_NAME1, dset_dtype1, space_id,
+                               H5P_DEFAULT, dcpl_id1, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_PROPERTY_LIST_TEST_DSET_NAME1);
+        goto error;
+    }
+
+    if ((dset_id2 = H5Dcreate2(group_id, DATASET_PROPERTY_LIST_TEST_DSET_NAME2, dset_dtype2, space_id,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_PROPERTY_LIST_TEST_DSET_NAME2);
+        goto error;
+    }
+
+    if (H5Pclose(dcpl_id1) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Dget_create_plist)
+        {
+            TESTING_2("H5Dget_create_plist");
+
+            /* Try to receive copies of the two property lists, one which has the property set and one which
+             * does not */
+            if ((dcpl_id1 = H5Dget_create_plist(dset_id1)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Dget_create_plist);
+            }
+
+            if ((dcpl_id2 = H5Dget_create_plist(dset_id2)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Dget_create_plist);
+            }
+
+            /* Ensure that property list 1 has the property set and property list 2 does not */
+            {
+                hsize_t tmp_chunk_dims[DATASET_PROPERTY_LIST_TEST_SPACE_RANK];
+
+                HDmemset(tmp_chunk_dims, 0, sizeof(tmp_chunk_dims));
+
+                if (H5Pget_chunk(dcpl_id1, DATASET_PROPERTY_LIST_TEST_SPACE_RANK, tmp_chunk_dims) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't get DCPL property value\n");
+                    PART_ERROR(H5Dget_create_plist);
+                }
+
+                for (i = 0; i < DATASET_PROPERTY_LIST_TEST_SPACE_RANK; i++)
+                    if (tmp_chunk_dims[i] != chunk_dims[i]) {
+                        H5_FAILED();
+                        HDprintf("    DCPL property values were incorrect\n");
+                        PART_ERROR(H5Dget_create_plist);
+                    }
+
+                /* dset_id2 has a contiguous (default) layout, so H5Pget_chunk should fail */
+                H5E_BEGIN_TRY
+                {
+                    err_ret = H5Pget_chunk(dcpl_id2, DATASET_PROPERTY_LIST_TEST_SPACE_RANK, tmp_chunk_dims);
+                }
+                H5E_END_TRY;
+
+                /* DAOS VOL can auto chunk, so don't fail */
+                if (err_ret >= 0 && strcmp(vol_name, "daos") != 0) {
+                    H5_FAILED();
+                    HDprintf("    property list 2 shouldn't have had chunk dimensionality set (not a chunked "
+                             "layout)\n");
+                    PART_ERROR(H5Dget_create_plist);
+                }
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dget_create_plist);
+
+        PART_BEGIN(H5Dget_access_plist)
+        {
+            TESTING_2("H5Dget_access_plist");
+
+            if ((dapl_id1 = H5Pcreate(H5P_DATASET_ACCESS)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create DAPL\n");
+                PART_ERROR(H5Dget_access_plist);
+            }
+
+            /* Set an external-file prefix so the DAPL retrieved later has a verifiable property */
+            if (H5Pset_efile_prefix(dapl_id1, path_prefix) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't set DAPL property\n");
+                PART_ERROR(H5Dget_access_plist);
+            }
+
+            if ((dset_id3 = H5Dcreate2(group_id, DATASET_PROPERTY_LIST_TEST_DSET_NAME3, dset_dtype3, space_id,
+                                       H5P_DEFAULT, H5P_DEFAULT, dapl_id1)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create dataset\n");
+                PART_ERROR(H5Dget_access_plist);
+            }
+
+            if ((dset_id4 = H5Dcreate2(group_id, DATASET_PROPERTY_LIST_TEST_DSET_NAME4, dset_dtype4, space_id,
+                                       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create dataset\n");
+                PART_ERROR(H5Dget_access_plist);
+            }
+
+            /* Close the original DAPL; dapl_id1 is reused below for the retrieved copy */
+            if (dapl_id1 >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Pclose(dapl_id1);
+                }
+                H5E_END_TRY;
+                dapl_id1 = H5I_INVALID_HID;
+            }
+
+            /* Try to receive copies of the two property lists, one which has the property set and one which
+             * does not */
+            if ((dapl_id1 = H5Dget_access_plist(dset_id3)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Dget_access_plist);
+            }
+
+            if ((dapl_id2 = H5Dget_access_plist(dset_id4)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Dget_access_plist);
+            }
+
+            /* Ensure that property list 1 has the property set and property list 2 does not */
+            {
+                ssize_t buf_size = 0;
+
+                /* First call with NULL buffer returns the required prefix length */
+                if ((buf_size = H5Pget_efile_prefix(dapl_id1, NULL, 0)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't retrieve size for property value buffer\n");
+                    PART_ERROR(H5Dget_access_plist);
+                }
+
+                if (NULL == (tmp_prefix = (char *)HDcalloc(1, (size_t)buf_size + 1))) {
+                    H5_FAILED();
+                    HDprintf("    couldn't allocate buffer for property value\n");
+                    PART_ERROR(H5Dget_access_plist);
+                }
+
+                if (H5Pget_efile_prefix(dapl_id1, tmp_prefix, (size_t)buf_size + 1) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't retrieve property list value\n");
+                    PART_ERROR(H5Dget_access_plist);
+                }
+
+                if (HDstrncmp(tmp_prefix, path_prefix, (size_t)buf_size + 1)) {
+                    H5_FAILED();
+                    HDprintf("    DAPL values were incorrect!\n");
+                    PART_ERROR(H5Dget_access_plist);
+                }
+
+                HDmemset(tmp_prefix, 0, (size_t)buf_size + 1);
+
+                /* NOTE(review): this call passes buf_size rather than buf_size + 1 as above;
+                 * harmless here since only inequality with path_prefix is checked, but
+                 * inconsistent with the dapl_id1 query - confirm intent */
+                if (H5Pget_efile_prefix(dapl_id2, tmp_prefix, (size_t)buf_size) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't retrieve property list value\n");
+                    PART_ERROR(H5Dget_access_plist);
+                }
+
+                if (!HDstrncmp(tmp_prefix, path_prefix, (size_t)buf_size + 1)) {
+                    H5_FAILED();
+                    HDprintf("    DAPL property value was set!\n");
+                    PART_ERROR(H5Dget_access_plist);
+                }
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dget_access_plist);
+
+        /* Now close the property lists and datasets and see if we can still retrieve copies of
+         * the property lists upon opening (instead of creating) a dataset
+         */
+        if (dcpl_id1 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Pclose(dcpl_id1);
+            }
+            H5E_END_TRY;
+            dcpl_id1 = H5I_INVALID_HID;
+        }
+        if (dcpl_id2 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Pclose(dcpl_id2);
+            }
+            H5E_END_TRY;
+            dcpl_id2 = H5I_INVALID_HID;
+        }
+        if (dset_id1 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Dclose(dset_id1);
+            }
+            H5E_END_TRY;
+            dset_id1 = H5I_INVALID_HID;
+        }
+        if (dset_id2 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Dclose(dset_id2);
+            }
+            H5E_END_TRY;
+            dset_id2 = H5I_INVALID_HID;
+        }
+
+        PART_BEGIN(H5Dget_create_plist_reopened)
+        {
+            TESTING_2("H5Dget_create_plist after re-opening a dataset");
+
+            if ((dset_id1 = H5Dopen2(group_id, DATASET_PROPERTY_LIST_TEST_DSET_NAME1, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open dataset '%s'\n", DATASET_PROPERTY_LIST_TEST_DSET_NAME1);
+                PART_ERROR(H5Dget_create_plist_reopened);
+            }
+
+            if ((dset_id2 = H5Dopen2(group_id, DATASET_PROPERTY_LIST_TEST_DSET_NAME2, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open dataset '%s'\n", DATASET_PROPERTY_LIST_TEST_DSET_NAME2);
+                PART_ERROR(H5Dget_create_plist_reopened);
+            }
+
+            if ((dcpl_id1 = H5Dget_create_plist(dset_id1)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Dget_create_plist_reopened);
+            }
+
+            if ((dcpl_id2 = H5Dget_create_plist(dset_id2)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Dget_create_plist_reopened);
+            }
+
+            /* Ensure that property list 1 has the property set and property list 2 does not */
+            {
+                hsize_t tmp_chunk_dims[DATASET_PROPERTY_LIST_TEST_SPACE_RANK];
+
+                HDmemset(tmp_chunk_dims, 0, sizeof(tmp_chunk_dims));
+
+                if (H5Pget_chunk(dcpl_id1, DATASET_PROPERTY_LIST_TEST_SPACE_RANK, tmp_chunk_dims) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't get DCPL property value\n");
+                    PART_ERROR(H5Dget_create_plist_reopened);
+                }
+
+                for (i = 0; i < DATASET_PROPERTY_LIST_TEST_SPACE_RANK; i++)
+                    if (tmp_chunk_dims[i] != chunk_dims[i]) {
+                        H5_FAILED();
+                        HDprintf("    DCPL property values were incorrect\n");
+                        PART_ERROR(H5Dget_create_plist_reopened);
+                    }
+
+                H5E_BEGIN_TRY
+                {
+                    err_ret = H5Pget_chunk(dcpl_id2, DATASET_PROPERTY_LIST_TEST_SPACE_RANK, tmp_chunk_dims);
+                }
+                H5E_END_TRY;
+
+                /* DAOS VOL can auto chunk, so don't fail */
+                if (err_ret >= 0 && strcmp(vol_name, "daos") != 0) {
+                    H5_FAILED();
+                    HDprintf("    property list 2 shouldn't have had chunk dimensionality set (not a chunked "
+                             "layout)\n");
+                    PART_ERROR(H5Dget_create_plist_reopened);
+                }
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dget_create_plist_reopened);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (tmp_prefix) {
+        HDfree(tmp_prefix);
+        tmp_prefix = NULL;
+    }
+
+    if (H5Pclose(dcpl_id1) < 0)
+        TEST_ERROR;
+    if (H5Pclose(dcpl_id2) < 0)
+        TEST_ERROR;
+    if (H5Pclose(dapl_id1) < 0)
+        TEST_ERROR;
+    if (H5Pclose(dapl_id2) < 0)
+        TEST_ERROR;
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype1) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype2) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype3) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype4) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id1) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id2) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id3) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id4) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        if (tmp_prefix)
+            HDfree(tmp_prefix);
+        H5Pclose(dcpl_id1);
+        H5Pclose(dcpl_id2);
+        H5Pclose(dapl_id1);
+        H5Pclose(dapl_id2);
+        H5Sclose(space_id);
+        H5Tclose(dset_dtype1);
+        H5Tclose(dset_dtype2);
+        H5Tclose(dset_dtype3);
+        H5Tclose(dset_dtype4);
+        H5Dclose(dset_id1);
+        H5Dclose(dset_id2);
+        H5Dclose(dset_id3);
+        H5Dclose(dset_id4);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test for H5Dget_storage_size.
+ */
+static int
+test_get_dataset_storage_size(void)
+{
+    TESTING("H5Dget_storage_size");
+
+    /* Coverage for H5Dget_storage_size is not implemented yet; skip unconditionally */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to check that a dataset's storage size can't
+ * be retrieved when H5Dget_storage_size is passed
+ * invalid parameters.
+ */
+static int
+test_get_dataset_storage_size_invalid_params(void)
+{
+    TESTING("H5Dget_storage_size with invalid parameters");
+
+    /* Invalid-parameter coverage for H5Dget_storage_size is not implemented yet; skip */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test for H5Dget_chunk_storage_size.
+ */
+static int
+test_get_dataset_chunk_storage_size(void)
+{
+    TESTING("H5Dget_chunk_storage_size");
+
+    /* Coverage for H5Dget_chunk_storage_size is not implemented yet; skip unconditionally */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to check that the size of an allocated chunk in
+ * a dataset can't be retrieved when H5Dget_chunk_storage_size
+ * is passed invalid parameters.
+ */
+static int
+test_get_dataset_chunk_storage_size_invalid_params(void)
+{
+    TESTING("H5Dget_chunk_storage_size with invalid parameters");
+
+    /* Invalid-parameter coverage for H5Dget_chunk_storage_size is not implemented yet; skip */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test for H5Dget_offset.
+ */
+static int
+test_get_dataset_offset(void)
+{
+    TESTING("H5Dget_offset");
+
+    /* Coverage for H5Dget_offset is not implemented yet; skip unconditionally */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to check that a dataset's offset can't be
+ * retrieved when H5Dget_offset is passed invalid
+ * parameters.
+ */
+static int
+test_get_dataset_offset_invalid_params(void)
+{
+    TESTING("H5Dget_offset with invalid parameters");
+
+    /* Invalid-parameter coverage for H5Dget_offset is not implemented yet; skip */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to check that a small amount of data can be
+ * read back from a dataset using an H5S_ALL selection.
+ */
+static int
+test_read_dataset_small_all(void)
+{
+    hsize_t dims[DATASET_SMALL_READ_TEST_ALL_DSET_SPACE_RANK] = {10, 5, 3};
+    size_t  i, data_size;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id   = H5I_INVALID_HID;
+    hid_t   fspace_id = H5I_INVALID_HID;
+    void   *read_buf  = NULL;
+
+    TESTING("small read from dataset with H5S_ALL");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_SMALL_READ_TEST_ALL_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATASET_SMALL_READ_TEST_ALL_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Screate_simple(DATASET_SMALL_READ_TEST_ALL_DSET_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_SMALL_READ_TEST_ALL_DSET_NAME,
+                              DATASET_SMALL_READ_TEST_ALL_DSET_DTYPE, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
+                              H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_SMALL_READ_TEST_ALL_DSET_NAME);
+        goto error;
+    }
+
+    /* Buffer size = product of all dims * element size (10 * 5 * 3 elements) */
+    for (i = 0, data_size = 1; i < DATASET_SMALL_READ_TEST_ALL_DSET_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_SMALL_READ_TEST_ALL_DSET_DTYPESIZE;
+
+    if (NULL == (read_buf = HDmalloc(data_size)))
+        TEST_ERROR;
+
+    /* The dataset is never written to; this only checks that the read call
+     * itself succeeds - the returned values are not verified.
+     */
+    if (H5Dread(dset_id, DATASET_SMALL_READ_TEST_ALL_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_buf) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't read from dataset '%s'\n", DATASET_SMALL_READ_TEST_ALL_DSET_NAME);
+        goto error;
+    }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a small amount of data can be
+ * read back from a dataset using a hyperslab selection.
+ */
+static int
+test_read_dataset_small_hyperslab(void)
+{
+    hsize_t start[DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_SPACE_RANK];
+    hsize_t stride[DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_SPACE_RANK];
+    hsize_t count[DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_SPACE_RANK];
+    hsize_t block[DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_SPACE_RANK];
+    hsize_t dims[DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_SPACE_RANK] = {10, 5, 3};
+    size_t  i, data_size;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id   = H5I_INVALID_HID;
+    hid_t   mspace_id = H5I_INVALID_HID, fspace_id = H5I_INVALID_HID;
+    void   *read_buf = NULL;
+
+    TESTING("small read from dataset with a hyperslab selection");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_SMALL_READ_TEST_HYPERSLAB_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_SMALL_READ_TEST_HYPERSLAB_GROUP_NAME);
+        goto error;
+    }
+
+    /* Memory space is one rank lower than the file space; with the slab
+     * selection below (last dim restricted to 1) both describe the same
+     * number of elements.
+     */
+    if ((fspace_id = H5Screate_simple(DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+    if ((mspace_id = H5Screate_simple(DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_SPACE_RANK - 1, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_NAME,
+                              DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_NAME);
+        goto error;
+    }
+
+    /* Select the full extent in every dimension... */
+    for (i = 0; i < DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_SPACE_RANK; i++) {
+        start[i]  = 0;
+        stride[i] = 1;
+        count[i]  = dims[i];
+        block[i]  = 1;
+    }
+
+    /* ...except the last, which is restricted to a single slice */
+    count[2] = 1;
+
+    if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0)
+        TEST_ERROR;
+
+    /* Buffer covers only the selected slice: product of all but the last dim */
+    for (i = 0, data_size = 1; i < DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_SPACE_RANK - 1; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_DTYPESIZE;
+
+    if (NULL == (read_buf = HDmalloc(data_size)))
+        TEST_ERROR;
+
+    /* The dataset is never written to; only the success of the read is checked */
+    if (H5Dread(dset_id, DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_DTYPE, mspace_id, fspace_id, H5P_DEFAULT,
+                read_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't read from dataset '%s'\n", DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_NAME);
+        goto error;
+    }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a small amount of data can be
+ * read back from a dataset using a point selection.
+ */
+static int
+test_read_dataset_small_point_selection(void)
+{
+    hsize_t points[DATASET_SMALL_READ_TEST_POINT_SELECTION_NUM_POINTS *
+                   DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK];
+    hsize_t dims[DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK] = {10, 10, 10};
+    hsize_t mspace_dims[] = {DATASET_SMALL_READ_TEST_POINT_SELECTION_NUM_POINTS};
+    size_t  i, data_size;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id   = H5I_INVALID_HID;
+    hid_t   fspace_id = H5I_INVALID_HID;
+    hid_t   mspace_id = H5I_INVALID_HID;
+    void   *data      = NULL;
+
+    TESTING("small read from dataset with a point selection");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_SMALL_READ_TEST_POINT_SELECTION_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_SMALL_READ_TEST_POINT_SELECTION_GROUP_NAME);
+        goto error;
+    }
+
+    /* 1-D memory space with one slot per selected point */
+    if ((fspace_id = H5Screate_simple(DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK, dims, NULL)) <
+        0)
+        TEST_ERROR;
+    if ((mspace_id = H5Screate_simple(1, mspace_dims, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_NAME,
+                              DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_NAME);
+        goto error;
+    }
+
+    data_size = DATASET_SMALL_READ_TEST_POINT_SELECTION_NUM_POINTS *
+                DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_DTYPESIZE;
+
+    if (NULL == (data = HDmalloc(data_size)))
+        TEST_ERROR;
+
+    /* Select the diagonal of the dataset: point i is the coordinate (i, i, i) */
+    for (i = 0; i < DATASET_SMALL_READ_TEST_POINT_SELECTION_NUM_POINTS; i++) {
+        size_t j;
+
+        for (j = 0; j < DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK; j++)
+            points[(i * DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK) + j] = i;
+    }
+
+    if (H5Sselect_elements(fspace_id, H5S_SELECT_SET, DATASET_SMALL_READ_TEST_POINT_SELECTION_NUM_POINTS,
+                           points) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't select points\n");
+        goto error;
+    }
+
+    /* The dataset is never written to; only the success of the read is checked */
+    if (H5Dread(dset_id, DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_DTYPE, mspace_id, fspace_id,
+                H5P_DEFAULT, data) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't read from dataset '%s'\n", DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_NAME);
+        goto error;
+    }
+
+    if (data) {
+        HDfree(data);
+        data = NULL;
+    }
+
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        if (data)
+            HDfree(data);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * Tests point selection I/O with different patterns
+ */
#define DATASET_IO_POINT_DIM_0   6
#define DATASET_IO_POINT_DIM_1   9
#define DATASET_IO_POINT_CDIM_0  4
#define DATASET_IO_POINT_CDIM_1  3
#define DATASET_IO_POINT_NPOINTS 10
/*
 * Fill POINTS with DATASET_IO_POINT_NPOINTS distinct random (row, col)
 * coordinate pairs inside a DATASET_IO_POINT_DIM_0 x DATASET_IO_POINT_DIM_1
 * extent, stored flat as {r0, c0, r1, c1, ...}.  Uniqueness is enforced by
 * rejection sampling: each candidate is re-drawn while it matches an earlier
 * pair.  I and J are caller-provided scratch loop variables.
 *
 * Wrapped in do { ... } while (0) so that `MACRO(p, i, j);` behaves as a
 * single statement and is safe inside an unbraced if/else body.
 */
#define DATASET_IO_POINT_GEN_POINTS(POINTS, I, J)                                                            \
    do {                                                                                                     \
        for ((I) = 0; (I) < DATASET_IO_POINT_NPOINTS; (I)++)                                                 \
            do {                                                                                             \
                (POINTS)[2 * (I)]     = (hsize_t)(rand() % DATASET_IO_POINT_DIM_0);                          \
                (POINTS)[2 * (I) + 1] = (hsize_t)(rand() % DATASET_IO_POINT_DIM_1);                          \
                /* Scan earlier pairs; the scan stops early (J < I) on a duplicate */                        \
                for ((J) = 0; ((J) < (I)) && (((POINTS)[2 * (I)] != (POINTS)[2 * (J)]) ||                    \
                                              ((POINTS)[2 * (I) + 1] != (POINTS)[2 * (J) + 1]));             \
                     (J)++)                                                                                  \
                    ;                                                                                        \
            } while ((J) < (I)); /* re-draw while the candidate duplicates an earlier pair */                \
    } while (0)
/*
 * Exercises dataset point-selection I/O across several memory/file selection
 * combinations -- all <-> points, points <-> points (same and different
 * shapes), and hyperslab <-> points in both directions -- running the whole
 * sequence once on a contiguous dataset and once on a chunked one.
 * file_state mirrors the expected on-disk contents and is verified by a full
 * read-back after every write.
 */
static int
test_dataset_io_point_selections(void)
{
    hid_t   file_id = H5I_INVALID_HID;
    hid_t   dset_id = H5I_INVALID_HID;
    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
    hid_t   mspace_id_full = H5I_INVALID_HID, mspace_id_all = H5I_INVALID_HID, fspace_id = H5I_INVALID_HID;
    hid_t   dcpl_id_chunk = H5I_INVALID_HID;
    hsize_t dims[2]  = {DATASET_IO_POINT_DIM_0, DATASET_IO_POINT_DIM_1};
    hsize_t cdims[2] = {DATASET_IO_POINT_CDIM_0, DATASET_IO_POINT_CDIM_1};
    hsize_t points[DATASET_IO_POINT_NPOINTS * 2];  /* flat (row, col) pairs for the file selection */
    hsize_t points2[DATASET_IO_POINT_NPOINTS * 2]; /* second pair set for differently-shaped memory selections */
    hsize_t npoints = DATASET_IO_POINT_NPOINTS;
    hsize_t start[2]  = {1, 2};
    hsize_t stride[2] = {2, 5};
    hsize_t count[2]  = {2, 1};
    hsize_t block[2]  = {1, 5}; /* hyperslab: 2 rows x 5 cols = NPOINTS elements */
    int     buf_all[DATASET_IO_POINT_DIM_0][DATASET_IO_POINT_DIM_1];
    int     file_state[DATASET_IO_POINT_DIM_0][DATASET_IO_POINT_DIM_1]; /* expected on-disk contents */
    int     erbuf[DATASET_IO_POINT_DIM_0][DATASET_IO_POINT_DIM_1];      /* expected read-buffer contents */
    int     buf_point[DATASET_IO_POINT_NPOINTS];
    hbool_t do_chunk;
    int     i, j;

    TESTING("point selection I/O with all selection in memory and points in file");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
        SKIPPED();
        HDprintf(
            " API functions for basic file, group, or dataset aren't supported with this connector\n");
        return 0;
    }

    /* Create dataspaces and DCPL */
    if ((mspace_id_full = H5Screate_simple(2, dims, NULL)) < 0)
        TEST_ERROR;
    if ((mspace_id_all = H5Screate_simple(1, &npoints, NULL)) < 0)
        TEST_ERROR;
    if ((fspace_id = H5Screate_simple(2, dims, NULL)) < 0)
        TEST_ERROR;
    if ((dcpl_id_chunk = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR;

    /* Enable chunking on chunk DCPL */
    if (H5Pset_chunk(dcpl_id_chunk, 2, cdims) < 0)
        TEST_ERROR;

    /* Open file */
    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
        TEST_ERROR;

    /* Open container group */
    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0)
        TEST_ERROR;

    /* Create group */
    if ((group_id = H5Gcreate2(container_group, DATASET_IO_POINT_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
                               H5P_DEFAULT)) < 0)
        TEST_ERROR;

    /* Perform with and without chunking; the chunked pass breaks out at the bottom */
    for (do_chunk = FALSE;; do_chunk = TRUE) {
        if (do_chunk) {
            TESTING("point selection I/O with all selection in memory and points in file with chunking");

            /* Create chunked dataset */
            if ((dset_id = H5Dcreate2(group_id, DATASET_IO_POINT_DSET_NAME_CHUNK, H5T_NATIVE_INT, fspace_id,
                                      H5P_DEFAULT, dcpl_id_chunk, H5P_DEFAULT)) < 0)
                TEST_ERROR;
        } /* end if */
        else
            /* Create non-chunked dataset */
            if ((dset_id = H5Dcreate2(group_id, DATASET_IO_POINT_DSET_NAME_NOCHUNK, H5T_NATIVE_INT,
                                      fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
                TEST_ERROR;

        /* Fill write buffer */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                buf_all[i][j] = rand();

        /* Write data */
        if (H5Dwrite(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to write entire dataset");

        /* Update file_state */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                file_state[i][j] = buf_all[i][j];

        /* Generate points to read */
        DATASET_IO_POINT_GEN_POINTS(points, i, j);

        /* Select points */
        if (H5Sselect_elements(fspace_id, H5S_SELECT_SET, DATASET_IO_POINT_NPOINTS, points) < 0)
            TEST_ERROR;

        /* Wipe read buffer */
        memset(buf_point, 0, sizeof(buf_point));

        /* Read points to "all" memory buffer */
        if (H5Dread(dset_id, H5T_NATIVE_INT, mspace_id_all, fspace_id, H5P_DEFAULT, buf_point) < 0)
            FAIL_PUTS_ERROR("Failed to read points from dataset to all memory buffer");

        /* Verify data */
        for (i = 0; i < DATASET_IO_POINT_NPOINTS; i++)
            if (buf_point[i] != file_state[points[2 * i]][points[2 * i + 1]])
                FAIL_PUTS_ERROR("Incorrect data read from points to all memory buffer");

        /* Generate points to write */
        DATASET_IO_POINT_GEN_POINTS(points, i, j);

        /* Select points */
        if (H5Sselect_elements(fspace_id, H5S_SELECT_SET, DATASET_IO_POINT_NPOINTS, points) < 0)
            TEST_ERROR;

        /* Fill write buffer */
        for (i = 0; i < DATASET_IO_POINT_NPOINTS; i++)
            buf_point[i] = rand();

        /* Write points from "all" memory buffer */
        if (H5Dwrite(dset_id, H5T_NATIVE_INT, mspace_id_all, fspace_id, H5P_DEFAULT, buf_point) < 0)
            FAIL_PUTS_ERROR("Failed to write points to dataset from all memory buffer");

        /* Update file state */
        for (i = 0; i < DATASET_IO_POINT_NPOINTS; i++)
            file_state[points[2 * i]][points[2 * i + 1]] = buf_point[i];

        /* Wipe read buffer */
        memset(buf_all, 0, sizeof(buf_all));

        /* Read entire dataset */
        if (H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to read entire dataset");

        /* Verify data */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                if (buf_all[i][j] != file_state[i][j])
                    FAIL_PUTS_ERROR("Incorrect data found after writing from all memory buffer to points");

        PASSED();

        if (do_chunk)
            TESTING("point selection I/O with points in memory and file (same shape) with chunking");
        else
            TESTING("point selection I/O with points in memory and file (same shape)");

        /* Generate points to read */
        DATASET_IO_POINT_GEN_POINTS(points, i, j);

        /* Select points */
        if (H5Sselect_elements(fspace_id, H5S_SELECT_SET, DATASET_IO_POINT_NPOINTS, points) < 0)
            TEST_ERROR;

        /* Wipe read buffer */
        memset(buf_all, 0, sizeof(buf_all));

        /* Generate expected read buffer */
        memset(erbuf, 0, sizeof(erbuf));
        for (i = 0; i < DATASET_IO_POINT_NPOINTS; i++)
            erbuf[points[2 * i]][points[2 * i + 1]] = file_state[points[2 * i]][points[2 * i + 1]];

        /* Read data points->points (fspace_id used for both selections) */
        if (H5Dread(dset_id, H5T_NATIVE_INT, fspace_id, fspace_id, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to read points from dataset to points in memory buffer");

        /* Verify data */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                if (buf_all[i][j] != erbuf[i][j])
                    FAIL_PUTS_ERROR("Incorrect data found read from points in file to points in memory");

        /* Generate points to write */
        DATASET_IO_POINT_GEN_POINTS(points, i, j);

        /* Select points */
        if (H5Sselect_elements(fspace_id, H5S_SELECT_SET, DATASET_IO_POINT_NPOINTS, points) < 0)
            TEST_ERROR;

        /* Fill write buffer */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                buf_all[i][j] = rand();

        /* Write data points->points */
        if (H5Dwrite(dset_id, H5T_NATIVE_INT, fspace_id, fspace_id, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to write from in memory to points in dataset");

        /* Update file_state */
        for (i = 0; i < DATASET_IO_POINT_NPOINTS; i++)
            file_state[points[2 * i]][points[2 * i + 1]] = buf_all[points[2 * i]][points[2 * i + 1]];

        /* Wipe read buffer */
        memset(buf_all, 0, sizeof(buf_all));

        /* Read entire dataset */
        if (H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to read entire dataset");

        /* Verify data */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                if (buf_all[i][j] != file_state[i][j])
                    FAIL_PUTS_ERROR(
                        "Incorrect data found after writing from points in memory to points in dataset");

        PASSED();

        if (do_chunk)
            TESTING("point selection I/O with points in memory and file (different shape) with chunking");
        else
            TESTING("point selection I/O with points in memory and file (different shape)");

        /* Generate points to read */
        DATASET_IO_POINT_GEN_POINTS(points, i, j);
        DATASET_IO_POINT_GEN_POINTS(points2, i, j);

        /* Select points: points in the file space, points2 in the memory space */
        if (H5Sselect_elements(fspace_id, H5S_SELECT_SET, DATASET_IO_POINT_NPOINTS, points) < 0)
            TEST_ERROR;
        if (H5Sselect_elements(mspace_id_full, H5S_SELECT_SET, DATASET_IO_POINT_NPOINTS, points2) < 0)
            TEST_ERROR;

        /* Wipe read buffer */
        memset(buf_all, 0, sizeof(buf_all));

        /* Generate expected read buffer: file point i lands at memory point i */
        memset(erbuf, 0, sizeof(erbuf));
        for (i = 0; i < DATASET_IO_POINT_NPOINTS; i++)
            erbuf[points2[2 * i]][points2[2 * i + 1]] = file_state[points[2 * i]][points[2 * i + 1]];

        /* Read data points->points */
        if (H5Dread(dset_id, H5T_NATIVE_INT, mspace_id_full, fspace_id, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to read points from dataset to points in memory buffer");

        /* Verify data */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                if (buf_all[i][j] != erbuf[i][j])
                    FAIL_PUTS_ERROR(
                        "Incorrect data found after reading from points in file to points in memory");

        /* Generate points to write */
        DATASET_IO_POINT_GEN_POINTS(points, i, j);
        DATASET_IO_POINT_GEN_POINTS(points2, i, j);

        /* Select points */
        if (H5Sselect_elements(fspace_id, H5S_SELECT_SET, DATASET_IO_POINT_NPOINTS, points) < 0)
            TEST_ERROR;
        if (H5Sselect_elements(mspace_id_full, H5S_SELECT_SET, DATASET_IO_POINT_NPOINTS, points2) < 0)
            TEST_ERROR;

        /* Fill write buffer */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                buf_all[i][j] = rand();

        /* Write data points->points */
        if (H5Dwrite(dset_id, H5T_NATIVE_INT, mspace_id_full, fspace_id, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to write from points in memory to points in dataset");

        /* Update file_state: memory point i was written to file point i */
        for (i = 0; i < DATASET_IO_POINT_NPOINTS; i++)
            file_state[points[2 * i]][points[2 * i + 1]] = buf_all[points2[2 * i]][points2[2 * i + 1]];

        /* Wipe read buffer */
        memset(buf_all, 0, sizeof(buf_all));

        /* Read entire dataset */
        if (H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to read entire dataset");

        /* Verify data */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                if (buf_all[i][j] != file_state[i][j])
                    FAIL_PUTS_ERROR(
                        "Incorrect data found after writing from points in memory to points in dataset");

        PASSED();

        if (do_chunk)
            TESTING("point selection I/O with hyperslab in memory and points in file with chunking");
        else
            TESTING("point selection I/O with hyperslab in memory and points in file");

        /* Generate points to read */
        DATASET_IO_POINT_GEN_POINTS(points, i, j);

        /* Select points */
        if (H5Sselect_elements(fspace_id, H5S_SELECT_SET, DATASET_IO_POINT_NPOINTS, points) < 0)
            TEST_ERROR;

        /* Select hyperslab */
        if (H5Sselect_hyperslab(mspace_id_full, H5S_SELECT_SET, start, stride, count, block) < 0)
            TEST_ERROR;

        /* Wipe read buffer */
        memset(buf_all, 0, sizeof(buf_all));

        /* Generate expected read buffer.  The i-th selected element maps to
         * row start[0] + stride[0] * (i / block[1]) and column
         * start[1] + (i % block[1]) of the hyperslab in row-major order. */
        memset(erbuf, 0, sizeof(erbuf));
        for (i = 0; i < DATASET_IO_POINT_NPOINTS; i++)
            erbuf[start[0] + (stride[0] * ((hsize_t)i / block[1]))][start[1] + ((hsize_t)i % block[1])] =
                file_state[points[2 * i]][points[2 * i + 1]];

        /* Read data points->hslab */
        if (H5Dread(dset_id, H5T_NATIVE_INT, mspace_id_full, fspace_id, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to read points from dataset to hyperslab in memory buffer");

        /* Verify data */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                if (buf_all[i][j] != erbuf[i][j])
                    FAIL_PUTS_ERROR(
                        "Incorrect data found after reading from points in file to hyperslab in memory");

        /* Generate points to write */
        DATASET_IO_POINT_GEN_POINTS(points, i, j);

        /* Select points */
        if (H5Sselect_elements(fspace_id, H5S_SELECT_SET, DATASET_IO_POINT_NPOINTS, points) < 0)
            TEST_ERROR;

        /* Fill write buffer */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                buf_all[i][j] = rand();

        /* Write data hlsab->points */
        if (H5Dwrite(dset_id, H5T_NATIVE_INT, mspace_id_full, fspace_id, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to write from hyperslab in memory to points in dataset");

        /* Update file_state */
        for (i = 0; i < DATASET_IO_POINT_NPOINTS; i++)
            file_state[points[2 * i]][points[2 * i + 1]] =
                buf_all[start[0] + (stride[0] * ((hsize_t)i / block[1]))][start[1] + ((hsize_t)i % block[1])];

        /* Wipe read buffer */
        memset(buf_all, 0, sizeof(buf_all));

        /* Read entire dataset */
        if (H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to read entire dataset");

        /* Verify data */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                if (buf_all[i][j] != file_state[i][j])
                    FAIL_PUTS_ERROR(
                        "Incorrect data found after writing from hyperslab in memory to points in dataset");

        PASSED();

        if (do_chunk)
            TESTING("point selection I/O with points in memory and hyperslab in file with chunking");
        else
            TESTING("point selection I/O with points in memory and hyperslab in file");

        /* Generate points to read */
        DATASET_IO_POINT_GEN_POINTS(points, i, j);

        /* Select points */
        if (H5Sselect_elements(mspace_id_full, H5S_SELECT_SET, DATASET_IO_POINT_NPOINTS, points) < 0)
            TEST_ERROR;

        /* Select hyperslab */
        if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0)
            TEST_ERROR;

        /* Wipe read buffer */
        memset(buf_all, 0, sizeof(buf_all));

        /* Generate expected read buffer */
        memset(erbuf, 0, sizeof(erbuf));
        for (i = 0; i < DATASET_IO_POINT_NPOINTS; i++)
            erbuf[points[2 * i]][points[2 * i + 1]] =
                file_state[start[0] + (stride[0] * ((hsize_t)i / block[1]))]
                          [start[1] + ((hsize_t)i % block[1])];

        /* Read data hslab->points */
        if (H5Dread(dset_id, H5T_NATIVE_INT, mspace_id_full, fspace_id, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to read hyperslab from dataset to points in memory buffer");

        /* Verify data */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                if (buf_all[i][j] != erbuf[i][j])
                    FAIL_PUTS_ERROR(
                        "Incorrect data found after reading from hyperslab in file to points in memory");

        /* Generate points to write */
        DATASET_IO_POINT_GEN_POINTS(points, i, j);

        /* Select points */
        if (H5Sselect_elements(mspace_id_full, H5S_SELECT_SET, DATASET_IO_POINT_NPOINTS, points) < 0)
            TEST_ERROR;

        /* Fill write buffer */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                buf_all[i][j] = rand();

        /* Write data points->hslab */
        if (H5Dwrite(dset_id, H5T_NATIVE_INT, mspace_id_full, fspace_id, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to write from points in memory to hyperslab in dataset");

        /* Update file_state */
        for (i = 0; i < DATASET_IO_POINT_NPOINTS; i++)
            file_state[start[0] + (stride[0] * ((hsize_t)i / block[1]))][start[1] + ((hsize_t)i % block[1])] =
                buf_all[points[2 * i]][points[2 * i + 1]];

        /* Wipe read buffer */
        memset(buf_all, 0, sizeof(buf_all));

        /* Read entire dataset */
        if (H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_all) < 0)
            FAIL_PUTS_ERROR("Failed to read entire dataset");

        /* Verify data */
        for (i = 0; i < DATASET_IO_POINT_DIM_0; i++)
            for (j = 0; j < DATASET_IO_POINT_DIM_1; j++)
                if (buf_all[i][j] != file_state[i][j])
                    FAIL_PUTS_ERROR(
                        "Incorrect data found after writing from points in memory to hyperslab in dataset");

        /* PASSED() for the chunked pass is emitted after the loop */
        if (!do_chunk)
            PASSED();

        /* Close dataset */
        if (H5Dclose(dset_id) < 0)
            TEST_ERROR;

        /* Exit after chunked run */
        if (do_chunk)
            break;
    } /* end for */

    /* Close */
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;
    if (H5Pclose(dcpl_id_chunk) < 0)
        TEST_ERROR;
    if (H5Sclose(mspace_id_full) < 0)
        TEST_ERROR;
    if (H5Sclose(mspace_id_all) < 0)
        TEST_ERROR;
    if (H5Sclose(fspace_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    H5E_BEGIN_TRY
    {
        H5Sclose(fspace_id);
        H5Sclose(mspace_id_full);
        H5Sclose(mspace_id_all);
        H5Pclose(dcpl_id_chunk);
        H5Dclose(dset_id);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
} /* end test_dataset_io_point_selections() */
+
+#ifndef NO_LARGE_TESTS
+/*
+ * A test to check that a large amount of data can be
+ * read back from a dataset using an H5S_ALL selection.
+ */
+static int
+test_read_dataset_large_all(void)
+{
+ hsize_t dims[DATASET_LARGE_READ_TEST_ALL_DSET_SPACE_RANK] = {600, 600, 600};
+ size_t i, data_size;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ void *read_buf = NULL;
+
+ TESTING("large read from dataset with H5S_ALL");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_LARGE_READ_TEST_ALL_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n", DATASET_LARGE_READ_TEST_ALL_GROUP_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Screate_simple(DATASET_LARGE_READ_TEST_ALL_DSET_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_LARGE_READ_TEST_ALL_DSET_NAME,
+ DATASET_LARGE_READ_TEST_ALL_DSET_DTYPE, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_LARGE_READ_TEST_ALL_DSET_NAME);
+ goto error;
+ }
+
+ for (i = 0, data_size = 1; i < DATASET_LARGE_READ_TEST_ALL_DSET_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_LARGE_READ_TEST_ALL_DSET_DTYPESIZE;
+
+ if (NULL == (read_buf = HDmalloc(data_size)))
+ TEST_ERROR;
+
+ if (H5Dread(dset_id, DATASET_LARGE_READ_TEST_ALL_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_buf) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_LARGE_READ_TEST_ALL_DSET_NAME);
+ goto error;
+ }
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (read_buf)
+ HDfree(read_buf);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a large amount of data can be
+ * read back from a dataset using a hyperslab selection.
+ */
+static int
+test_read_dataset_large_hyperslab(void)
+{
+ hsize_t start[DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK];
+ hsize_t stride[DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK];
+ hsize_t count[DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK];
+ hsize_t block[DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK];
+ hsize_t dims[DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK] = {600, 600, 600};
+ size_t i, data_size;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID, fspace_id = H5I_INVALID_HID;
+ void *read_buf = NULL;
+
+ TESTING("large read from dataset with a hyperslab selection");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_LARGE_READ_TEST_HYPERSLAB_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n",
+ DATASET_LARGE_READ_TEST_HYPERSLAB_GROUP_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Screate_simple(DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+ if ((mspace_id = H5Screate_simple(DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_NAME,
+ DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK; i++) {
+ start[i] = 0;
+ stride[i] = 1;
+ count[i] = dims[i];
+ block[i] = 1;
+ }
+
+ if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0)
+ TEST_ERROR;
+
+ for (i = 0, data_size = 1; i < DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_DTYPESIZE;
+
+ if (NULL == (read_buf = HDmalloc(data_size)))
+ TEST_ERROR;
+
+ if (H5Dread(dset_id, DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_DTYPE, mspace_id, fspace_id, H5P_DEFAULT,
+ read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_NAME);
+ goto error;
+ }
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (H5Sclose(mspace_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(mspace_id);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a large amount of data can be
+ * read back from a dataset using a large point selection.
+ *
+ * XXX: Test takes up significant amounts of memory.
+ */
+static int
+test_read_dataset_large_point_selection(void)
+{
+ hsize_t *points = NULL;
+ hsize_t dims[DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK] = {225000000};
+ size_t i, data_size;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ void *data = NULL;
+
+ TESTING("large read from dataset with a point selection");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_LARGE_READ_TEST_POINT_SELECTION_GROUP_NAME,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n",
+ DATASET_LARGE_READ_TEST_POINT_SELECTION_GROUP_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Screate_simple(DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK, dims, NULL)) <
+ 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_NAME,
+ DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_NAME);
+ goto error;
+ }
+
+ for (i = 0, data_size = 1; i < DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPESIZE;
+
+ if (NULL == (data = HDmalloc(data_size)))
+ TEST_ERROR;
+ if (NULL ==
+ (points = HDmalloc((data_size / DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPESIZE) *
+ ((DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK) * (sizeof(hsize_t))))))
+ TEST_ERROR;
+
+ /* Select the entire dataspace */
+ for (i = 0; i < data_size / DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPESIZE; i++) {
+ points[i] = i;
+ }
+
+ if (H5Sselect_elements(fspace_id, H5S_SELECT_SET,
+ data_size / DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPESIZE, points) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't select points\n");
+ goto error;
+ }
+
+ if (H5Dread(dset_id, DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPE, H5S_ALL, fspace_id, H5P_DEFAULT,
+ data) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_NAME);
+ goto error;
+ }
+
+ if (data) {
+ HDfree(data);
+ data = NULL;
+ }
+
+ if (points) {
+ HDfree(points);
+ points = NULL;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (data)
+ HDfree(data);
+ if (points)
+ HDfree(points);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+#endif
+
+/*
+ * A test to check that data can't be read from a
+ * dataset when H5Dread is passed invalid parameters.
+ */
+static int
+test_read_dataset_invalid_params(void)
+{
+ hsize_t dims[DATASET_READ_INVALID_PARAMS_TEST_DSET_SPACE_RANK] = {10, 5, 3};
+ herr_t err_ret = -1;
+ size_t i, data_size;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ void *read_buf = NULL;
+
+ TESTING_MULTIPART("H5Dread with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_READ_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n",
+ DATASET_READ_INVALID_PARAMS_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Screate_simple(DATASET_READ_INVALID_PARAMS_TEST_DSET_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_READ_INVALID_PARAMS_TEST_DSET_NAME,
+ DATASET_READ_INVALID_PARAMS_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_READ_INVALID_PARAMS_TEST_DSET_NAME);
+ goto error;
+ }
+
+ for (i = 0, data_size = 1; i < DATASET_READ_INVALID_PARAMS_TEST_DSET_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_READ_INVALID_PARAMS_TEST_DSET_DTYPESIZE;
+
+ if (NULL == (read_buf = HDmalloc(data_size)))
+ TEST_ERROR;
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Dread_invalid_dset_id)
+ {
+ TESTING_2("H5Dread with an invalid dataset ID");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Dread(H5I_INVALID_HID, DATASET_READ_INVALID_PARAMS_TEST_DSET_DTYPE, H5S_ALL,
+ H5S_ALL, H5P_DEFAULT, read_buf);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" read from dataset using H5Dread with an invalid dataset ID!\n");
+ PART_ERROR(H5Dread_invalid_dset_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dread_invalid_dset_id);
+
+ PART_BEGIN(H5Dread_invalid_datatype)
+ {
+ TESTING_2("H5Dread with an invalid memory datatype");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Dread(dset_id, H5I_INVALID_HID, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_buf);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" read from dataset using H5Dread with an invalid memory datatype!\n");
+ PART_ERROR(H5Dread_invalid_datatype);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dread_invalid_datatype);
+
+ PART_BEGIN(H5Dread_invalid_mem_dataspace)
+ {
+ TESTING_2("H5Dread with an invalid memory dataspace");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Dread(dset_id, DATASET_READ_INVALID_PARAMS_TEST_DSET_DTYPE, H5I_INVALID_HID,
+ H5S_ALL, H5P_DEFAULT, read_buf);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" read from dataset using H5Dread with an invalid memory dataspace!\n");
+ PART_ERROR(H5Dread_invalid_mem_dataspace);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dread_invalid_mem_dataspace);
+
+ PART_BEGIN(H5Dread_invalid_file_dataspace)
+ {
+ TESTING_2("H5Dread with an invalid file dataspace");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Dread(dset_id, DATASET_READ_INVALID_PARAMS_TEST_DSET_DTYPE, H5S_ALL,
+ H5I_INVALID_HID, H5P_DEFAULT, read_buf);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" read from dataset using H5Dread with an invalid file dataspace!\n");
+ PART_ERROR(H5Dread_invalid_file_dataspace);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dread_invalid_file_dataspace);
+
+ PART_BEGIN(H5Dread_invalid_dxpl)
+ {
+ TESTING_2("H5Dread with an invalid DXPL");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Dread(dset_id, DATASET_READ_INVALID_PARAMS_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL,
+ H5I_INVALID_HID, read_buf);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" read from dataset using H5Dread with an invalid DXPL!\n");
+ PART_ERROR(H5Dread_invalid_dxpl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dread_invalid_dxpl);
+
+ PART_BEGIN(H5Dread_invalid_data_buf)
+ {
+ TESTING_2("H5Dread with an invalid data buffer");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Dread(dset_id, DATASET_READ_INVALID_PARAMS_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL,
+ H5P_DEFAULT, NULL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" read from dataset using H5Dread with an invalid data buffer!\n");
+ PART_ERROR(H5Dread_invalid_data_buf);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dread_invalid_data_buf);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (read_buf)
+ HDfree(read_buf);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a small write can be
+ * made to a dataset using an H5S_ALL selection.
+ */
+static int
+test_write_dataset_small_all(void)
+{
+ hssize_t space_npoints;
+ hsize_t dims[DATASET_SMALL_WRITE_TEST_ALL_DSET_SPACE_RANK] = {10, 5, 3};
+ size_t i;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ void *data = NULL;
+
+ TESTING("small write to dataset with H5S_ALL");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_SMALL_WRITE_TEST_ALL_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n", DATASET_SMALL_WRITE_TEST_ALL_GROUP_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Screate_simple(DATASET_SMALL_WRITE_TEST_ALL_DSET_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_SMALL_WRITE_TEST_ALL_DSET_NAME,
+ DATASET_SMALL_WRITE_TEST_ALL_DSET_DTYPE, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_SMALL_WRITE_TEST_ALL_DSET_NAME);
+ goto error;
+ }
+
+ /* Close the dataset and dataspace to ensure that writing works correctly in this manner */
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_SMALL_WRITE_TEST_ALL_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_SMALL_WRITE_TEST_ALL_DSET_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ goto error;
+ }
+
+ if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataspace num points\n");
+ goto error;
+ }
+
+ if (NULL == (data = HDmalloc((hsize_t)space_npoints * DATASET_SMALL_WRITE_TEST_ALL_DSET_DTYPESIZE)))
+ TEST_ERROR;
+
+ for (i = 0; i < (hsize_t)space_npoints; i++)
+ ((int *)data)[i] = (int)i;
+
+ if (H5Dwrite(dset_id, DATASET_SMALL_WRITE_TEST_ALL_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT, data) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n", DATASET_SMALL_WRITE_TEST_ALL_DSET_NAME);
+ goto error;
+ }
+
+ if (data) {
+ HDfree(data);
+ data = NULL;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (data)
+ HDfree(data);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a small write can be made
+ * to a dataset using a hyperslab selection.
+ */
static int
test_write_dataset_small_hyperslab(void)
{
    hsize_t start[DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK];
    hsize_t stride[DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK];
    hsize_t count[DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK];
    hsize_t block[DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK];
    hsize_t dims[DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK] = {10, 5, 3};
    size_t i, data_size;
    hid_t file_id = H5I_INVALID_HID;
    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
    hid_t dset_id = H5I_INVALID_HID;
    hid_t mspace_id = H5I_INVALID_HID, fspace_id = H5I_INVALID_HID;
    void *data = NULL;

    TESTING("small write to dataset with a hyperslab selection");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
        SKIPPED();
        HDprintf(
            " API functions for basic file, group, or dataset aren't supported with this connector\n");
        return 0;
    }

    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
        goto error;
    }

    if ((group_id = H5Gcreate2(container_group, DATASET_SMALL_WRITE_TEST_HYPERSLAB_GROUP_NAME, H5P_DEFAULT,
                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create container sub-group '%s'\n",
                 DATASET_SMALL_WRITE_TEST_HYPERSLAB_GROUP_NAME);
        goto error;
    }

    /* File dataspace is rank 3 (10x5x3); the memory dataspace is rank 2
     * (10x5, from the first two entries of dims), so the file-side hyperslab
     * selection below must also contain exactly 10*5 = 50 elements */
    if ((fspace_id = H5Screate_simple(DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK, dims, NULL)) < 0)
        TEST_ERROR;
    if ((mspace_id = H5Screate_simple(DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK - 1, dims, NULL)) <
        0)
        TEST_ERROR;

    if ((dset_id = H5Dcreate2(group_id, DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_NAME,
                              DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_DTYPE, fspace_id, H5P_DEFAULT,
                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create dataset '%s'\n", DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_NAME);
        goto error;
    }

    /* Buffer size matches the rank-2 memory dataspace: product of the first
     * (RANK - 1) dimensions times the element size */
    for (i = 0, data_size = 1; i < DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK - 1; i++)
        data_size *= dims[i];
    data_size *= DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_DTYPESIZE;

    if (NULL == (data = HDmalloc(data_size)))
        TEST_ERROR;

    for (i = 0; i < data_size / DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_DTYPESIZE; i++)
        ((int *)data)[i] = (int)i;

    /* Start with a selection covering the whole extent ... */
    for (i = 0; i < DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK; i++) {
        start[i] = 0;
        stride[i] = 1;
        count[i] = dims[i];
        block[i] = 1;
    }

    /* ... then restrict the fastest-varying dimension to a single element so
     * the file selection is a 10x5x1 slab (50 points), matching the memory
     * dataspace */
    count[2] = 1;

    if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0)
        TEST_ERROR;

    if (H5Dwrite(dset_id, DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_DTYPE, mspace_id, fspace_id, H5P_DEFAULT,
                 data) < 0) {
        H5_FAILED();
        HDprintf(" couldn't write to dataset '%s'\n", DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_NAME);
        goto error;
    }

    if (data) {
        HDfree(data);
        data = NULL;
    }

    if (H5Sclose(mspace_id) < 0)
        TEST_ERROR;
    if (H5Sclose(fspace_id) < 0)
        TEST_ERROR;
    if (H5Dclose(dset_id) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup; errors during cleanup are suppressed */
    H5E_BEGIN_TRY
    {
        if (data)
            HDfree(data);
        H5Sclose(mspace_id);
        H5Sclose(fspace_id);
        H5Dclose(dset_id);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
+/*
+ * A test to check that a small write can be made
+ * to a dataset using a point selection.
+ */
static int
test_write_dataset_small_point_selection(void)
{
    /* Flattened coordinate list: NUM_POINTS points, each with
     * DSET_SPACE_RANK coordinates */
    hsize_t points[DATASET_SMALL_WRITE_TEST_POINT_SELECTION_NUM_POINTS *
                   DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_SPACE_RANK];
    hsize_t dims[DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_SPACE_RANK] = {10, 10, 10};
    hsize_t mdims[] = {DATASET_SMALL_WRITE_TEST_POINT_SELECTION_NUM_POINTS};
    size_t i, data_size;
    hid_t file_id = H5I_INVALID_HID;
    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
    hid_t dset_id = H5I_INVALID_HID;
    hid_t fspace_id = H5I_INVALID_HID;
    hid_t mspace_id = H5I_INVALID_HID;
    void *data = NULL;

    TESTING("small write to dataset with a point selection");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
        SKIPPED();
        HDprintf(
            " API functions for basic file, group, or dataset aren't supported with this connector\n");
        return 0;
    }

    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
        goto error;
    }

    if ((group_id = H5Gcreate2(container_group, DATASET_SMALL_WRITE_TEST_POINT_SELECTION_GROUP_NAME,
                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create container sub-group '%s'\n",
                 DATASET_SMALL_WRITE_TEST_POINT_SELECTION_GROUP_NAME);
        goto error;
    }

    /* Memory dataspace is 1-D with one element per selected point */
    if ((fspace_id = H5Screate_simple(DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_SPACE_RANK, dims, NULL)) <
        0)
        TEST_ERROR;
    if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0)
        TEST_ERROR;

    if ((dset_id = H5Dcreate2(group_id, DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_NAME,
                              DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_DTYPE, fspace_id, H5P_DEFAULT,
                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create dataset '%s'\n", DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_NAME);
        goto error;
    }

    data_size = DATASET_SMALL_WRITE_TEST_POINT_SELECTION_NUM_POINTS *
                DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_DTYPESIZE;

    if (NULL == (data = HDmalloc(data_size)))
        TEST_ERROR;

    for (i = 0; i < data_size / DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_DTYPESIZE; i++)
        ((int *)data)[i] = (int)i;

    /* Select the points (i, i, i) along the main diagonal of the dataset */
    for (i = 0; i < DATASET_SMALL_WRITE_TEST_POINT_SELECTION_NUM_POINTS; i++) {
        size_t j;

        for (j = 0; j < DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_SPACE_RANK; j++)
            points[(i * DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_SPACE_RANK) + j] = i;
    }

    if (H5Sselect_elements(fspace_id, H5S_SELECT_SET, DATASET_SMALL_WRITE_TEST_POINT_SELECTION_NUM_POINTS,
                           points) < 0) {
        H5_FAILED();
        HDprintf(" couldn't select points\n");
        goto error;
    }

    if (H5Dwrite(dset_id, DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_DTYPE, mspace_id, fspace_id,
                 H5P_DEFAULT, data) < 0) {
        H5_FAILED();
        HDprintf(" couldn't write to dataset '%s'\n", DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_NAME);
        goto error;
    }

    if (data) {
        HDfree(data);
        data = NULL;
    }

    if (H5Sclose(mspace_id) < 0)
        TEST_ERROR;
    if (H5Sclose(fspace_id) < 0)
        TEST_ERROR;
    if (H5Dclose(dset_id) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup; errors during cleanup are suppressed */
    H5E_BEGIN_TRY
    {
        if (data)
            HDfree(data);
        H5Sclose(mspace_id);
        H5Sclose(fspace_id);
        H5Dclose(dset_id);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
+#ifndef NO_LARGE_TESTS
+/*
+ * A test to check that a large write can be made
+ * to a dataset using an H5S_ALL selection.
+ */
+static int
+test_write_dataset_large_all(void)
+{
+ hssize_t space_npoints;
+ hsize_t dims[DATASET_LARGE_WRITE_TEST_ALL_DSET_SPACE_RANK] = {600, 600, 600};
+ size_t i, data_size;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ void *data = NULL;
+
+ TESTING("large write to dataset with H5S_ALL");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, basic or more dataset aren't supported with this "
+ "connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_LARGE_WRITE_TEST_ALL_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n", DATASET_LARGE_WRITE_TEST_ALL_GROUP_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Screate_simple(DATASET_LARGE_WRITE_TEST_ALL_DSET_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_LARGE_WRITE_TEST_ALL_DSET_NAME,
+ DATASET_LARGE_WRITE_TEST_ALL_DSET_DTYPE, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_LARGE_WRITE_TEST_ALL_DSET_NAME);
+ goto error;
+ }
+
+ /* Close the dataset and dataspace to ensure that retrieval of file space ID is working */
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_LARGE_WRITE_TEST_ALL_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_LARGE_WRITE_TEST_ALL_DSET_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ goto error;
+ }
+
+ if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataspace num points\n");
+ goto error;
+ }
+
+ if (NULL == (data = HDmalloc((hsize_t)space_npoints * DATASET_LARGE_WRITE_TEST_ALL_DSET_DTYPESIZE)))
+ TEST_ERROR;
+
+ for (i = 0; i < (hsize_t)space_npoints; i++)
+ ((int *)data)[i] = (int)i;
+
+ if (H5Dwrite(dset_id, DATASET_LARGE_WRITE_TEST_ALL_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT, data) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n", DATASET_LARGE_WRITE_TEST_ALL_DSET_NAME);
+ goto error;
+ }
+
+ if (data) {
+ HDfree(data);
+ data = NULL;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a large write can be made
+ * to a dataset using a hyperslab selection.
+ */
static int
test_write_dataset_large_hyperslab(void)
{
    hsize_t start[DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK];
    hsize_t stride[DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK];
    hsize_t count[DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK];
    hsize_t block[DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK];
    hsize_t dims[DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK] = {600, 600, 600};
    size_t i, data_size;
    hid_t file_id = H5I_INVALID_HID;
    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
    hid_t dset_id = H5I_INVALID_HID;
    hid_t mspace_id = H5I_INVALID_HID, fspace_id = H5I_INVALID_HID;
    void *data = NULL;

    TESTING("large write to dataset with a hyperslab selection");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
        SKIPPED();
        HDprintf(
            " API functions for basic file, group, or dataset aren't supported with this connector\n");
        return 0;
    }

    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
        goto error;
    }

    if ((group_id = H5Gcreate2(container_group, DATASET_LARGE_WRITE_TEST_HYPERSLAB_GROUP_NAME, H5P_DEFAULT,
                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create container sub-group '%s'\n",
                 DATASET_LARGE_WRITE_TEST_HYPERSLAB_GROUP_NAME);
        goto error;
    }

    /* Memory and file dataspaces have the same rank and extent here (unlike
     * the small hyperslab test): the hyperslab below covers the whole
     * 600x600x600 dataset */
    if ((fspace_id = H5Screate_simple(DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK, dims, NULL)) < 0)
        TEST_ERROR;
    if ((mspace_id = H5Screate_simple(DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK, dims, NULL)) < 0)
        TEST_ERROR;

    if ((dset_id = H5Dcreate2(group_id, DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_NAME,
                              DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_DTYPE, fspace_id, H5P_DEFAULT,
                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create dataset '%s'\n", DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_NAME);
        goto error;
    }

    /* Buffer covers the full extent: product of all dimensions times the
     * element size */
    for (i = 0, data_size = 1; i < DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK; i++)
        data_size *= dims[i];
    data_size *= DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_DTYPESIZE;

    if (NULL == (data = HDmalloc(data_size)))
        TEST_ERROR;

    for (i = 0; i < data_size / DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_DTYPESIZE; i++)
        ((int *)data)[i] = (int)i;

    /* Select the entire dataset as one hyperslab (count = dims,
     * stride/block = 1) */
    for (i = 0; i < DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK; i++) {
        start[i] = 0;
        stride[i] = 1;
        count[i] = dims[i];
        block[i] = 1;
    }

    if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0)
        TEST_ERROR;

    if (H5Dwrite(dset_id, DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_DTYPE, mspace_id, fspace_id, H5P_DEFAULT,
                 data) < 0) {
        H5_FAILED();
        HDprintf(" couldn't write to dataset '%s'\n", DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_NAME);
        goto error;
    }

    if (data) {
        HDfree(data);
        data = NULL;
    }

    if (H5Sclose(mspace_id) < 0)
        TEST_ERROR;
    if (H5Sclose(fspace_id) < 0)
        TEST_ERROR;
    if (H5Dclose(dset_id) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup; errors during cleanup are suppressed */
    H5E_BEGIN_TRY
    {
        if (data)
            HDfree(data);
        H5Sclose(mspace_id);
        H5Sclose(fspace_id);
        H5Dclose(dset_id);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
+/*
+ * A test to check that a large write can be made
+ * to a dataset using a point selection.
+ */
static int
test_write_dataset_large_point_selection(void)
{
    TESTING("large write to dataset with a point selection");

    /* This test is not yet implemented; it unconditionally reports SKIPPED.
     * The unused "error:" label (nothing in this function does a goto) was
     * removed to avoid a -Wunused-label warning. */
    SKIPPED();

    return 0;
}
+#endif
+
+/*
+ * A test to ensure that data is read back correctly from
+ * a dataset after it has been written.
+ */
+static int
+test_write_dataset_data_verification(void)
+{
+ hssize_t space_npoints;
+ hsize_t dims[DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK] = {10, 10, 10};
+ hsize_t start[DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK];
+ hsize_t stride[DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK];
+ hsize_t count[DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK];
+ hsize_t block[DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK];
+ hsize_t
+ points[DATASET_DATA_VERIFY_WRITE_TEST_NUM_POINTS * DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK];
+ size_t i, data_size;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ void *data = NULL;
+ void *write_buf = NULL;
+ void *read_buf = NULL;
+
+ TESTING_MULTIPART("verification of dataset data using H5Dwrite then H5Dread");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, basic or more dataset aren't supported with this "
+ "connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_DATA_VERIFY_WRITE_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n", DATASET_DATA_VERIFY_WRITE_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Screate_simple(DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME,
+ DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME);
+ goto error;
+ }
+
+ for (i = 0, data_size = 1; i < DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPESIZE;
+
+ if (NULL == (data = HDmalloc(data_size)))
+ TEST_ERROR;
+
+ for (i = 0; i < data_size / DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPESIZE; i++)
+ ((int *)data)[i] = (int)i;
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Dwrite_all_read)
+ {
+ TESTING_2("H5Dwrite using H5S_ALL then H5Dread");
+
+ if (H5Dwrite(dset_id, DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n", DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME);
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if (data) {
+ HDfree(data);
+ data = NULL;
+ }
+
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME);
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataspace num points\n");
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if (NULL ==
+ (data = HDmalloc((hsize_t)space_npoints * DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPESIZE))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if (H5Dread(dset_id, DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME);
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ for (i = 0; i < (hsize_t)space_npoints; i++)
+ if (((int *)data)[i] != (int)i) {
+ H5_FAILED();
+ HDprintf(" H5S_ALL selection data verification failed\n");
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if (data) {
+ HDfree(data);
+ data = NULL;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dwrite_all_read);
+
+ PART_BEGIN(H5Dwrite_hyperslab_read)
+ {
+ TESTING_2("H5Dwrite using hyperslab selection then H5Dread");
+
+ data_size = dims[1] * 2 * DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPESIZE;
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ for (i = 0; i < data_size / DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPESIZE; i++)
+ ((int *)write_buf)[i] = 56;
+
+ for (i = 0, data_size = 1; i < DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPESIZE;
+
+ if (NULL == (data = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset data verification\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if (H5Dread(dset_id, DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME);
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ for (i = 0; i < 2; i++) {
+ size_t j;
+
+ for (j = 0; j < dims[1]; j++)
+ ((int *)data)[(i * dims[1] * dims[2]) + (j * dims[2])] = 56;
+ }
+
+ /* Write to first two rows of dataset */
+ start[0] = start[1] = start[2] = 0;
+ stride[0] = stride[1] = stride[2] = 1;
+ count[0] = 2;
+ count[1] = dims[1];
+ count[2] = 1;
+ block[0] = block[1] = block[2] = 1;
+
+ if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't select hyperslab for dataset write\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ {
+ hsize_t mdims[] = {(hsize_t)2 * dims[1]};
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+ }
+
+ if (H5Dwrite(dset_id, DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPE, mspace_id, fspace_id,
+ H5P_DEFAULT, write_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n", DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME);
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if (mspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(mspace_id);
+ }
+ H5E_END_TRY;
+ mspace_id = H5I_INVALID_HID;
+ }
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME);
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataspace num points\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if (NULL == (read_buf = HDmalloc((hsize_t)space_npoints *
+ DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPESIZE))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if (H5Dread(dset_id, DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME);
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if (memcmp(data, read_buf, data_size)) {
+ H5_FAILED();
+ HDprintf(" hyperslab selection data verification failed\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if (data) {
+ HDfree(data);
+ data = NULL;
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dwrite_hyperslab_read);
+
+ PART_BEGIN(H5Dwrite_point_sel_read)
+ {
+ TESTING_2("H5Dwrite using point selection then H5Dread");
+
+ data_size =
+ DATASET_DATA_VERIFY_WRITE_TEST_NUM_POINTS * DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPESIZE;
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ for (i = 0; i < data_size / DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPESIZE; i++)
+ ((int *)write_buf)[i] = 13;
+
+ for (i = 0, data_size = 1; i < DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPESIZE;
+
+ if (NULL == (data = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset data verification\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if (H5Dread(dset_id, DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME);
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ for (i = 0; i < dims[0]; i++) {
+ size_t j;
+
+ for (j = 0; j < dims[1]; j++) {
+ size_t k;
+
+ for (k = 0; k < dims[2]; k++) {
+ if (i == j && j == k)
+ ((int *)data)[(i * dims[1] * dims[2]) + (j * dims[2]) + k] = 13;
+ }
+ }
+ }
+
+ /* Select a series of 10 points in the dataset */
+ for (i = 0; i < DATASET_DATA_VERIFY_WRITE_TEST_NUM_POINTS; i++) {
+ size_t j;
+
+ for (j = 0; j < DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK; j++)
+ points[(i * DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK) + j] = i;
+ }
+
+ if (H5Sselect_elements(fspace_id, H5S_SELECT_SET, DATASET_DATA_VERIFY_WRITE_TEST_NUM_POINTS,
+ points) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't select elements in dataspace\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ {
+ hsize_t mdims[] = {(hsize_t)DATASET_DATA_VERIFY_WRITE_TEST_NUM_POINTS};
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+ }
+
+ if (H5Dwrite(dset_id, DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPE, mspace_id, fspace_id,
+ H5P_DEFAULT, write_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n", DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME);
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if (mspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(mspace_id);
+ }
+ H5E_END_TRY;
+ mspace_id = H5I_INVALID_HID;
+ }
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME);
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataspace num points\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if (NULL == (read_buf = HDmalloc((hsize_t)space_npoints *
+ DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPESIZE))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if (H5Dread(dset_id, DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME);
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if (memcmp(data, read_buf, data_size)) {
+ H5_FAILED();
+ HDprintf(" point selection data verification failed\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dwrite_point_sel_read);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (data) {
+ HDfree(data);
+ data = NULL;
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (data)
+ HDfree(data);
+ if (write_buf)
+ HDfree(write_buf);
+ if (read_buf)
+ HDfree(read_buf);
+ H5Sclose(mspace_id);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a dataset can't be written to
+ * when H5Dwrite is passed invalid parameters.
+ */
+static int
+test_write_dataset_invalid_params(void)
+{
+    hssize_t space_npoints;
+    hsize_t  dims[DATASET_WRITE_INVALID_PARAMS_TEST_DSET_SPACE_RANK] = {10, 5, 3};
+    herr_t   err_ret = -1;
+    size_t   i;
+    hid_t    file_id         = H5I_INVALID_HID;
+    hid_t    container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t    dset_id   = H5I_INVALID_HID;
+    hid_t    fspace_id = H5I_INVALID_HID;
+    void    *data      = NULL;
+
+    TESTING_MULTIPART("H5Dwrite with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_WRITE_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_WRITE_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Screate_simple(DATASET_WRITE_INVALID_PARAMS_TEST_DSET_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create the dataset with this test's own datatype macro. Previously this
+     * used DATASET_SMALL_WRITE_TEST_ALL_DSET_DTYPE (a copy-paste from another
+     * test), which was inconsistent with the DATASET_WRITE_INVALID_PARAMS_TEST_DSET_DTYPE
+     * passed to every H5Dwrite call below. */
+    if ((dset_id = H5Dcreate2(group_id, DATASET_WRITE_INVALID_PARAMS_TEST_DSET_NAME,
+                              DATASET_WRITE_INVALID_PARAMS_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_WRITE_INVALID_PARAMS_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataspace num points\n");
+        goto error;
+    }
+
+    if (NULL == (data = HDmalloc((hsize_t)space_npoints * DATASET_WRITE_INVALID_PARAMS_TEST_DSET_DTYPESIZE)))
+        TEST_ERROR;
+
+    for (i = 0; i < (hsize_t)space_npoints; i++)
+        ((int *)data)[i] = (int)i;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /* Each part below passes exactly one invalid argument to H5Dwrite and
+         * verifies that the call fails rather than succeeding. */
+        PART_BEGIN(H5Dwrite_invalid_dset_id)
+        {
+            TESTING_2("H5Dwrite with an invalid dataset ID");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Dwrite(H5I_INVALID_HID, DATASET_WRITE_INVALID_PARAMS_TEST_DSET_DTYPE, H5S_ALL,
+                                   H5S_ALL, H5P_DEFAULT, data);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    wrote to dataset using H5Dwrite with an invalid dataset ID!\n");
+                PART_ERROR(H5Dwrite_invalid_dset_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dwrite_invalid_dset_id);
+
+        PART_BEGIN(H5Dwrite_invalid_datatype)
+        {
+            TESTING_2("H5Dwrite with an invalid memory datatype");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Dwrite(dset_id, H5I_INVALID_HID, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    wrote to dataset using H5Dwrite with an invalid memory datatype!\n");
+                PART_ERROR(H5Dwrite_invalid_datatype);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dwrite_invalid_datatype);
+
+        PART_BEGIN(H5Dwrite_invalid_mem_dataspace)
+        {
+            TESTING_2("H5Dwrite with an invalid memory dataspace");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Dwrite(dset_id, DATASET_WRITE_INVALID_PARAMS_TEST_DSET_DTYPE, H5I_INVALID_HID,
+                                   H5S_ALL, H5P_DEFAULT, data);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    wrote to dataset using H5Dwrite with an invalid memory dataspace!\n");
+                PART_ERROR(H5Dwrite_invalid_mem_dataspace);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dwrite_invalid_mem_dataspace);
+
+        PART_BEGIN(H5Dwrite_invalid_file_dataspace)
+        {
+            TESTING_2("H5Dwrite with an invalid file dataspace");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Dwrite(dset_id, DATASET_WRITE_INVALID_PARAMS_TEST_DSET_DTYPE, H5S_ALL,
+                                   H5I_INVALID_HID, H5P_DEFAULT, data);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    wrote to dataset using H5Dwrite with an invalid file dataspace!\n");
+                PART_ERROR(H5Dwrite_invalid_file_dataspace);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dwrite_invalid_file_dataspace);
+
+        PART_BEGIN(H5Dwrite_invalid_dxpl)
+        {
+            TESTING_2("H5Dwrite with an invalid DXPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Dwrite(dset_id, DATASET_WRITE_INVALID_PARAMS_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL,
+                                   H5I_INVALID_HID, data);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    wrote to dataset using H5Dwrite with an invalid DXPL!\n");
+                PART_ERROR(H5Dwrite_invalid_dxpl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dwrite_invalid_dxpl);
+
+        PART_BEGIN(H5Dwrite_invalid_data_buf)
+        {
+            TESTING_2("H5Dwrite with an invalid data buffer");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Dwrite(dset_id, DATASET_WRITE_INVALID_PARAMS_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL,
+                                   H5P_DEFAULT, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    wrote to dataset using H5Dwrite with an invalid data buffer!\n");
+                PART_ERROR(H5Dwrite_invalid_data_buf);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dwrite_invalid_data_buf);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (data) {
+        HDfree(data);
+        data = NULL;
+    }
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of any IDs/buffers still live; errors suppressed */
+    H5E_BEGIN_TRY
+    {
+        if (data)
+            HDfree(data);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that data is read back correctly from a dataset after it has
+ * been written, using type conversion with builtin types.
+ */
+static int
+test_dataset_builtin_type_conversion(void)
+{
+    hssize_t space_npoints;
+    hsize_t  dims[DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_SPACE_RANK] = {10, 10, 10};
+    hsize_t  start[DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_SPACE_RANK];
+    hsize_t  stride[DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_SPACE_RANK];
+    hsize_t  count[DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_SPACE_RANK];
+    hsize_t  block[DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_SPACE_RANK];
+    hsize_t  points[DATASET_DATA_BUILTIN_CONVERSION_TEST_NUM_POINTS *
+                   DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_SPACE_RANK];
+    size_t      i, data_size;
+    hid_t       file_id         = H5I_INVALID_HID;
+    hid_t       container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t       dset_id      = H5I_INVALID_HID;
+    hid_t       fspace_id    = H5I_INVALID_HID;
+    hid_t       mspace_id    = H5I_INVALID_HID;
+    hid_t       file_type_id = H5I_INVALID_HID;
+    H5T_order_t native_order;
+    void       *data      = NULL;
+    void       *write_buf = NULL;
+    void       *read_buf  = NULL;
+
+    TESTING_MULTIPART(
+        "verification of dataset data using H5Dwrite then H5Dread with type conversion of builtin types");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, basic or more dataset aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    /* Pick a file datatype with the opposite byte order from the native
+     * (memory) order so that every H5Dwrite/H5Dread in this test must go
+     * through a type conversion. */
+    if ((native_order = H5Tget_order(DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get native byte order\n");
+        goto error;
+    }
+    if (native_order == H5T_ORDER_LE)
+        file_type_id = H5T_STD_I32BE;
+    else
+        file_type_id = H5T_STD_I32LE;
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_DATA_BUILTIN_CONVERSION_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        /* Fixed copy-paste: this diagnostic previously printed
+         * DATASET_DATA_VERIFY_WRITE_TEST_GROUP_NAME, the wrong group name. */
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_DATA_BUILTIN_CONVERSION_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Screate_simple(DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME, file_type_id,
+                              fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME);
+        goto error;
+    }
+
+    for (i = 0, data_size = 1; i < DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPESIZE;
+
+    if (NULL == (data = HDmalloc(data_size)))
+        TEST_ERROR;
+
+    for (i = 0; i < data_size / DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPESIZE; i++)
+        ((int *)data)[i] = (int)i;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Dwrite_all_read)
+        {
+            TESTING_2("H5Dwrite then H5Dread with H5S_ALL selection");
+
+            if (H5Dwrite(dset_id, DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPE, H5S_ALL, H5S_ALL,
+                         H5P_DEFAULT, data) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't write to dataset '%s'\n",
+                         DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME);
+                PART_ERROR(H5Dwrite_all_read);
+            }
+
+            if (data) {
+                HDfree(data);
+                data = NULL;
+            }
+
+            /* Close and re-open the dataset so the read below goes through a
+             * freshly-opened ID rather than cached state. */
+            if (fspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(fspace_id);
+                }
+                H5E_END_TRY;
+                fspace_id = H5I_INVALID_HID;
+            }
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            if ((dset_id = H5Dopen2(group_id, DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't open dataset '%s'\n", DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME);
+                PART_ERROR(H5Dwrite_all_read);
+            }
+
+            if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get dataset dataspace\n");
+                PART_ERROR(H5Dwrite_all_read);
+            }
+
+            if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get dataspace num points\n");
+                PART_ERROR(H5Dwrite_all_read);
+            }
+
+            if (NULL == (data = HDmalloc((hsize_t)space_npoints *
+                                         DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPESIZE))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset read\n");
+                PART_ERROR(H5Dwrite_all_read);
+            }
+
+            if (H5Dread(dset_id, DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPE, H5S_ALL, H5S_ALL,
+                        H5P_DEFAULT, data) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't read from dataset '%s'\n",
+                         DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME);
+                PART_ERROR(H5Dwrite_all_read);
+            }
+
+            for (i = 0; i < (hsize_t)space_npoints; i++)
+                if (((int *)data)[i] != (int)i) {
+                    H5_FAILED();
+                    HDprintf("    H5S_ALL selection data verification failed\n");
+                    PART_ERROR(H5Dwrite_all_read);
+                }
+
+            if (data) {
+                HDfree(data);
+                data = NULL;
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dwrite_all_read);
+
+        PART_BEGIN(H5Dwrite_hyperslab_read)
+        {
+            TESTING_2("H5Dwrite using hyperslab selection then H5Dread");
+
+            data_size = dims[1] * 2 * DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPESIZE;
+
+            if (NULL == (write_buf = HDmalloc(data_size))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset write\n");
+                PART_ERROR(H5Dwrite_hyperslab_read);
+            }
+
+            for (i = 0; i < data_size / DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPESIZE; i++)
+                ((int *)write_buf)[i] = 56;
+
+            for (i = 0, data_size = 1; i < DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_SPACE_RANK; i++)
+                data_size *= dims[i];
+            data_size *= DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPESIZE;
+
+            if (NULL == (data = HDmalloc(data_size))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset data verification\n");
+                PART_ERROR(H5Dwrite_hyperslab_read);
+            }
+
+            /* Snapshot the current file contents, then patch in the values the
+             * hyperslab write below is expected to change, producing the
+             * expected post-write state for the memcmp() verification. */
+            if (H5Dread(dset_id, DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPE, H5S_ALL, H5S_ALL,
+                        H5P_DEFAULT, data) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't read from dataset '%s'\n",
+                         DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME);
+                PART_ERROR(H5Dwrite_hyperslab_read);
+            }
+
+            for (i = 0; i < 2; i++) {
+                size_t j;
+
+                for (j = 0; j < dims[1]; j++)
+                    ((int *)data)[(i * dims[1] * dims[2]) + (j * dims[2])] = 56;
+            }
+
+            /* Write to first two rows of dataset */
+            start[0] = start[1] = start[2] = 0;
+            stride[0] = stride[1] = stride[2] = 1;
+            count[0] = 2;
+            count[1] = dims[1];
+            count[2] = 1;
+            block[0] = block[1] = block[2] = 1;
+
+            if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't select hyperslab for dataset write\n");
+                PART_ERROR(H5Dwrite_hyperslab_read);
+            }
+
+            {
+                hsize_t mdims[] = {(hsize_t)2 * dims[1]};
+
+                if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't create memory dataspace\n");
+                    PART_ERROR(H5Dwrite_hyperslab_read);
+                }
+            }
+
+            if (H5Dwrite(dset_id, DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPE, mspace_id, fspace_id,
+                         H5P_DEFAULT, write_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't write to dataset '%s'\n",
+                         DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME);
+                PART_ERROR(H5Dwrite_hyperslab_read);
+            }
+
+            if (mspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(mspace_id);
+                }
+                H5E_END_TRY;
+                mspace_id = H5I_INVALID_HID;
+            }
+            if (fspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(fspace_id);
+                }
+                H5E_END_TRY;
+                fspace_id = H5I_INVALID_HID;
+            }
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            if ((dset_id = H5Dopen2(group_id, DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't open dataset '%s'\n", DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME);
+                PART_ERROR(H5Dwrite_hyperslab_read);
+            }
+
+            if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get dataset dataspace\n");
+                PART_ERROR(H5Dwrite_hyperslab_read);
+            }
+
+            if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get dataspace num points\n");
+                PART_ERROR(H5Dwrite_hyperslab_read);
+            }
+
+            if (NULL == (read_buf = HDmalloc((hsize_t)space_npoints *
+                                             DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPESIZE))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset read\n");
+                PART_ERROR(H5Dwrite_hyperslab_read);
+            }
+
+            if (H5Dread(dset_id, DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPE, H5S_ALL, H5S_ALL,
+                        H5P_DEFAULT, read_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't read from dataset '%s'\n",
+                         DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME);
+                PART_ERROR(H5Dwrite_hyperslab_read);
+            }
+
+            if (memcmp(data, read_buf, data_size)) {
+                H5_FAILED();
+                HDprintf("    hyperslab selection data verification failed\n");
+                PART_ERROR(H5Dwrite_hyperslab_read);
+            }
+
+            if (data) {
+                HDfree(data);
+                data = NULL;
+            }
+
+            if (write_buf) {
+                HDfree(write_buf);
+                write_buf = NULL;
+            }
+
+            if (read_buf) {
+                HDfree(read_buf);
+                read_buf = NULL;
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dwrite_hyperslab_read);
+
+        PART_BEGIN(H5Dwrite_point_sel_read)
+        {
+            TESTING_2("H5Dwrite using point selection then H5Dread");
+
+            data_size = DATASET_DATA_BUILTIN_CONVERSION_TEST_NUM_POINTS *
+                        DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPESIZE;
+
+            if (NULL == (write_buf = HDmalloc(data_size))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset write\n");
+                PART_ERROR(H5Dwrite_point_sel_read);
+            }
+
+            for (i = 0; i < data_size / DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPESIZE; i++)
+                ((int *)write_buf)[i] = 13;
+
+            for (i = 0, data_size = 1; i < DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_SPACE_RANK; i++)
+                data_size *= dims[i];
+            data_size *= DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPESIZE;
+
+            if (NULL == (data = HDmalloc(data_size))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset data verification\n");
+                PART_ERROR(H5Dwrite_point_sel_read);
+            }
+
+            /* Snapshot the current file contents, then patch in the diagonal
+             * values the point-selection write below is expected to set. */
+            if (H5Dread(dset_id, DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPE, H5S_ALL, H5S_ALL,
+                        H5P_DEFAULT, data) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't read from dataset '%s'\n",
+                         DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME);
+                PART_ERROR(H5Dwrite_point_sel_read);
+            }
+
+            for (i = 0; i < dims[0]; i++) {
+                size_t j;
+
+                for (j = 0; j < dims[1]; j++) {
+                    size_t k;
+
+                    for (k = 0; k < dims[2]; k++) {
+                        if (i == j && j == k)
+                            ((int *)data)[(i * dims[1] * dims[2]) + (j * dims[2]) + k] = 13;
+                    }
+                }
+            }
+
+            /* Select a series of 10 points in the dataset */
+            for (i = 0; i < DATASET_DATA_BUILTIN_CONVERSION_TEST_NUM_POINTS; i++) {
+                size_t j;
+
+                for (j = 0; j < DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_SPACE_RANK; j++)
+                    points[(i * DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_SPACE_RANK) + j] = i;
+            }
+
+            if (H5Sselect_elements(fspace_id, H5S_SELECT_SET, DATASET_DATA_BUILTIN_CONVERSION_TEST_NUM_POINTS,
+                                   points) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't select elements in dataspace\n");
+                PART_ERROR(H5Dwrite_point_sel_read);
+            }
+
+            {
+                hsize_t mdims[] = {(hsize_t)DATASET_DATA_BUILTIN_CONVERSION_TEST_NUM_POINTS};
+
+                if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't create memory dataspace\n");
+                    PART_ERROR(H5Dwrite_point_sel_read);
+                }
+            }
+
+            if (H5Dwrite(dset_id, DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPE, mspace_id, fspace_id,
+                         H5P_DEFAULT, write_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't write to dataset '%s'\n",
+                         DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME);
+                PART_ERROR(H5Dwrite_point_sel_read);
+            }
+
+            if (mspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(mspace_id);
+                }
+                H5E_END_TRY;
+                mspace_id = H5I_INVALID_HID;
+            }
+            if (fspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(fspace_id);
+                }
+                H5E_END_TRY;
+                fspace_id = H5I_INVALID_HID;
+            }
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            if ((dset_id = H5Dopen2(group_id, DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't open dataset '%s'\n", DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME);
+                PART_ERROR(H5Dwrite_point_sel_read);
+            }
+
+            if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get dataset dataspace\n");
+                PART_ERROR(H5Dwrite_point_sel_read);
+            }
+
+            if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get dataspace num points\n");
+                PART_ERROR(H5Dwrite_point_sel_read);
+            }
+
+            if (NULL == (read_buf = HDmalloc((hsize_t)space_npoints *
+                                             DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPESIZE))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset read\n");
+                PART_ERROR(H5Dwrite_point_sel_read);
+            }
+
+            if (H5Dread(dset_id, DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPE, H5S_ALL, H5S_ALL,
+                        H5P_DEFAULT, read_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't read from dataset '%s'\n",
+                         DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME);
+                PART_ERROR(H5Dwrite_point_sel_read);
+            }
+
+            if (memcmp(data, read_buf, data_size)) {
+                H5_FAILED();
+                HDprintf("    point selection data verification failed\n");
+                PART_ERROR(H5Dwrite_point_sel_read);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dwrite_point_sel_read);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (data) {
+        HDfree(data);
+        data = NULL;
+    }
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; file_type_id is a predefined type and needs no close */
+    H5E_BEGIN_TRY
+    {
+        if (data)
+            HDfree(data);
+        if (write_buf)
+            HDfree(write_buf);
+        if (read_buf)
+            HDfree(read_buf);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that data is read back correctly from a dataset after it has
+ * been written, using partial element I/O with compound types
+ */
+/* NOTE(review): "compount" appears to be a typo for "compound"; the name is
+ * kept as-is because it is referenced throughout this file. */
+typedef struct dataset_compount_partial_io_t {
+ int a; /* first compound member; selected alone via the "a"-only memory type */
+ int b; /* second compound member; selected alone via the "b"-only memory type */
+} dataset_compount_partial_io_t;
+
+static int
+test_dataset_compound_partial_io(void)
+{
+    hsize_t dims[1] = {DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS};
+    size_t  i;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id      = H5I_INVALID_HID;
+    hid_t   space_id     = H5I_INVALID_HID;
+    hid_t   full_type_id = H5I_INVALID_HID;
+    hid_t   a_type_id    = H5I_INVALID_HID;
+    hid_t   b_type_id    = H5I_INVALID_HID;
+    dataset_compount_partial_io_t wbuf[DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS];  /* write buffer */
+    dataset_compount_partial_io_t rbuf[DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS];  /* read buffer */
+    dataset_compount_partial_io_t fbuf[DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS];  /* mirror of expected file state */
+    dataset_compount_partial_io_t erbuf[DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS]; /* expected read result */
+
+    TESTING_MULTIPART(
+        "verification of dataset data using H5Dwrite then H5Dread with partial element compound type I/O");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_DATA_COMPOUND_PARTIAL_IO_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_DATA_COMPOUND_PARTIAL_IO_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((space_id = H5Screate_simple(1, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Full compound type: both members "a" and "b" */
+    if ((full_type_id = H5Tcreate(H5T_COMPOUND, sizeof(dataset_compount_partial_io_t))) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(full_type_id, "a", HOFFSET(dataset_compount_partial_io_t, a), H5T_NATIVE_INT) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(full_type_id, "b", HOFFSET(dataset_compount_partial_io_t, b), H5T_NATIVE_INT) < 0)
+        TEST_ERROR;
+
+    /* Partial memory type containing only member "a" */
+    if ((a_type_id = H5Tcreate(H5T_COMPOUND, sizeof(dataset_compount_partial_io_t))) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(a_type_id, "a", HOFFSET(dataset_compount_partial_io_t, a), H5T_NATIVE_INT) < 0)
+        TEST_ERROR;
+
+    /* Partial memory type containing only member "b" */
+    if ((b_type_id = H5Tcreate(H5T_COMPOUND, sizeof(dataset_compount_partial_io_t))) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(b_type_id, "b", HOFFSET(dataset_compount_partial_io_t, b), H5T_NATIVE_INT) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_DATA_COMPOUND_PARTIAL_IO_TEST_DSET_NAME, full_type_id,
+                              space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_DATA_COMPOUND_PARTIAL_IO_TEST_DSET_NAME);
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(write_full_read_full)
+        {
+            TESTING_2("H5Dwrite then H5Dread with all compound members");
+
+            /* Initialize wbuf */
+            for (i = 0; i < DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS; i++) {
+                wbuf[i].a = (int)(2 * i);
+                wbuf[i].b = (int)(2 * i + 1);
+            }
+
+            /* Write data */
+            if (H5Dwrite(dset_id, full_type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf) < 0)
+                PART_TEST_ERROR(write_full_read_full);
+
+            /* Update fbuf to match file state */
+            for (i = 0; i < DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS; i++) {
+                fbuf[i].a = wbuf[i].a;
+                fbuf[i].b = wbuf[i].b;
+            }
+
+            /* Initialize rbuf to -1 */
+            for (i = 0; i < DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS; i++) {
+                rbuf[i].a = -1;
+                rbuf[i].b = -1;
+            }
+
+            /* Set erbuf (simply match file state since we're reading the whole
+             * thing) */
+            for (i = 0; i < DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS; i++) {
+                erbuf[i].a = fbuf[i].a;
+                erbuf[i].b = fbuf[i].b;
+            }
+
+            /* Read data */
+            if (H5Dread(dset_id, full_type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+                PART_TEST_ERROR(write_full_read_full);
+
+            /* Verify data */
+            for (i = 0; i < DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS; i++) {
+                if (rbuf[i].a != erbuf[i].a)
+                    PART_TEST_ERROR(write_full_read_full);
+                if (rbuf[i].b != erbuf[i].b)
+                    PART_TEST_ERROR(write_full_read_full);
+            }
+
+            PASSED();
+        }
+        PART_END(write_full_read_full);
+
+        PART_BEGIN(read_a)
+        {
+            TESTING_2("H5Dread with compound member a");
+
+            /* Initialize rbuf to -1 */
+            for (i = 0; i < DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS; i++) {
+                rbuf[i].a = -1;
+                rbuf[i].b = -1;
+            }
+
+            /* Set erbuf (element a comes from the file, element b in untouched)
+             */
+            for (i = 0; i < DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS; i++) {
+                erbuf[i].a = fbuf[i].a;
+                erbuf[i].b = rbuf[i].b;
+            }
+
+            /* Read data */
+            if (H5Dread(dset_id, a_type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+                PART_TEST_ERROR(read_a);
+
+            /* Verify data */
+            for (i = 0; i < DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS; i++) {
+                if (rbuf[i].a != erbuf[i].a)
+                    PART_TEST_ERROR(read_a);
+                if (rbuf[i].b != erbuf[i].b)
+                    PART_TEST_ERROR(read_a);
+            }
+
+            PASSED();
+        }
+        PART_END(read_a);
+
+        PART_BEGIN(write_b_read_full)
+        {
+            TESTING_2("H5Dwrite with compound member b then H5Dread with all compound members");
+
+            /* Initialize wbuf */
+            for (i = 0; i < DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS; i++) {
+                wbuf[i].a = (int)(2 * DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS + 2 * i);
+                wbuf[i].b = (int)(2 * DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS + 2 * i + 1);
+            }
+
+            /* Write data.  Fixed: the failure path previously invoked
+             * PART_TEST_ERROR(write_full_read_full), jumping to the wrong
+             * part's end label on error. */
+            if (H5Dwrite(dset_id, b_type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf) < 0)
+                PART_TEST_ERROR(write_b_read_full);
+
+            /* Update fbuf to match file state - only element b was updated */
+            for (i = 0; i < DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS; i++) {
+                fbuf[i].b = wbuf[i].b;
+            }
+
+            /* Initialize rbuf to -1 */
+            for (i = 0; i < DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS; i++) {
+                rbuf[i].a = -1;
+                rbuf[i].b = -1;
+            }
+
+            /* Set erbuf (simply match file state since we're reading the whole
+             * thing) */
+            for (i = 0; i < DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS; i++) {
+                erbuf[i].a = fbuf[i].a;
+                erbuf[i].b = fbuf[i].b;
+            }
+
+            /* Read data */
+            if (H5Dread(dset_id, full_type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+                PART_TEST_ERROR(write_b_read_full);
+
+            /* Verify data */
+            for (i = 0; i < DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS; i++) {
+                if (rbuf[i].a != erbuf[i].a)
+                    PART_TEST_ERROR(write_b_read_full);
+                if (rbuf[i].b != erbuf[i].b)
+                    PART_TEST_ERROR(write_b_read_full);
+            }
+
+            PASSED();
+        }
+        PART_END(write_b_read_full);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(full_type_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(a_type_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(b_type_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of any IDs still live; errors suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+        H5Tclose(full_type_id);
+        H5Tclose(a_type_id);
+        H5Tclose(b_type_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a chunked dataset's extent can be
+ * changed by using H5Dset_extent. This test uses unlimited
+ * dimensions for the dataset, so the dimensionality of the
+ * dataset may both shrink and grow.
+ *
+ * Returns 0 on success, 1 on failure (all IDs are released
+ * via the error label on failure).
+ */
+static int
+test_dataset_set_extent_chunked_unlimited(void)
+{
+    hsize_t dims[DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_SPACE_RANK];
+    hsize_t max_dims[DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_SPACE_RANK];
+    hsize_t chunk_dims[DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_SPACE_RANK];
+    hsize_t new_dims[DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_SPACE_RANK];
+    size_t i;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t dset_dtype = H5I_INVALID_HID;
+    hid_t dcpl_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+
+    TESTING("H5Dset_extent on chunked dataset with unlimited dimensions");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, basic or more dataset aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Unlimited max. dims allow the extent to both grow and shrink; chunk dims
+     * are picked at random from [1, MAX_DIM_SIZE].
+     */
+    for (i = 0; i < DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_SPACE_RANK; i++) {
+        max_dims[i] = H5S_UNLIMITED;
+        chunk_dims[i] = (hsize_t)(rand() % MAX_DIM_SIZE + 1);
+    }
+
+    /* NOTE(review): generate_random_dataspace() appears to return the chosen current
+     * dimensions through 'dims' (they are compared against below) -- confirm in
+     * H5_api_test_util.c */
+    if ((fspace_id = generate_random_dataspace(DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_SPACE_RANK, max_dims,
+                                               dims, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_chunk(dcpl_id, DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_SPACE_RANK, chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    unable to set dataset chunk dimensionality\n");
+        goto error;
+    }
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_DSET_NAME, dset_dtype,
+                              fspace_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Pass 1: change the extent several times while the dataset stays open,
+     * verifying after every H5Dset_extent call that the new dimensions took effect.
+     */
+    for (i = 0; i < DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_NUM_PASSES; i++) {
+        size_t j;
+
+        for (j = 0; j < DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_SPACE_RANK; j++) {
+            /* Ensure that the new dimensionality doesn't match the old dimensionality. */
+            do {
+                new_dims[j] = (hsize_t)(rand() % MAX_DIM_SIZE + 1);
+            } while (new_dims[j] == dims[j]);
+        }
+
+        if (H5Dset_extent(dset_id, new_dims) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to set dataset extent\n");
+            goto error;
+        }
+
+        /* Retrieve the new dimensions of the dataset and ensure they
+         * are different from the original.
+         */
+        if (H5Sclose(fspace_id) < 0)
+            TEST_ERROR;
+
+        if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to retrieve dataset's dataspace\n");
+            goto error;
+        }
+
+        if (H5Sget_simple_extent_dims(fspace_id, new_dims, NULL) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to retrieve dataset dimensionality\n");
+            goto error;
+        }
+
+        /*
+         * Make sure the dimensions have been changed.
+         */
+        for (j = 0; j < DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_SPACE_RANK; j++) {
+            if (dims[j] == new_dims[j]) {
+                H5_FAILED();
+                HDprintf("    dataset dimension %llu wasn't changed!\n", (unsigned long long)j);
+                goto error;
+            }
+        }
+
+        /*
+         * Remember the current dimensionality of the dataset before
+         * changing them again.
+         */
+        HDmemcpy(dims, new_dims, sizeof(new_dims));
+    }
+
+    /*
+     * Now close and re-open the dataset each pass to check the persistence
+     * of the changes to the dataset's dimensionality.
+     */
+    for (i = 0; i < DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_NUM_PASSES; i++) {
+        size_t j;
+
+        for (j = 0; j < DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_SPACE_RANK; j++) {
+            /* Ensure that the new dimensionality doesn't match the old dimensionality. */
+            do {
+                new_dims[j] = (hsize_t)(rand() % MAX_DIM_SIZE + 1);
+            } while (new_dims[j] == dims[j]);
+        }
+
+        if (H5Dset_extent(dset_id, new_dims) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to set dataset extent\n");
+            goto error;
+        }
+
+        /* Retrieve the new dimensions of the dataset and ensure they
+         * are different from the original.
+         */
+        if (H5Sclose(fspace_id) < 0)
+            TEST_ERROR;
+        if (H5Dclose(dset_id) < 0)
+            TEST_ERROR;
+
+        if ((dset_id = H5Dopen2(group_id, DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_DSET_NAME, H5P_DEFAULT)) <
+            0) {
+            H5_FAILED();
+            HDprintf("    failed to open dataset '%s'\n",
+                     DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_DSET_NAME);
+            goto error;
+        }
+
+        if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to retrieve dataset's dataspace\n");
+            goto error;
+        }
+
+        if (H5Sget_simple_extent_dims(fspace_id, new_dims, NULL) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to retrieve dataset dimensionality\n");
+            goto error;
+        }
+
+        /*
+         * Make sure the dimensions have been changed.
+         */
+        for (j = 0; j < DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_SPACE_RANK; j++) {
+            if (dims[j] == new_dims[j]) {
+                H5_FAILED();
+                HDprintf("    dataset dimension %llu wasn't changed!\n", (unsigned long long)j);
+                goto error;
+            }
+        }
+
+        /*
+         * Remember the current dimensionality of the dataset before
+         * changing them again.
+         */
+        HDmemcpy(dims, new_dims, sizeof(new_dims));
+    }
+
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of every ID; errors are suppressed inside the TRY block */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(dcpl_id);
+        H5Sclose(fspace_id);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a chunked dataset's extent can be
+ * changed by using H5Dset_extent. This test uses fixed-size
+ * dimensions for the dataset, so the dimensionality of the
+ * dataset may only shrink.
+ *
+ * Returns 0 on success, 1 on failure (all IDs are released
+ * via the error label on failure).
+ */
+static int
+test_dataset_set_extent_chunked_fixed(void)
+{
+    hsize_t dims[DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_SPACE_RANK];
+    hsize_t dims2[DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_SPACE_RANK];
+    hsize_t chunk_dims[DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_SPACE_RANK];
+    hsize_t new_dims[DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_SPACE_RANK];
+    size_t i;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID, dset_id2 = H5I_INVALID_HID;
+    hid_t dset_dtype = H5I_INVALID_HID;
+    hid_t dcpl_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID, fspace_id2 = H5I_INVALID_HID;
+
+    TESTING("H5Dset_extent on chunked dataset with fixed dimensions");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, basic or more dataset aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Pick random dataset dims; chunk dims are re-drawn until they do not
+     * exceed the dataset dims (required for a fixed-size dataspace).
+     */
+    for (i = 0; i < DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_SPACE_RANK; i++) {
+        dims[i] = (hsize_t)(rand() % MAX_DIM_SIZE + 1);
+        dims2[i] = dims[i];
+        do {
+            chunk_dims[i] = (hsize_t)(rand() % MAX_DIM_SIZE + 1);
+        } while (chunk_dims[i] > dims[i]);
+    }
+
+    /* maxdims == NULL means max. dims equal current dims, i.e. the extent can only shrink */
+    if ((fspace_id = H5Screate_simple(DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+    if ((fspace_id2 = H5Screate_simple(DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_SPACE_RANK, dims2, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_chunk(dcpl_id, DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_SPACE_RANK, chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    unable to set dataset chunk dimensionality\n");
+        goto error;
+    }
+
+    /*
+     * NOTE: Since shrinking the dimension size can quickly end in a situation
+     * where the dimensions are of size 1 and we can't shrink them further, we
+     * use two datasets here to ensure the second test can run at least once.
+     */
+    if ((dset_id = H5Dcreate2(group_id, DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_DSET_NAME, dset_dtype,
+                              fspace_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((dset_id2 = H5Dcreate2(group_id, DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_DSET_NAME2, dset_dtype,
+                               fspace_id2, H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_DSET_NAME2);
+        goto error;
+    }
+
+    /* Pass 1: repeatedly shrink the first dataset's extent while it stays open,
+     * verifying each change. Stops early once any dimension reaches size 1.
+     */
+    for (i = 0; i < DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_NUM_PASSES; i++) {
+        hbool_t skip_iterations = FALSE;
+        size_t j;
+
+        for (j = 0; j < DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_SPACE_RANK; j++) {
+            /* Ensure that the new dimensionality is less than the old dimensionality. */
+            do {
+                if (dims[j] == 1) {
+                    skip_iterations = TRUE;
+                    break;
+                }
+                else
+                    new_dims[j] = (hsize_t)(rand() % MAX_DIM_SIZE + 1);
+            } while (new_dims[j] >= dims[j]);
+        }
+
+        /*
+         * If we've shrunk one of the dimensions to size 1, skip the rest of
+         * the iterations.
+         */
+        if (skip_iterations)
+            break;
+
+        if (H5Dset_extent(dset_id, new_dims) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to set dataset extent\n");
+            goto error;
+        }
+
+        /* Retrieve the new dimensions of the dataset and ensure they
+         * are different from the original.
+         */
+        if (H5Sclose(fspace_id) < 0)
+            TEST_ERROR;
+
+        if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to retrieve dataset's dataspace\n");
+            goto error;
+        }
+
+        if (H5Sget_simple_extent_dims(fspace_id, new_dims, NULL) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to retrieve dataset dimensionality\n");
+            goto error;
+        }
+
+        /*
+         * Make sure the dimensions have been changed.
+         */
+        for (j = 0; j < DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_SPACE_RANK; j++) {
+            if (dims[j] == new_dims[j]) {
+                H5_FAILED();
+                HDprintf("    dataset dimension %llu wasn't changed!\n", (unsigned long long)j);
+                goto error;
+            }
+        }
+
+        /*
+         * Remember the current dimensionality of the dataset before
+         * changing them again.
+         */
+        HDmemcpy(dims, new_dims, sizeof(new_dims));
+    }
+
+    /*
+     * Now close and re-open the dataset each pass to check the persistence
+     * of the changes to the dataset's dimensionality.
+     */
+    for (i = 0; i < DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_NUM_PASSES; i++) {
+        hbool_t skip_iterations = FALSE;
+        size_t j;
+
+        for (j = 0; j < DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_SPACE_RANK; j++) {
+            /* Ensure that the new dimensionality is less than the old dimensionality. */
+            do {
+                if (dims2[j] == 1) {
+                    skip_iterations = TRUE;
+                    break;
+                }
+                else
+                    new_dims[j] = (hsize_t)(rand() % MAX_DIM_SIZE + 1);
+            } while (new_dims[j] >= dims2[j]);
+        }
+
+        /*
+         * If we've shrunk one of the dimensions to size 1, skip the rest of
+         * the iterations.
+         */
+        if (skip_iterations)
+            break;
+
+        if (H5Dset_extent(dset_id2, new_dims) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to set dataset extent2\n");
+            goto error;
+        }
+
+        /* Retrieve the new dimensions of the dataset and ensure they
+         * are different from the original.
+         */
+        if (H5Sclose(fspace_id2) < 0)
+            TEST_ERROR;
+        if (H5Dclose(dset_id2) < 0)
+            TEST_ERROR;
+
+        if ((dset_id2 = H5Dopen2(group_id, DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_DSET_NAME2, H5P_DEFAULT)) <
+            0) {
+            H5_FAILED();
+            HDprintf("    failed to open dataset '%s'\n", DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_DSET_NAME2);
+            goto error;
+        }
+
+        if ((fspace_id2 = H5Dget_space(dset_id2)) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to retrieve dataset's dataspace\n");
+            goto error;
+        }
+
+        if (H5Sget_simple_extent_dims(fspace_id2, new_dims, NULL) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to retrieve dataset dimensionality\n");
+            goto error;
+        }
+
+        /*
+         * Make sure the dimensions have been changed.
+         */
+        for (j = 0; j < DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_SPACE_RANK; j++) {
+            if (dims2[j] == new_dims[j]) {
+                H5_FAILED();
+                HDprintf("    dataset dimension %llu wasn't changed!\n", (unsigned long long)j);
+                goto error;
+            }
+        }
+
+        /*
+         * Remember the current dimensionality of the dataset before
+         * changing them again.
+         */
+        HDmemcpy(dims2, new_dims, sizeof(new_dims));
+    }
+
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id2) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of every ID; errors are suppressed inside the TRY block */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(dcpl_id);
+        H5Sclose(fspace_id);
+        H5Sclose(fspace_id2);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Dclose(dset_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check the data is correct after expanding
+ * and shrinking the dataset with H5Dset_extent
+ *
+ * Multipart test: expand, shrink, expand back to the original
+ * size, shrink to zero, then expand back again. Returns 0 on
+ * success, 1 on failure.
+ */
+static int
+test_dataset_set_extent_data(void)
+{
+    hsize_t dims_origin[DATASET_SET_EXTENT_DATA_TEST_SPACE_RANK] = {DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM,
+                                                                    DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM};
+    hsize_t dims_expand[DATASET_SET_EXTENT_DATA_TEST_SPACE_RANK] = {
+        DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM * 2 - 1, DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM * 2 - 1};
+    hsize_t dims_shrink[DATASET_SET_EXTENT_DATA_TEST_SPACE_RANK] = {
+        DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM / 2 + 1, DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM / 2 + 1};
+    hsize_t dims_chunk[DATASET_SET_EXTENT_DATA_TEST_SPACE_RANK] = {DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM,
+                                                                   DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM};
+    hsize_t dims_max[DATASET_SET_EXTENT_DATA_TEST_SPACE_RANK] = {H5S_UNLIMITED, H5S_UNLIMITED};
+    hsize_t dims_out[DATASET_SET_EXTENT_DATA_TEST_SPACE_RANK];
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t dcpl_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID, dset_space_id = H5I_INVALID_HID;
+    int buf_origin[DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM][DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM];
+    /* buf_expand2 is only needed by the parts that check shrunken regions are cleared on re-expansion */
+#ifndef NO_CLEAR_ON_SHRINK
+    int buf_expand2[DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM][DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM];
+#endif
+    int buf_expand[DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM * 2 - 1]
+                  [DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM * 2 - 1];
+    int buf_shrink[DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM / 2 + 1]
+                  [DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM / 2 + 1];
+    int i, j;
+
+    TESTING_MULTIPART("H5Dset_extent on data correctness");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, basic or more dataset aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_SET_EXTENT_DATA_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATASET_SET_EXTENT_DATA_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Screate_simple(DATASET_SET_EXTENT_DATA_TEST_SPACE_RANK, dims_origin, dims_max)) < 0)
+        TEST_ERROR;
+
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_chunk(dcpl_id, DATASET_SET_EXTENT_DATA_TEST_SPACE_RANK, dims_chunk) < 0) {
+        H5_FAILED();
+        HDprintf("    unable to set dataset chunk dimensionality\n");
+        goto error;
+    }
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_SET_EXTENT_DATA_TEST_DSET_NAME, H5T_NATIVE_INT, fspace_id,
+                              H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_SET_EXTENT_DATA_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Fill the original buffer with deterministic values (i + j) for comparison later */
+    for (i = 0; i < DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM; i++)
+        for (j = 0; j < DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM; j++)
+            buf_origin[i][j] = i + j;
+
+    /* Write the original data
+     * X X X X X X X X
+     * X X X X X X X X
+     * X X X X X X X X
+     * X X X X X X X X
+     * X X X X X X X X
+     * X X X X X X X X
+     * X X X X X X X X
+     * X X X X X X X X
+     */
+    if (H5Dwrite(dset_id, H5T_NATIVE_INT, fspace_id, H5S_ALL, H5P_DEFAULT, buf_origin) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Dset_extent_data_expand)
+        {
+            TESTING_2("H5Dset_extent for data expansion");
+
+            /* Expand the dataset. The extended space should be initialized with the
+             * the default value (0)
+             * X X X X X X X X 0 0 0 0 0 0 0
+             * X X X X X X X X 0 0 0 0 0 0 0
+             * X X X X X X X X 0 0 0 0 0 0 0
+             * X X X X X X X X 0 0 0 0 0 0 0
+             * X X X X X X X X 0 0 0 0 0 0 0
+             * X X X X X X X X 0 0 0 0 0 0 0
+             * X X X X X X X X 0 0 0 0 0 0 0
+             * X X X X X X X X 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+             */
+            if (H5Dset_extent(dset_id, dims_expand) < 0)
+                PART_ERROR(H5Dset_extent_data_expand);
+
+            if (H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_expand) < 0)
+                PART_ERROR(H5Dset_extent_data_expand);
+
+            /* compare the expanded data */
+            for (i = 0; i < (int)dims_expand[0]; i++) {
+                for (j = 0; j < (int)dims_expand[1]; j++) {
+                    if (i >= (int)dims_origin[0] || j >= (int)dims_origin[1]) {
+                        if (buf_expand[i][j] != 0) {
+                            H5_FAILED();
+                            HDprintf("    buf_expand[%d][%d] = %d. It should be 0\n", i, j, buf_expand[i][j]);
+                            PART_ERROR(H5Dset_extent_data_expand);
+                        }
+                    }
+                    else {
+                        if (buf_expand[i][j] != buf_origin[i][j]) {
+                            H5_FAILED();
+                            HDprintf("    buf_expand[%d][%d] = %d. It should be %d\n", i, j, buf_expand[i][j],
+                                     buf_origin[i][j]);
+                            PART_ERROR(H5Dset_extent_data_expand);
+                        }
+                    }
+                }
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dset_extent_data_expand);
+
+        PART_BEGIN(H5Dset_extent_data_shrink)
+        {
+            TESTING_2("H5Dset_extent for data shrinking");
+
+            /* Shrink the dataset.
+             * X X X X X
+             * X X X X X
+             * X X X X X
+             * X X X X X
+             * X X X X X
+             */
+            if (H5Dset_extent(dset_id, dims_shrink) < 0)
+                PART_ERROR(H5Dset_extent_data_shrink);
+
+            if (H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_shrink) < 0)
+                PART_ERROR(H5Dset_extent_data_shrink);
+
+            /* compare the shrunk data */
+            for (i = 0; i < (int)dims_shrink[0]; i++) {
+                for (j = 0; j < (int)dims_shrink[1]; j++) {
+                    if (buf_shrink[i][j] != buf_origin[i][j]) {
+                        H5_FAILED();
+                        HDprintf("    buf_shrink[%d][%d] = %d. It should be %d\n", i, j, buf_shrink[i][j],
+                                 buf_origin[i][j]);
+                        PART_ERROR(H5Dset_extent_data_shrink);
+                    }
+                }
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dset_extent_data_shrink);
+
+        PART_BEGIN(H5Dset_extent_data_expand_to_origin)
+        {
+            TESTING_2("H5Dset_extent for data back to the original size");
+#ifndef NO_CLEAR_ON_SHRINK
+            /* Expand the dataset back to the original size. The data should look like this:
+             * X X X X X 0 0 0
+             * X X X X X 0 0 0
+             * X X X X X 0 0 0
+             * X X X X X 0 0 0
+             * X X X X X 0 0 0
+             * 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0
+             */
+            if (H5Dset_extent(dset_id, dims_origin) < 0)
+                PART_ERROR(H5Dset_extent_data_expand_to_origin);
+
+            if (H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_expand2) < 0)
+                PART_ERROR(H5Dset_extent_data_expand_to_origin);
+
+            /* compare the expanded data */
+            for (i = 0; i < (int)dims_origin[0]; i++) {
+                for (j = 0; j < (int)dims_origin[1]; j++) {
+                    if (i >= (int)dims_shrink[0] || j >= (int)dims_shrink[1]) {
+                        if (buf_expand2[i][j] != 0) {
+                            H5_FAILED();
+                            HDprintf("    buf_expand2[%d][%d] = %d. It should be 0\n", i, j,
+                                     buf_expand2[i][j]);
+                            PART_ERROR(H5Dset_extent_data_expand_to_origin);
+                        }
+                    }
+                    else {
+                        if (buf_expand2[i][j] != buf_origin[i][j]) {
+                            H5_FAILED();
+                            HDprintf("    buf_expand2[%d][%d] = %d. It should be %d.\n", i, j,
+                                     buf_expand2[i][j], buf_origin[i][j]);
+                            PART_ERROR(H5Dset_extent_data_expand_to_origin);
+                        }
+                    }
+                }
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Dset_extent_data_expand_to_origin);
+#endif
+        }
+        PART_END(H5Dset_extent_data_expand_to_origin);
+
+        PART_BEGIN(H5Dset_extent_data_shrink_to_zero)
+        {
+            TESTING_2("H5Dset_extent for data shrink to zero size");
+
+            /* Shrink the dimensions to 0 and verify it */
+            dims_shrink[0] = dims_shrink[1] = 0;
+
+            if (H5Dset_extent(dset_id, dims_shrink) < 0)
+                PART_ERROR(H5Dset_extent_data_shrink_to_zero);
+
+            /* get the space */
+            if ((dset_space_id = H5Dget_space(dset_id)) < 0)
+                PART_ERROR(H5Dset_extent_data_shrink_to_zero);
+
+            /* get dimensions */
+            if (H5Sget_simple_extent_dims(dset_space_id, dims_out, NULL) < 0)
+                PART_ERROR(H5Dset_extent_data_shrink_to_zero);
+
+            if (H5Sclose(dset_space_id) < 0)
+                PART_ERROR(H5Dset_extent_data_shrink_to_zero);
+
+            /* Verify the dimensions are 0 */
+            for (i = 0; i < DATASET_SET_EXTENT_DATA_TEST_SPACE_RANK; i++)
+                if (dims_out[i] != 0) {
+                    H5_FAILED();
+                    HDprintf("    dims_out[%d] = %llu. It should be 0.\n", i,
+                             (long long unsigned int)dims_out[i]);
+                    PART_ERROR(H5Dset_extent_data_shrink_to_zero);
+                }
+
+            PASSED();
+        }
+        PART_END(H5Dset_extent_data_shrink_to_zero);
+
+        PART_BEGIN(H5Dset_extent_data_expand_to_origin_again)
+        {
+            TESTING_2("H5Dset_extent for data expansion back to the original again");
+#ifndef NO_CLEAR_ON_SHRINK
+            /* Expand the dataset back to the original size. The data should look like this:
+             * 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0
+             * 0 0 0 0 0 0 0 0
+             */
+            if (H5Dset_extent(dset_id, dims_origin) < 0)
+                PART_ERROR(H5Dset_extent_data_expand_to_origin_again);
+
+            if (H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_expand2) < 0)
+                PART_ERROR(H5Dset_extent_data_expand_to_origin_again);
+
+            /* The data should be all zeros */
+            for (i = 0; i < (int)dims_origin[0]; i++) {
+                for (j = 0; j < (int)dims_origin[1]; j++) {
+                    if (buf_expand2[i][j] != 0) {
+                        H5_FAILED();
+                        HDprintf("    buf_expand2[%d][%d] = %d. It should be 0.\n", i, j, buf_expand2[i][j]);
+                        PART_ERROR(H5Dset_extent_data_expand_to_origin_again);
+                    }
+                }
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Dset_extent_data_expand_to_origin_again);
+#endif
+        }
+        PART_END(H5Dset_extent_data_expand_to_origin_again);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of every ID; errors are suppressed inside the TRY block */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(dcpl_id);
+        H5Sclose(fspace_id);
+        H5Sclose(dset_space_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* test_dataset_set_extent_data */
+
+/*
+ * If a dataset is opened twice and one of the handles is
+ * used to extend the dataset, then the other handle should
+ * return the new size when queried.
+ *
+ * Returns 0 on success, 1 on failure. The whole body is compiled
+ * out (and the test skipped) when the connector does not support
+ * double object opens (NO_DOUBLE_OBJECT_OPENS).
+ */
+static int
+test_dataset_set_extent_double_handles(void)
+{
+#ifndef NO_DOUBLE_OBJECT_OPENS
+    hsize_t dims_origin[DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_RANK] = {
+        DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_DIM, DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_DIM};
+    hsize_t dims_expand[DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_RANK] = {
+        DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_DIM * 2,
+        DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_DIM * 2};
+    hsize_t dims_chunk[DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_RANK] = {
+        DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_DIM / 2,
+        DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_DIM / 2};
+    hsize_t dims_max[DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_RANK] = {H5S_UNLIMITED, H5S_UNLIMITED};
+    hsize_t dims_out[DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_RANK];
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID, dset_id2 = H5I_INVALID_HID;
+    hid_t dcpl_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID, dset_space_id = H5I_INVALID_HID;
+    int i;
+#endif
+
+    TESTING("H5Dset_extent on double dataset handles");
+
+#ifndef NO_DOUBLE_OBJECT_OPENS
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, basic or more dataset aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        /* Report the group name that actually failed to be created (was a copy-paste of
+         * the DATA test's group name) */
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id =
+             H5Screate_simple(DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_RANK, dims_origin, dims_max)) < 0)
+        TEST_ERROR;
+
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_chunk(dcpl_id, DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_RANK, dims_chunk) < 0) {
+        H5_FAILED();
+        HDprintf("    unable to set dataset chunk dimensionality\n");
+        goto error;
+    }
+
+    /* Create the dataset */
+    if ((dset_id = H5Dcreate2(group_id, DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_DSET_NAME, H5T_NATIVE_INT,
+                              fspace_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Open the same dataset again */
+    if ((dset_id2 = H5Dopen2(group_id, DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n", DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Expand the dataset's dimensions with the first dataset handle */
+    if (H5Dset_extent(dset_id, dims_expand) < 0)
+        TEST_ERROR;
+
+    /* Get the data space with the second dataset handle */
+    if ((dset_space_id = H5Dget_space(dset_id2)) < 0)
+        TEST_ERROR;
+
+    /* Get the dimensions with the second dataset handle */
+    if (H5Sget_simple_extent_dims(dset_space_id, dims_out, NULL) < 0)
+        TEST_ERROR;
+
+    if (H5Sclose(dset_space_id) < 0)
+        TEST_ERROR;
+
+    for (i = 0; i < DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_RANK; i++)
+        if (dims_out[i] != dims_expand[i]) {
+            H5_FAILED();
+            /* hsize_t is an unsigned (typically 64-bit) type; printing it with %d is a
+             * format/argument mismatch (undefined behavior). Cast and use %llu instead. */
+            HDprintf("    dims_out[%d] = %llu. It should be %llu.\n", i, (unsigned long long)dims_out[i],
+                     (unsigned long long)dims_expand[i]);
+            goto error;
+        }
+
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of every ID; errors are suppressed inside the TRY block */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(dcpl_id);
+        H5Sclose(fspace_id);
+        H5Sclose(dset_space_id);
+        H5Dclose(dset_id);
+        H5Dclose(dset_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+#else
+    SKIPPED();
+    return 0;
+#endif
+} /* test_dataset_set_extent_double_handles */
+
+/*
+ * A test to check that a dataset's extent can't be
+ * changed when H5Dset_extent is passed invalid parameters.
+ */
+static int
+test_dataset_set_extent_invalid_params(void)
+{
+    hsize_t dims[DATASET_SET_EXTENT_INVALID_PARAMS_TEST_SPACE_RANK];
+    hsize_t chunk_dims[DATASET_SET_EXTENT_INVALID_PARAMS_TEST_SPACE_RANK];
+    hsize_t new_dims[DATASET_SET_EXTENT_INVALID_PARAMS_TEST_SPACE_RANK];
+    /* Keep compact dims tiny (3x3) so the compact dataset fits in its 64K storage limit */
+    hsize_t compact_dims[DATASET_SET_EXTENT_INVALID_PARAMS_TEST_SPACE_RANK] = {3, 3};
+    size_t  i;
+    herr_t  err_ret = -1;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   chunked_dset_id = H5I_INVALID_HID, compact_dset_id = H5I_INVALID_HID,
+          contiguous_dset_id = H5I_INVALID_HID;
+    hid_t dset_dtype      = H5I_INVALID_HID;
+    hid_t chunked_dcpl_id = H5I_INVALID_HID, compact_dcpl_id = H5I_INVALID_HID,
+          contiguous_dcpl_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID, compact_fspace_id = H5I_INVALID_HID;
+    /* Large enough for "daos" + NUL; only used for the DAOS VOL check below */
+    char vol_name[5];
+
+    TESTING_MULTIPART("H5Dset_extent with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, basic or more dataset aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    /** for DAOS VOL, this test is problematic since auto chunking can be selected, so skip for now */
+    if (H5VLget_connector_name(file_id, vol_name, 5) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get VOL connector name\n");
+        goto error;
+    }
+    if (strcmp(vol_name, "daos") == 0) {
+        if (H5Fclose(file_id) < 0)
+            TEST_ERROR;
+        SKIPPED();
+        return 0;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_SET_EXTENT_INVALID_PARAMS_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_SET_EXTENT_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = generate_random_dataspace(DATASET_SET_EXTENT_INVALID_PARAMS_TEST_SPACE_RANK, NULL, dims,
+                                               FALSE)) < 0)
+        TEST_ERROR;
+
+    /* Pick random new extents and chunk dimensions that do not exceed the
+     * (randomly generated) current dataset dimensions in any dimension.
+     */
+    for (i = 0; i < DATASET_SET_EXTENT_INVALID_PARAMS_TEST_SPACE_RANK; i++) {
+        do {
+            new_dims[i] = (hsize_t)(rand() % MAX_DIM_SIZE + 1);
+        } while (new_dims[i] > dims[i]);
+        do {
+            chunk_dims[i] = (hsize_t)(rand() % MAX_DIM_SIZE + 1);
+        } while (chunk_dims[i] > dims[i]);
+    }
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    /* Create a compact dataset */
+    if ((compact_dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_layout(compact_dcpl_id, H5D_COMPACT) < 0)
+        TEST_ERROR;
+
+    /* Keep the data space small because the storage size of compact dataset is limited to 64K */
+    if ((compact_fspace_id =
+             H5Screate_simple(DATASET_SET_EXTENT_INVALID_PARAMS_TEST_SPACE_RANK, compact_dims, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((compact_dset_id =
+             H5Dcreate2(group_id, DATASET_SET_EXTENT_INVALID_LAYOUT_TEST_COMPACT_DSET_NAME, H5T_NATIVE_INT,
+                        compact_fspace_id, H5P_DEFAULT, compact_dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n",
+                 DATASET_SET_EXTENT_INVALID_LAYOUT_TEST_COMPACT_DSET_NAME);
+        goto error;
+    }
+
+    /* Create a contiguous dataset */
+    if ((contiguous_dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_layout(contiguous_dcpl_id, H5D_CONTIGUOUS) < 0)
+        TEST_ERROR;
+
+    if ((contiguous_dset_id =
+             H5Dcreate2(group_id, DATASET_SET_EXTENT_INVALID_LAYOUT_TEST_CONTIGUOUS_DSET_NAME, dset_dtype,
+                        fspace_id, H5P_DEFAULT, contiguous_dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n",
+                 DATASET_SET_EXTENT_INVALID_LAYOUT_TEST_CONTIGUOUS_DSET_NAME);
+        goto error;
+    }
+
+    /* Create a chunked dataset (the only layout for which H5Dset_extent is valid) */
+    if ((chunked_dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_chunk(chunked_dcpl_id, DATASET_SET_EXTENT_INVALID_PARAMS_TEST_SPACE_RANK, chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    unable to set dataset chunk dimensionality\n");
+        goto error;
+    }
+
+    if ((chunked_dset_id = H5Dcreate2(group_id, DATASET_SET_EXTENT_INVALID_PARAMS_TEST_DSET_NAME, dset_dtype,
+                                      fspace_id, H5P_DEFAULT, chunked_dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_SET_EXTENT_INVALID_PARAMS_TEST_DSET_NAME);
+        goto error;
+    }
+
+    PASSED();
+
+    /* Each part below expects H5Dset_extent to FAIL (err_ret must be negative);
+     * a successful call is a test failure.
+     */
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Dset_extent_invalid_layout_compact)
+        {
+            TESTING_2("H5Dset_extent with an invalid dataset layout (compact)");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Dset_extent(compact_dset_id, new_dims);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    setting dataset extent succeeded with an invalid layout (compact)\n");
+                PART_ERROR(H5Dset_extent_invalid_layout_compact);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dset_extent_invalid_layout_compact);
+
+        PART_BEGIN(H5Dset_extent_invalid_layout_contiguous)
+        {
+            TESTING_2("H5Dset_extent with an invalid dataset layout (contiguous)");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Dset_extent(contiguous_dset_id, new_dims);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    setting dataset extent succeeded with an invalid layout (contiguous)\n");
+                PART_ERROR(H5Dset_extent_invalid_layout_contiguous);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dset_extent_invalid_layout_contiguous);
+
+        PART_BEGIN(H5Dset_extent_invalid_dset_id)
+        {
+            TESTING_2("H5Dset_extent with an invalid dataset ID");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Dset_extent(H5I_INVALID_HID, new_dims);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    setting dataset extent succeeded with an invalid dataset ID\n");
+                PART_ERROR(H5Dset_extent_invalid_dset_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dset_extent_invalid_dset_id);
+
+        PART_BEGIN(H5Dset_extent_null_dim_pointer)
+        {
+            TESTING_2("H5Dset_extent with NULL dimension pointer");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Dset_extent(chunked_dset_id, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    setting dataset extent succeeded with a NULL dimension pointer\n");
+                PART_ERROR(H5Dset_extent_null_dim_pointer);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Dset_extent_null_dim_pointer);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(chunked_dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Pclose(compact_dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Pclose(contiguous_dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(compact_fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Dclose(chunked_dset_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(compact_dset_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(contiguous_dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; error stack suppressed since some IDs may already be invalid */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(chunked_dcpl_id);
+        H5Pclose(compact_dcpl_id);
+        H5Pclose(contiguous_dcpl_id);
+        H5Sclose(fspace_id);
+        H5Sclose(compact_fspace_id);
+        H5Tclose(dset_dtype);
+        H5Dclose(chunked_dset_id);
+        H5Dclose(compact_dset_id);
+        H5Dclose(contiguous_dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* test_dataset_set_extent_invalid_params */
+
+/*
+ * A test for H5Dflush.
+ */
+static int
+test_flush_dataset(void)
+{
+    TESTING("H5Dflush");
+
+    /* Placeholder: no VOL-independent test body implemented yet, so this
+     * test is unconditionally reported as skipped. */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to check that H5Dflush fails when it is
+ * passed invalid parameters.
+ */
+static int
+test_flush_dataset_invalid_params(void)
+{
+    TESTING("H5Dflush with invalid parameters");
+
+    /* Placeholder: no VOL-independent test body implemented yet, so this
+     * test is unconditionally reported as skipped. */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test for H5Drefresh.
+ */
+static int
+test_refresh_dataset(void)
+{
+    TESTING("H5Drefresh");
+
+    /* Placeholder: no VOL-independent test body implemented yet, so this
+     * test is unconditionally reported as skipped. */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to check that H5Drefresh fails when it is
+ * passed invalid parameters.
+ */
+static int
+test_refresh_dataset_invalid_params(void)
+{
+    /* Fixed: the banner previously read just "H5Drefresh", which duplicated
+     * the banner of test_refresh_dataset() and made the two tests
+     * indistinguishable in the output. Use the same naming convention as
+     * test_flush_dataset_invalid_params(). */
+    TESTING("H5Drefresh with invalid parameters");
+
+    /* Placeholder: no VOL-independent test body implemented yet, so this
+     * test is unconditionally reported as skipped. */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to create a dataset composed of a single chunk.
+ */
+static int
+test_create_single_chunk_dataset(void)
+{
+    hsize_t dims[DATASET_SINGLE_CHUNK_TEST_SPACE_RANK];
+    hsize_t retrieved_chunk_dims[DATASET_SINGLE_CHUNK_TEST_SPACE_RANK];
+    size_t  i;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id         = H5I_INVALID_HID;
+    hid_t   dset_dtype      = H5I_INVALID_HID;
+    hid_t   dcpl_id         = H5I_INVALID_HID;
+    hid_t   fspace_id       = H5I_INVALID_HID;
+
+    TESTING("creation of dataset with single chunk");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or get property list aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_SINGLE_CHUNK_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATASET_SINGLE_CHUNK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = generate_random_dataspace(DATASET_SINGLE_CHUNK_TEST_SPACE_RANK, NULL, dims, FALSE)) < 0)
+        TEST_ERROR;
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    /* Chunk dims equal to the full dataset dims -> the dataset has exactly one chunk */
+    if (H5Pset_chunk(dcpl_id, DATASET_SINGLE_CHUNK_TEST_SPACE_RANK, dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to set chunking on DCPL\n");
+        goto error;
+    }
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_SINGLE_CHUNK_TEST_DSET_NAME, dset_dtype, fspace_id,
+                              H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_SINGLE_CHUNK_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /*
+     * See if a copy of the DCPL reports the correct chunking.
+     */
+    if (H5Pclose(dcpl_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close DCPL\n");
+        goto error;
+    }
+
+    if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve copy of DCPL\n");
+        goto error;
+    }
+
+    /* Zero first so a partially-filled result from H5Pget_chunk is detectable */
+    memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+    if (H5Pget_chunk(dcpl_id, DATASET_SINGLE_CHUNK_TEST_SPACE_RANK, retrieved_chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve chunking info\n");
+        goto error;
+    }
+
+    for (i = 0; i < DATASET_SINGLE_CHUNK_TEST_SPACE_RANK; i++) {
+        if (dims[i] != retrieved_chunk_dims[i]) {
+            H5_FAILED();
+            HDprintf("    chunk dimensionality retrieved from DCPL didn't match originally specified "
+                     "dimensionality\n");
+            goto error;
+        }
+    }
+
+    /*
+     * Now close the dataset and retrieve a copy
+     * of the DCPL after re-opening it.
+     */
+    if (H5Pclose(dcpl_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close DCPL\n");
+        goto error;
+    }
+
+    if (H5Dclose(dset_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close dataset\n");
+        goto error;
+    }
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_SINGLE_CHUNK_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to re-open dataset\n");
+        goto error;
+    }
+
+    if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve copy of DCPL\n");
+        goto error;
+    }
+
+    /* Repeat the chunk-dimension check on the reopened dataset to verify the
+     * chunking info round-trips through the file, not just the in-memory DCPL */
+    memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+    if (H5Pget_chunk(dcpl_id, DATASET_SINGLE_CHUNK_TEST_SPACE_RANK, retrieved_chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve chunking info\n");
+        goto error;
+    }
+
+    for (i = 0; i < DATASET_SINGLE_CHUNK_TEST_SPACE_RANK; i++) {
+        if (dims[i] != retrieved_chunk_dims[i]) {
+            H5_FAILED();
+            HDprintf("    chunk dimensionality retrieved from DCPL didn't match originally specified "
+                     "dimensionality\n");
+            goto error;
+        }
+    }
+
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; error stack suppressed since some IDs may already be invalid */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(dcpl_id);
+        H5Sclose(fspace_id);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a single-chunk dataset can be written
+ * and read correctly.
+ */
+static int
+test_write_single_chunk_dataset(void)
+{
+    hssize_t space_npoints;
+    hsize_t  dims[DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_SPACE_RANK];
+    hsize_t  retrieved_chunk_dims[DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_SPACE_RANK];
+    size_t   i, data_size;
+    hid_t    file_id         = H5I_INVALID_HID;
+    hid_t    container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t    dset_id         = H5I_INVALID_HID;
+    hid_t    dcpl_id         = H5I_INVALID_HID;
+    hid_t    fspace_id       = H5I_INVALID_HID;
+    void    *write_buf       = NULL;
+    void    *read_buf        = NULL;
+
+    TESTING("write to dataset with single chunk");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or get property list aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_SINGLE_CHUNK_WRITE_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_SINGLE_CHUNK_WRITE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = generate_random_dataspace(DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_SPACE_RANK, NULL, dims,
+                                               FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    /* Chunk dims equal to the full dataset dims -> the dataset has exactly one chunk */
+    if (H5Pset_chunk(dcpl_id, DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_SPACE_RANK, dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to set chunking on DCPL\n");
+        goto error;
+    }
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_NAME,
+                              DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT, dcpl_id,
+                              H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /*
+     * See if a copy of the DCPL reports the correct chunking.
+     */
+    if (H5Pclose(dcpl_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close DCPL\n");
+        goto error;
+    }
+
+    if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve copy of DCPL\n");
+        goto error;
+    }
+
+    /* Zero first so a partially-filled result from H5Pget_chunk is detectable */
+    memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+    if (H5Pget_chunk(dcpl_id, DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_SPACE_RANK, retrieved_chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve chunking info\n");
+        goto error;
+    }
+
+    for (i = 0; i < DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_SPACE_RANK; i++) {
+        if (dims[i] != retrieved_chunk_dims[i]) {
+            H5_FAILED();
+            HDprintf("    chunk dimensionality retrieved from DCPL didn't match originally specified "
+                     "dimensionality\n");
+            goto error;
+        }
+    }
+
+    for (i = 0, data_size = 1; i < DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_DTYPESIZE;
+
+    if (NULL == (write_buf = HDmalloc(data_size)))
+        TEST_ERROR;
+
+    /* Fill the buffer with a 0..N-1 ramp; assumes the test datatype is
+     * int-compatible (DTYPESIZE == sizeof(int)) — TODO confirm against the
+     * DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_DTYPE definition in the header */
+    for (i = 0; i < data_size / DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_DTYPESIZE; i++)
+        ((int *)write_buf)[i] = (int)i;
+
+    if (H5Dwrite(dset_id, DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                 write_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't write to dataset '%s'\n", DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+
+    /* Close the dataspace and dataset, then re-open the dataset so the read
+     * below goes through a fresh handle rather than cached state */
+    if (fspace_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Sclose(fspace_id);
+        }
+        H5E_END_TRY;
+        fspace_id = H5I_INVALID_HID;
+    }
+    if (dset_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Dclose(dset_id);
+        }
+        H5E_END_TRY;
+        dset_id = H5I_INVALID_HID;
+    }
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n", DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataspace num points\n");
+        goto error;
+    }
+
+    if (NULL ==
+        (read_buf = HDmalloc((hsize_t)space_npoints * DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_DTYPESIZE))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset read\n");
+        goto error;
+    }
+
+    if (H5Dread(dset_id, DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                read_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't read from dataset '%s'\n", DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Verify the read-back data matches the 0..N-1 ramp written above */
+    for (i = 0; i < (hsize_t)space_npoints; i++)
+        if (((int *)read_buf)[i] != (int)i) {
+            H5_FAILED();
+            HDprintf("    data verification failed\n");
+            goto error;
+        }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; error stack suppressed since some IDs may already be invalid */
+    H5E_BEGIN_TRY
+    {
+        if (write_buf)
+            HDfree(write_buf);
+        if (read_buf)
+            HDfree(read_buf);
+        H5Pclose(dcpl_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to create a dataset composed of multiple chunks.
+ */
+static int
+test_create_multi_chunk_dataset(void)
+{
+    /* 100x100 dataset with 10x10 chunks -> 100 chunks total */
+    hsize_t dims[DATASET_MULTI_CHUNK_TEST_SPACE_RANK]       = {100, 100};
+    hsize_t chunk_dims[DATASET_MULTI_CHUNK_TEST_SPACE_RANK] = {10, 10};
+    hsize_t retrieved_chunk_dims[DATASET_MULTI_CHUNK_TEST_SPACE_RANK];
+    size_t  i;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id         = H5I_INVALID_HID;
+    hid_t   dset_dtype      = H5I_INVALID_HID;
+    hid_t   dcpl_id         = H5I_INVALID_HID;
+    hid_t   fspace_id       = H5I_INVALID_HID;
+
+    TESTING("creation of dataset with multiple chunks");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or get property list aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_MULTI_CHUNK_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATASET_MULTI_CHUNK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Screate_simple(DATASET_MULTI_CHUNK_TEST_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_chunk(dcpl_id, DATASET_MULTI_CHUNK_TEST_SPACE_RANK, chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to set chunking on DCPL\n");
+        goto error;
+    }
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_MULTI_CHUNK_TEST_DSET_NAME, dset_dtype, fspace_id,
+                              H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_MULTI_CHUNK_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /*
+     * See if a copy of the DCPL reports the correct chunking.
+     */
+    if (H5Pclose(dcpl_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close DCPL\n");
+        goto error;
+    }
+
+    if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve copy of DCPL\n");
+        goto error;
+    }
+
+    /* Zero first so a partially-filled result from H5Pget_chunk is detectable */
+    memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+    if (H5Pget_chunk(dcpl_id, DATASET_MULTI_CHUNK_TEST_SPACE_RANK, retrieved_chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve chunking info\n");
+        goto error;
+    }
+
+    for (i = 0; i < DATASET_MULTI_CHUNK_TEST_SPACE_RANK; i++) {
+        if (chunk_dims[i] != retrieved_chunk_dims[i]) {
+            H5_FAILED();
+            HDprintf("    chunk dimensionality retrieved from DCPL didn't match originally specified "
+                     "dimensionality\n");
+            goto error;
+        }
+    }
+
+    /*
+     * Now close the dataset and retrieve a copy
+     * of the DCPL after re-opening it.
+     */
+    if (H5Pclose(dcpl_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close DCPL\n");
+        goto error;
+    }
+
+    if (H5Dclose(dset_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close dataset\n");
+        goto error;
+    }
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_MULTI_CHUNK_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to re-open dataset\n");
+        goto error;
+    }
+
+    if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve copy of DCPL\n");
+        goto error;
+    }
+
+    /* Repeat the chunk-dimension check on the reopened dataset to verify the
+     * chunking info round-trips through the file, not just the in-memory DCPL */
+    memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+    if (H5Pget_chunk(dcpl_id, DATASET_MULTI_CHUNK_TEST_SPACE_RANK, retrieved_chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve chunking info\n");
+        goto error;
+    }
+
+    for (i = 0; i < DATASET_MULTI_CHUNK_TEST_SPACE_RANK; i++) {
+        if (chunk_dims[i] != retrieved_chunk_dims[i]) {
+            H5_FAILED();
+            HDprintf("    chunk dimensionality retrieved from DCPL didn't match originally specified "
+                     "dimensionality\n");
+            goto error;
+        }
+    }
+
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; error stack suppressed since some IDs may already be invalid */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(dcpl_id);
+        H5Sclose(fspace_id);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a dataset composed of multiple chunks
+ * can be written and read correctly. When reading back the
+ * chunks of the dataset, the file dataspace and memory dataspace
+ * used are the same shape.
+ */
+static int
+test_write_multi_chunk_dataset_same_shape_read(void)
+{
+    /* 100x100 dataset with 10x10 chunks -> 100 chunks total */
+    hsize_t dims[DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK]       = {100, 100};
+    hsize_t chunk_dims[DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK] = {10, 10};
+    hsize_t retrieved_chunk_dims[DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK];
+    hsize_t start[DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK];
+    hsize_t count[DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK];
+    size_t  i, data_size, chunk_size;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id         = H5I_INVALID_HID;
+    hid_t   dcpl_id         = H5I_INVALID_HID;
+    hid_t   fspace_id       = H5I_INVALID_HID;
+    hid_t   mspace_id       = H5I_INVALID_HID;
+    void   *write_buf       = NULL;
+    /* NOTE(review): buffer dimensions are hard-coded to match chunk_dims
+     * {10, 10} above; keep them in sync if the chunk size ever changes */
+    int read_buf[10][10];
+
+    TESTING("write to dataset with multiple chunks using same shaped dataspaces");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or get property list aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Screate_simple(DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK, dims,
+                                      NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_chunk(dcpl_id, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK, chunk_dims) <
+        0) {
+        H5_FAILED();
+        HDprintf("    failed to set chunking on DCPL\n");
+        goto error;
+    }
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME,
+                              DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPE, fspace_id,
+                              H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n",
+                 DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /*
+     * See if a copy of the DCPL reports the correct chunking.
+     */
+    if (H5Pclose(dcpl_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close DCPL\n");
+        goto error;
+    }
+
+    if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve copy of DCPL\n");
+        goto error;
+    }
+
+    /* Zero first so a partially-filled result from H5Pget_chunk is detectable */
+    memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+    if (H5Pget_chunk(dcpl_id, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK,
+                     retrieved_chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve chunking info\n");
+        goto error;
+    }
+
+    for (i = 0; i < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+        if (chunk_dims[i] != retrieved_chunk_dims[i]) {
+            H5_FAILED();
+            HDprintf("    chunk dimensionality retrieved from DCPL didn't match originally specified "
+                     "dimensionality\n");
+            goto error;
+        }
+    }
+
+    /* Compute the byte size of one chunk and of the whole dataset */
+    for (i = 0, chunk_size = 1; i < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+        chunk_size *= chunk_dims[i];
+    chunk_size *= DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+    for (i = 0, data_size = 1; i < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+    if (NULL == (write_buf = HDmalloc(data_size)))
+        TEST_ERROR;
+
+    /*
+     * Ensure that each underlying chunk contains the values
+     *
+     * chunk_index .. (chunk_nelemts - 1) + chunk_index.
+     *
+     * That is to say, for a chunk size of 10 x 10, chunk 0
+     * contains the values
+     *
+     * 0 .. 99
+     *
+     * while the next chunk contains the values
+     *
+     * 1 .. 100
+     *
+     * and so on.
+     */
+    for (i = 0; i < data_size / DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE; i++) {
+        size_t j;
+        size_t base;
+        size_t tot_adjust;
+
+        /*
+         * Calculate a starting base value by taking the index value mod
+         * the size of a chunk in each dimension.
+         */
+        for (j = 0, base = i; j < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; j++)
+            if (chunk_dims[j] > 1 && base >= chunk_dims[j])
+                base %= chunk_dims[j];
+
+        /*
+         * Calculate the adjustment in each dimension.
+         */
+        for (j = 0, tot_adjust = 0; j < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+            if (j == (DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+                tot_adjust += (i % dims[j]) / chunk_dims[j];
+            else {
+                size_t k;
+                size_t n_faster_elemts;
+
+                /*
+                 * Calculate the number of elements in faster dimensions.
+                 */
+                for (k = j + 1, n_faster_elemts = 1;
+                     k < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; k++)
+                    n_faster_elemts *= dims[k];
+
+                /* Combine the inter-chunk offset along this dimension with the
+                 * intra-chunk row offset so the chunk-index value pattern
+                 * described above holds for every element */
+                tot_adjust += (((i / n_faster_elemts) / chunk_dims[j]) * (dims[j + 1] / chunk_dims[j + 1])) +
+                              (((i / n_faster_elemts) % chunk_dims[j]) * chunk_dims[j + 1]);
+            }
+        }
+
+        ((int *)write_buf)[i] = (int)(base + tot_adjust);
+    }
+
+    /*
+     * Write every chunk in the dataset.
+     */
+    if (H5Dwrite(dset_id, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL,
+                 H5P_DEFAULT, write_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't write to dataset '%s'\n",
+                 DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+
+    /* Close the dataspace and dataset, then re-open the dataset so the reads
+     * below go through a fresh handle rather than cached state */
+    if (fspace_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Sclose(fspace_id);
+        }
+        H5E_END_TRY;
+        fspace_id = H5I_INVALID_HID;
+    }
+    if (dset_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Dclose(dset_id);
+        }
+        H5E_END_TRY;
+        dset_id = H5I_INVALID_HID;
+    }
+
+    if ((dset_id =
+             H5Dopen2(group_id, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n",
+                 DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    /*
+     * Create 2-dimensional memory dataspace for read buffer.
+     */
+    {
+        hsize_t mdims[] = {chunk_dims[0], chunk_dims[1]};
+
+        if ((mspace_id = H5Screate_simple(2, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to create memory dataspace\n");
+            goto error;
+        }
+    }
+
+    /* Each hyperslab selection below covers exactly one chunk */
+    for (i = 0; i < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+        count[i] = chunk_dims[i];
+    }
+
+    /*
+     * Read every chunk in the dataset, checking the data for each one.
+     */
+    HDprintf("\n");
+    for (i = 0; i < data_size / chunk_size; i++) {
+        size_t j, k;
+
+        HDprintf("\r Reading chunk %zu", i);
+
+        /* Compute the file-space offset of chunk i in row-major chunk order */
+        for (j = 0; j < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+            if (dims[j] == chunk_dims[j])
+                start[j] = 0;
+            else if (j == (DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+                /* Fastest changing dimension */
+                start[j] = (i * chunk_dims[j]) % dims[j];
+            else
+                start[j] = ((i * chunk_dims[j + 1]) / dims[j + 1]) * (chunk_dims[j]);
+        }
+
+        /*
+         * Adjust file dataspace selection for next chunk.
+         */
+        if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, NULL, count, NULL) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to set hyperslab selection\n");
+            goto error;
+        }
+
+        /* Clear the read buffer so stale data from the previous chunk can't
+         * mask a failed read */
+        for (j = 0; j < chunk_dims[0]; j++)
+            for (k = 0; k < chunk_dims[1]; k++)
+                read_buf[j][k] = 0;
+
+        if (H5Dread(dset_id, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPE, mspace_id, fspace_id,
+                    H5P_DEFAULT, read_buf) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't read from dataset '%s'\n",
+                     DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+            goto error;
+        }
+
+        /* Each chunk i must contain the ramp i .. i + (chunk_nelems - 1),
+         * matching the value pattern written above */
+        for (j = 0; j < chunk_dims[0]; j++) {
+            for (k = 0; k < chunk_dims[1]; k++) {
+                if (read_buf[j][k] != (int)((j * chunk_dims[0]) + k + i)) {
+                    H5_FAILED();
+                    HDprintf("    data verification failed for chunk %lld\n", (long long)i);
+                    goto error;
+                }
+            }
+        }
+    }
+
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; error stack suppressed since some IDs may already be invalid */
+    H5E_BEGIN_TRY
+    {
+        if (write_buf)
+            HDfree(write_buf);
+        H5Pclose(dcpl_id);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a dataset composed of multiple chunks
+ * can be written and read correctly. When reading back the
+ * chunks of the dataset, the file dataspace and memory dataspace
+ * used are differently shaped: each 2-dimensional chunk selection
+ * in the file dataspace is read into a 1-dimensional memory
+ * dataspace holding the same number of elements.
+ *
+ * Returns 0 on success (or when the connector lacks the required
+ * capabilities), 1 on failure.
+ */
+static int
+test_write_multi_chunk_dataset_diff_shape_read(void)
+{
+    hsize_t dims[DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK]       = {100, 100};
+    hsize_t chunk_dims[DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK] = {10, 10};
+    hsize_t retrieved_chunk_dims[DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK];
+    hsize_t start[DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK];
+    hsize_t count[DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK];
+    size_t  i, data_size, chunk_size;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id         = H5I_INVALID_HID;
+    hid_t   dcpl_id         = H5I_INVALID_HID;
+    hid_t   fspace_id       = H5I_INVALID_HID;
+    hid_t   mspace_id       = H5I_INVALID_HID;
+    void   *write_buf       = NULL;
+    void   *read_buf        = NULL;
+
+    TESTING("write to dataset with multiple chunks using differently shaped dataspaces");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or get property list aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Screate_simple(DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK, dims,
+                                      NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_chunk(dcpl_id, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK, chunk_dims) <
+        0) {
+        H5_FAILED();
+        HDprintf("    failed to set chunking on DCPL\n");
+        goto error;
+    }
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME,
+                              DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE, fspace_id,
+                              H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n",
+                 DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /*
+     * See if a copy of the DCPL reports the correct chunking.
+     */
+    if (H5Pclose(dcpl_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close DCPL\n");
+        goto error;
+    }
+
+    if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve copy of DCPL\n");
+        goto error;
+    }
+
+    memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+    if (H5Pget_chunk(dcpl_id, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK,
+                     retrieved_chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve chunking info\n");
+        goto error;
+    }
+
+    for (i = 0; i < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+        if (chunk_dims[i] != retrieved_chunk_dims[i]) {
+            H5_FAILED();
+            HDprintf("    chunk dimensionality retrieved from DCPL didn't match originally specified "
+                     "dimensionality\n");
+            goto error;
+        }
+    }
+
+    /* Size (in bytes) of a single chunk */
+    for (i = 0, chunk_size = 1; i < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+        chunk_size *= chunk_dims[i];
+    chunk_size *= DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+    /* Size (in bytes) of the whole dataset */
+    for (i = 0, data_size = 1; i < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+    if (NULL == (write_buf = HDmalloc(data_size)))
+        TEST_ERROR;
+
+    /*
+     * Ensure that each underlying chunk contains the values
+     *
+     * chunk_index .. (chunk_nelemts - 1) + chunk_index.
+     *
+     * That is to say, for a chunk size of 10 x 10, chunk 0
+     * contains the values
+     *
+     * 0 .. 99
+     *
+     * while the next chunk contains the values
+     *
+     * 1 .. 100
+     *
+     * and so on.
+     */
+    for (i = 0; i < data_size / DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE; i++) {
+        size_t j;
+        size_t base;
+        size_t tot_adjust;
+
+        /*
+         * Calculate a starting base value by taking the index value mod
+         * the size of a chunk in each dimension.
+         */
+        for (j = 0, base = i; j < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; j++)
+            if (chunk_dims[j] > 1 && base >= chunk_dims[j])
+                base %= chunk_dims[j];
+
+        /*
+         * Calculate the adjustment in each dimension.
+         */
+        for (j = 0, tot_adjust = 0; j < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+            if (j == (DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+                tot_adjust += (i % dims[j]) / chunk_dims[j];
+            else {
+                size_t k;
+                size_t n_faster_elemts;
+
+                /*
+                 * Calculate the number of elements in faster dimensions.
+                 */
+                for (k = j + 1, n_faster_elemts = 1;
+                     k < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; k++)
+                    n_faster_elemts *= dims[k];
+
+                tot_adjust += (((i / n_faster_elemts) / chunk_dims[j]) * (dims[j + 1] / chunk_dims[j + 1])) +
+                              (((i / n_faster_elemts) % chunk_dims[j]) * chunk_dims[j + 1]);
+            }
+        }
+
+        ((int *)write_buf)[i] = (int)(base + tot_adjust);
+    }
+
+    /*
+     * Write every chunk in the dataset.
+     */
+    if (H5Dwrite(dset_id, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL,
+                 H5P_DEFAULT, write_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't write to dataset '%s'\n",
+                 DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+
+    /*
+     * Close the dataset and its file dataspace, then re-open the dataset
+     * before reading back. NOTE(review): presumably this forces the read to
+     * go through a freshly opened object handle rather than the one used
+     * for writing -- confirm intent.
+     */
+    if (fspace_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Sclose(fspace_id);
+        }
+        H5E_END_TRY;
+        fspace_id = H5I_INVALID_HID;
+    }
+    if (dset_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Dclose(dset_id);
+        }
+        H5E_END_TRY;
+        dset_id = H5I_INVALID_HID;
+    }
+
+    if ((dset_id =
+             H5Dopen2(group_id, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n",
+                 DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    /*
+     * Allocate single chunk-sized read buffer.
+     */
+    if (NULL == (read_buf = HDmalloc(chunk_size))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset read\n");
+        goto error;
+    }
+
+    /*
+     * Create 1-dimensional memory dataspace for read buffer.
+     */
+    {
+        hsize_t mdims[] = {chunk_size / DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE};
+
+        if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to create memory dataspace\n");
+            goto error;
+        }
+    }
+
+    /* Each hyperslab selection covers exactly one chunk */
+    for (i = 0; i < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+        count[i] = chunk_dims[i];
+    }
+
+    /*
+     * Read every chunk in the dataset, checking the data for each one.
+     */
+    HDprintf("\n");
+    for (i = 0; i < data_size / chunk_size; i++) {
+        size_t j;
+
+        HDprintf("\r  Reading chunk %zu", i);
+
+        /* Compute the file-space offset of chunk i in each dimension */
+        for (j = 0; j < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+            if (dims[j] == chunk_dims[j])
+                start[j] = 0;
+            else if (j == (DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+                /* Fastest changing dimension */
+                start[j] = (i * chunk_dims[j]) % dims[j];
+            else
+                start[j] = ((i * chunk_dims[j + 1]) / dims[j + 1]) * (chunk_dims[j]);
+        }
+
+        /*
+         * Adjust file dataspace selection for next chunk.
+         */
+        if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, NULL, count, NULL) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to set hyperslab selection\n");
+            goto error;
+        }
+
+        memset(read_buf, 0, chunk_size);
+        if (H5Dread(dset_id, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE, mspace_id, fspace_id,
+                    H5P_DEFAULT, read_buf) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't read from dataset '%s'\n",
+                     DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+            goto error;
+        }
+
+        /*
+         * Chunk i was written with the linear values
+         * i .. i + (chunk elements - 1), so the 1-dimensional read
+         * buffer can be verified with a single loop.
+         */
+        for (j = 0; j < (hsize_t)chunk_size / DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE;
+             j++)
+            if (((int *)read_buf)[j] != (int)(j + i)) {
+                H5_FAILED();
+                HDprintf("    data verification failed for chunk %lld\n", (long long)i);
+                goto error;
+            }
+    }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of buffers and HDF5 IDs; errors are suppressed */
+    H5E_BEGIN_TRY
+    {
+        if (write_buf)
+            HDfree(write_buf);
+        if (read_buf)
+            HDfree(read_buf);
+        H5Pclose(dcpl_id);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a dataset composed of multiple chunks
+ * can be written and read correctly several times in a row.
+ * When reading back the chunks of the dataset, the file
+ * dataspace and memory dataspace used are the same shape
+ * (both 2-dimensional, chunk-sized).
+ *
+ * Returns 0 on success (or when the connector lacks the required
+ * capabilities), 1 on failure.
+ */
+static int
+test_overwrite_multi_chunk_dataset_same_shape_read(void)
+{
+    hsize_t dims[DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK]       = {100, 100};
+    hsize_t chunk_dims[DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK] = {10, 10};
+    hsize_t retrieved_chunk_dims[DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK];
+    hsize_t start[DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK];
+    hsize_t count[DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK];
+    size_t  i, data_size, chunk_size;
+    size_t  niter;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id         = H5I_INVALID_HID;
+    hid_t   dcpl_id         = H5I_INVALID_HID;
+    hid_t   fspace_id       = H5I_INVALID_HID;
+    hid_t   mspace_id       = H5I_INVALID_HID;
+    void   *write_buf       = NULL;
+    /* Fixed-size read buffer; its dimensions must match chunk_dims above */
+    int read_buf[10][10];
+
+    TESTING("several overwrites to dataset with multiple chunks using same shaped dataspaces");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or get property list aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Screate_simple(DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK,
+                                      dims, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_chunk(dcpl_id, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK,
+                     chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to set chunking on DCPL\n");
+        goto error;
+    }
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME,
+                              DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPE, fspace_id,
+                              H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n",
+                 DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /*
+     * See if a copy of the DCPL reports the correct chunking.
+     */
+    if (H5Pclose(dcpl_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close DCPL\n");
+        goto error;
+    }
+
+    if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve copy of DCPL\n");
+        goto error;
+    }
+
+    memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+    if (H5Pget_chunk(dcpl_id, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK,
+                     retrieved_chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve chunking info\n");
+        goto error;
+    }
+
+    for (i = 0; i < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+        if (chunk_dims[i] != retrieved_chunk_dims[i]) {
+            H5_FAILED();
+            HDprintf("    chunk dimensionality retrieved from DCPL didn't match originally specified "
+                     "dimensionality\n");
+            goto error;
+        }
+    }
+
+    /* Size (in bytes) of a single chunk */
+    for (i = 0, chunk_size = 1; i < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+        chunk_size *= chunk_dims[i];
+    chunk_size *= DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+    /* Size (in bytes) of the whole dataset */
+    for (i = 0, data_size = 1; i < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+    if (NULL == (write_buf = HDmalloc(data_size)))
+        TEST_ERROR;
+
+    /*
+     * Create 2-dimensional memory dataspace for read buffer.
+     */
+    {
+        hsize_t mdims[] = {chunk_dims[0], chunk_dims[1]};
+
+        if ((mspace_id = H5Screate_simple(2, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to create memory dataspace\n");
+            goto error;
+        }
+    }
+
+    /* Each hyperslab selection covers exactly one chunk */
+    for (i = 0; i < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+        count[i] = chunk_dims[i];
+    }
+
+    HDprintf("\n");
+    for (niter = 0; niter < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_NITERS; niter++) {
+        memset(write_buf, 0, data_size);
+
+        /*
+         * Ensure that each underlying chunk contains the values
+         *
+         * chunk_index .. (chunk_nelemts - 1) + chunk_index.
+         *
+         * That is to say, for a chunk size of 10 x 10, chunk 0
+         * contains the values
+         *
+         * 0 .. 99
+         *
+         * while the next chunk contains the values
+         *
+         * 1 .. 100
+         *
+         * and so on. On each iteration, we add 1 to the previous
+         * values.
+         */
+        for (i = 0; i < data_size / DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE; i++) {
+            size_t j;
+            size_t base;
+            size_t tot_adjust;
+
+            /*
+             * Calculate a starting base value by taking the index value mod
+             * the size of a chunk in each dimension.
+             */
+            for (j = 0, base = i; j < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; j++)
+                if (chunk_dims[j] > 1 && base >= chunk_dims[j])
+                    base %= chunk_dims[j];
+
+            /*
+             * Calculate the adjustment in each dimension.
+             */
+            for (j = 0, tot_adjust = 0;
+                 j < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+                if (j == (DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+                    tot_adjust += (i % dims[j]) / chunk_dims[j];
+                else {
+                    size_t k;
+                    size_t n_faster_elemts;
+
+                    /*
+                     * Calculate the number of elements in faster dimensions.
+                     */
+                    for (k = j + 1, n_faster_elemts = 1;
+                         k < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; k++)
+                        n_faster_elemts *= dims[k];
+
+                    tot_adjust +=
+                        (((i / n_faster_elemts) / chunk_dims[j]) * (dims[j + 1] / chunk_dims[j + 1])) +
+                        (((i / n_faster_elemts) % chunk_dims[j]) * chunk_dims[j + 1]);
+                }
+            }
+
+            ((int *)write_buf)[i] = (int)(base + tot_adjust + niter);
+        }
+
+        /*
+         * Write every chunk in the dataset.
+         */
+        if (H5Dwrite(dset_id, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL,
+                     H5P_DEFAULT, write_buf) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't write to dataset '%s'\n",
+                     DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+            goto error;
+        }
+
+        /*
+         * Close the dataset and its file dataspace, then re-open the dataset
+         * before reading back. NOTE(review): presumably this forces the read
+         * to go through a freshly opened object handle on each iteration --
+         * confirm intent.
+         */
+        if (fspace_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Sclose(fspace_id);
+            }
+            H5E_END_TRY;
+            fspace_id = H5I_INVALID_HID;
+        }
+        if (dset_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Dclose(dset_id);
+            }
+            H5E_END_TRY;
+            dset_id = H5I_INVALID_HID;
+        }
+
+        if ((dset_id = H5Dopen2(group_id, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME,
+                                H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't open dataset '%s'\n",
+                     DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+            goto error;
+        }
+
+        if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get dataset dataspace\n");
+            goto error;
+        }
+
+        /*
+         * Read every chunk in the dataset, checking the data for each one.
+         */
+        for (i = 0; i < data_size / chunk_size; i++) {
+            size_t j, k;
+
+            HDprintf("\r  Reading chunk %zu", i);
+
+            /* Compute the file-space offset of chunk i in each dimension */
+            for (j = 0; j < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+                if (dims[j] == chunk_dims[j])
+                    start[j] = 0;
+                else if (j == (DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+                    /* Fastest changing dimension */
+                    start[j] = (i * chunk_dims[j]) % dims[j];
+                else
+                    start[j] = ((i * chunk_dims[j + 1]) / dims[j + 1]) * (chunk_dims[j]);
+            }
+
+            /*
+             * Adjust file dataspace selection for next chunk.
+             */
+            if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, NULL, count, NULL) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to set hyperslab selection\n");
+                goto error;
+            }
+
+            /* Clear the read buffer so stale values can't mask a failed read */
+            for (j = 0; j < chunk_dims[0]; j++)
+                for (k = 0; k < chunk_dims[1]; k++)
+                    read_buf[j][k] = 0;
+
+            if (H5Dread(dset_id, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPE, mspace_id,
+                        fspace_id, H5P_DEFAULT, read_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't read from dataset '%s'\n",
+                         DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+                goto error;
+            }
+
+            for (j = 0; j < chunk_dims[0]; j++) {
+                for (k = 0; k < chunk_dims[1]; k++) {
+                    /*
+                     * Element (j, k) of chunk i on iteration niter should
+                     * hold (j * n_columns) + k + i + niter. The row stride
+                     * is the number of columns per chunk row, chunk_dims[1]
+                     * (the previous use of chunk_dims[0] here only worked
+                     * because the test chunks are square).
+                     */
+                    if (read_buf[j][k] != (int)((j * chunk_dims[1]) + k + i + niter)) {
+                        H5_FAILED();
+                        HDprintf("    data verification failed for chunk %lld\n", (long long)i);
+                        goto error;
+                    }
+                }
+            }
+        }
+    }
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of buffers and HDF5 IDs; errors are suppressed */
+    H5E_BEGIN_TRY
+    {
+        if (write_buf)
+            HDfree(write_buf);
+        H5Pclose(dcpl_id);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a dataset composed of multiple chunks
+ * can be written and read correctly several times in a row.
+ * When reading back the chunks of the dataset, the file
+ * dataspace and memory dataspace used are differently shaped:
+ * each 2-dimensional chunk selection in the file dataspace is
+ * read into a 1-dimensional memory dataspace holding the same
+ * number of elements.
+ *
+ * Returns 0 on success (or when the connector lacks the required
+ * capabilities), 1 on failure.
+ */
+static int
+test_overwrite_multi_chunk_dataset_diff_shape_read(void)
+{
+    hsize_t dims[DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK]       = {100, 100};
+    hsize_t chunk_dims[DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK] = {10, 10};
+    hsize_t retrieved_chunk_dims[DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK];
+    hsize_t start[DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK];
+    hsize_t count[DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK];
+    size_t  i, data_size, chunk_size;
+    size_t  niter;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id         = H5I_INVALID_HID;
+    hid_t   dcpl_id         = H5I_INVALID_HID;
+    hid_t   fspace_id       = H5I_INVALID_HID;
+    hid_t   mspace_id       = H5I_INVALID_HID;
+    void   *write_buf       = NULL;
+    void   *read_buf        = NULL;
+
+    TESTING("several overwrites to dataset with multiple chunks using differently shaped dataspaces");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or get property list aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Screate_simple(DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK,
+                                      dims, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_chunk(dcpl_id, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK,
+                     chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to set chunking on DCPL\n");
+        goto error;
+    }
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME,
+                              DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE, fspace_id,
+                              H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n",
+                 DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /*
+     * See if a copy of the DCPL reports the correct chunking.
+     */
+    if (H5Pclose(dcpl_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close DCPL\n");
+        goto error;
+    }
+
+    if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve copy of DCPL\n");
+        goto error;
+    }
+
+    memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+    if (H5Pget_chunk(dcpl_id, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK,
+                     retrieved_chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve chunking info\n");
+        goto error;
+    }
+
+    for (i = 0; i < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+        if (chunk_dims[i] != retrieved_chunk_dims[i]) {
+            H5_FAILED();
+            HDprintf("    chunk dimensionality retrieved from DCPL didn't match originally specified "
+                     "dimensionality\n");
+            goto error;
+        }
+    }
+
+    /* Size (in bytes) of a single chunk */
+    for (i = 0, chunk_size = 1; i < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+        chunk_size *= chunk_dims[i];
+    chunk_size *= DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+    /* Size (in bytes) of the whole dataset */
+    for (i = 0, data_size = 1; i < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+    if (NULL == (write_buf = HDmalloc(data_size)))
+        TEST_ERROR;
+
+    /*
+     * Allocate single chunk-sized read buffer.
+     */
+    if (NULL == (read_buf = HDmalloc(chunk_size))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset read\n");
+        goto error;
+    }
+
+    /*
+     * Create 1-dimensional memory dataspace for read buffer.
+     */
+    {
+        hsize_t mdims[] = {chunk_size / DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE};
+
+        if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to create memory dataspace\n");
+            goto error;
+        }
+    }
+
+    /* Each hyperslab selection covers exactly one chunk */
+    for (i = 0; i < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+        count[i] = chunk_dims[i];
+    }
+
+    HDprintf("\n");
+    for (niter = 0; niter < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_NITERS; niter++) {
+        memset(write_buf, 0, data_size);
+
+        /*
+         * Ensure that each underlying chunk contains the values
+         *
+         * chunk_index .. (chunk_nelemts - 1) + chunk_index.
+         *
+         * That is to say, for a chunk size of 10 x 10, chunk 0
+         * contains the values
+         *
+         * 0 .. 99
+         *
+         * while the next chunk contains the values
+         *
+         * 1 .. 100
+         *
+         * and so on. On each iteration, we add 1 to the previous
+         * values.
+         */
+        for (i = 0; i < data_size / DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE; i++) {
+            size_t j;
+            size_t base;
+            size_t tot_adjust;
+
+            /*
+             * Calculate a starting base value by taking the index value mod
+             * the size of a chunk in each dimension.
+             */
+            for (j = 0, base = i; j < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; j++)
+                if (chunk_dims[j] > 1 && base >= chunk_dims[j])
+                    base %= chunk_dims[j];
+
+            /*
+             * Calculate the adjustment in each dimension.
+             */
+            for (j = 0, tot_adjust = 0;
+                 j < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+                if (j == (DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+                    tot_adjust += (i % dims[j]) / chunk_dims[j];
+                else {
+                    size_t k;
+                    size_t n_faster_elemts;
+
+                    /*
+                     * Calculate the number of elements in faster dimensions.
+                     */
+                    for (k = j + 1, n_faster_elemts = 1;
+                         k < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; k++)
+                        n_faster_elemts *= dims[k];
+
+                    tot_adjust +=
+                        (((i / n_faster_elemts) / chunk_dims[j]) * (dims[j + 1] / chunk_dims[j + 1])) +
+                        (((i / n_faster_elemts) % chunk_dims[j]) * chunk_dims[j + 1]);
+                }
+            }
+
+            ((int *)write_buf)[i] = (int)(base + tot_adjust + niter);
+        }
+
+        /*
+         * Write every chunk in the dataset.
+         */
+        if (H5Dwrite(dset_id, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL,
+                     H5P_DEFAULT, write_buf) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't write to dataset '%s'\n",
+                     DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+            goto error;
+        }
+
+        /*
+         * Close the dataset and its file dataspace, then re-open the dataset
+         * before reading back. NOTE(review): presumably this forces the read
+         * to go through a freshly opened object handle on each iteration --
+         * confirm intent.
+         */
+        if (fspace_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Sclose(fspace_id);
+            }
+            H5E_END_TRY;
+            fspace_id = H5I_INVALID_HID;
+        }
+        if (dset_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Dclose(dset_id);
+            }
+            H5E_END_TRY;
+            dset_id = H5I_INVALID_HID;
+        }
+
+        if ((dset_id = H5Dopen2(group_id, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME,
+                                H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't open dataset '%s'\n",
+                     DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+            goto error;
+        }
+
+        if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get dataset dataspace\n");
+            goto error;
+        }
+
+        /*
+         * Read every chunk in the dataset, checking the data for each one.
+         */
+        for (i = 0; i < data_size / chunk_size; i++) {
+            size_t j;
+
+            HDprintf("\r  Reading chunk %zu", i);
+
+            /* Compute the file-space offset of chunk i in each dimension */
+            for (j = 0; j < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+                if (dims[j] == chunk_dims[j])
+                    start[j] = 0;
+                else if (j == (DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+                    /* Fastest changing dimension */
+                    start[j] = (i * chunk_dims[j]) % dims[j];
+                else
+                    start[j] = ((i * chunk_dims[j + 1]) / dims[j + 1]) * (chunk_dims[j]);
+            }
+
+            /*
+             * Adjust file dataspace selection for next chunk.
+             */
+            if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, NULL, count, NULL) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to set hyperslab selection\n");
+                goto error;
+            }
+
+            /* Clear the read buffer so stale values can't mask a failed read */
+            memset(read_buf, 0, chunk_size);
+            if (H5Dread(dset_id, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE, mspace_id,
+                        fspace_id, H5P_DEFAULT, read_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't read from dataset '%s'\n",
+                         DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+                goto error;
+            }
+
+            /*
+             * Chunk i on iteration niter was written with the linear values
+             * i + niter .. i + niter + (chunk elements - 1), so the
+             * 1-dimensional read buffer can be verified with a single loop.
+             */
+            for (j = 0;
+                 j < (hsize_t)chunk_size / DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE;
+                 j++)
+                if (((int *)read_buf)[j] != (int)(j + i + niter)) {
+                    H5_FAILED();
+                    HDprintf("    data verification failed for chunk %lld\n", (long long)i);
+                    goto error;
+                }
+        }
+    }
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of buffers and HDF5 IDs; errors are suppressed */
+    H5E_BEGIN_TRY
+    {
+        if (write_buf)
+            HDfree(write_buf);
+        if (read_buf)
+            HDfree(read_buf);
+        H5Pclose(dcpl_id);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that a partial chunk can be written and
+ * then read correctly when the selection used in a chunked
+ * dataset's file dataspace is H5S_ALL.
+ */
+#define FIXED_DIMSIZE 25
+#define FIXED_CHUNK_DIMSIZE 10
+static int
+test_read_partial_chunk_all_selection(void)
+{
+    /* 25x25 dataset stored in 10x10 chunks: the right/bottom edge chunks are
+     * partial, so writing and reading the full extent with H5S_ALL exercises
+     * the partial-edge-chunk I/O path.  Returns 0 on success, 1 on failure.
+     */
+    DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_CTYPE write_buf[FIXED_DIMSIZE][FIXED_DIMSIZE];
+    DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_CTYPE read_buf[FIXED_DIMSIZE][FIXED_DIMSIZE];
+    hsize_t dims[DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_SPACE_RANK] = {FIXED_DIMSIZE, FIXED_DIMSIZE};
+    hsize_t chunk_dims[DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_SPACE_RANK] = {FIXED_CHUNK_DIMSIZE,
+                                                                                   FIXED_CHUNK_DIMSIZE};
+    hsize_t retrieved_chunk_dims[DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_SPACE_RANK];
+    size_t  i, j;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id         = H5I_INVALID_HID;
+    hid_t   dcpl_id         = H5I_INVALID_HID;
+    hid_t   fspace_id       = H5I_INVALID_HID;
+
+    TESTING("reading a partial chunk using H5S_ALL for file dataspace");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or get property list aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Screate_simple(DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_SPACE_RANK, dims, NULL)) <
+        0)
+        TEST_ERROR;
+
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_chunk(dcpl_id, DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_SPACE_RANK, chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to set chunking on DCPL\n");
+        goto error;
+    }
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_NAME,
+                              DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+                              dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /*
+     * See if a copy of the DCPL reports the correct chunking.
+     */
+    if (H5Pclose(dcpl_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close DCPL\n");
+        goto error;
+    }
+
+    if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve copy of DCPL\n");
+        goto error;
+    }
+
+    /* Zero first so a short/failed H5Pget_chunk can't leave stale values */
+    memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+    if (H5Pget_chunk(dcpl_id, DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_SPACE_RANK, retrieved_chunk_dims) <
+        0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve chunking info\n");
+        goto error;
+    }
+
+    for (i = 0; i < DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_SPACE_RANK; i++) {
+        if (chunk_dims[i] != retrieved_chunk_dims[i]) {
+            H5_FAILED();
+            HDprintf("    chunk dimensionality retrieved from DCPL didn't match originally specified "
+                     "dimensionality\n");
+            goto error;
+        }
+    }
+
+    /* Fill the write buffer with a unique row-major ramp of values */
+    for (i = 0; i < FIXED_DIMSIZE; i++)
+        for (j = 0; j < FIXED_DIMSIZE; j++)
+            write_buf[i][j] = (DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_CTYPE)((i * FIXED_DIMSIZE) + j);
+
+    /* Pre-fill the read buffer with -1 so elements the read misses are detectable */
+    for (i = 0; i < FIXED_DIMSIZE; i++)
+        for (j = 0; j < FIXED_DIMSIZE; j++)
+            read_buf[i][j] = -1;
+
+    if (H5Dwrite(dset_id, DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                 write_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to write to dataset\n");
+        goto error;
+    }
+
+    /*
+     * Close and re-open the dataset to ensure that the data is written.
+     */
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if ((dset_id = H5Dopen2(group_id, DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to re-open dataset\n");
+        goto error;
+    }
+
+    if (H5Dread(dset_id, DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                read_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to read from dataset\n");
+        goto error;
+    }
+
+    /* NOTE(review): the comparison casts the expected value to int; this
+     * assumes the dataset C type macro is int-compatible — confirm against
+     * the DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_CTYPE definition.
+     */
+    for (i = 0; i < FIXED_DIMSIZE; i++)
+        for (j = 0; j < FIXED_DIMSIZE; j++)
+            if (read_buf[i][j] != (int)((i * FIXED_DIMSIZE) + j)) {
+                H5_FAILED();
+                HDprintf(
+                    "    data verification failed for read buffer element %lld: expected %lld but was %lld\n",
+                    (long long)((i * FIXED_DIMSIZE) + j), (long long)((i * FIXED_DIMSIZE) + j),
+                    (long long)read_buf[i][j]);
+                goto error;
+            }
+
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; failures here are intentionally suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(dcpl_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+#undef FIXED_DIMSIZE
+#undef FIXED_CHUNK_DIMSIZE
+
+/*
+ * A test to ensure that a partial chunk can be written and
+ * then read correctly when the selection used in a chunked
+ * dataset's file dataspace is a hyperslab selection.
+ */
+#define FIXED_DIMSIZE 25
+#define FIXED_CHUNK_DIMSIZE 10
+#define FIXED_NCHUNKS 9 /* For convenience - make sure to adjust this as necessary */
+static int
+test_read_partial_chunk_hyperslab_selection(void)
+{
+    /* 25x25 dataset stored in 10x10 chunks.  Each of the FIXED_NCHUNKS (3x3)
+     * chunks — including the partial right/bottom edge chunks — is written
+     * and read back individually through hyperslab selections on the file
+     * dataspace.  Returns 0 on success, 1 on failure.
+     */
+    DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_CTYPE write_buf[FIXED_CHUNK_DIMSIZE][FIXED_CHUNK_DIMSIZE];
+    DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_CTYPE read_buf[FIXED_CHUNK_DIMSIZE][FIXED_CHUNK_DIMSIZE];
+    hsize_t dims[DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK] = {FIXED_DIMSIZE, FIXED_DIMSIZE};
+    hsize_t chunk_dims[DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK] = {FIXED_CHUNK_DIMSIZE,
+                                                                                     FIXED_CHUNK_DIMSIZE};
+    hsize_t retrieved_chunk_dims[DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK];
+    size_t  i, j, k;
+    size_t  n_chunks_per_dim;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id         = H5I_INVALID_HID;
+    hid_t   dcpl_id         = H5I_INVALID_HID;
+    hid_t   mspace_id       = H5I_INVALID_HID;
+    hid_t   fspace_id       = H5I_INVALID_HID;
+
+    TESTING("reading a partial chunk using a hyperslab selection in file dataspace");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or get property list aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id =
+             H5Screate_simple(DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+        TEST_ERROR;
+
+    if (H5Pset_chunk(dcpl_id, DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK, chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to set chunking on DCPL\n");
+        goto error;
+    }
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_NAME,
+                              DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+                              dcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /*
+     * See if a copy of the DCPL reports the correct chunking.
+     */
+    if (H5Pclose(dcpl_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close DCPL\n");
+        goto error;
+    }
+
+    if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve copy of DCPL\n");
+        goto error;
+    }
+
+    memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+    if (H5Pget_chunk(dcpl_id, DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK,
+                     retrieved_chunk_dims) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve chunking info\n");
+        goto error;
+    }
+
+    for (i = 0; i < DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK; i++) {
+        if (chunk_dims[i] != retrieved_chunk_dims[i]) {
+            H5_FAILED();
+            HDprintf("    chunk dimensionality retrieved from DCPL didn't match originally specified "
+                     "dimensionality\n");
+            goto error;
+        }
+    }
+
+    /* Fill the (chunk-sized) write buffer with a unique row-major ramp.
+     * FIX: cast to this test's own dataset C type; the original cast used
+     * the ALL_SEL test's type macro (a copy/paste slip — presumably both
+     * expand to the same type, but verify in H5_api_dataset_test.h).
+     */
+    for (i = 0; i < FIXED_CHUNK_DIMSIZE; i++)
+        for (j = 0; j < FIXED_CHUNK_DIMSIZE; j++)
+            write_buf[i][j] =
+                (DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_CTYPE)((i * FIXED_CHUNK_DIMSIZE) + j);
+
+    for (i = 0; i < FIXED_CHUNK_DIMSIZE; i++)
+        for (j = 0; j < FIXED_CHUNK_DIMSIZE; j++)
+            read_buf[i][j] = -1;
+
+    /*
+     * Create chunk-sized memory dataspace for read buffer.
+     */
+    {
+        hsize_t mdims[DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK] = {FIXED_CHUNK_DIMSIZE,
+                                                                                    FIXED_CHUNK_DIMSIZE};
+
+        if ((mspace_id = H5Screate_simple(DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK, mdims,
+                                          NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to create memory dataspace\n");
+            goto error;
+        }
+    }
+
+    /* Number of chunks along each dimension (dataset is square), rounding up
+     * to include the partial edge chunk.  Loop-invariant, so computed once.
+     */
+    n_chunks_per_dim = (dims[1] / chunk_dims[1]) + (((dims[1] % chunk_dims[1]) > 0) ? 1 : 0);
+
+    /*
+     * Write and read each chunk in the dataset.
+     */
+    for (i = 0; i < FIXED_NCHUNKS; i++) {
+        hsize_t start[DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK];
+        hsize_t count[DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK];
+        hbool_t on_partial_edge_chunk = FALSE;
+
+        /* Chunk i is a partial edge chunk when it lies in the last chunk-row
+         * or the last chunk-column of the (square) chunk grid */
+        on_partial_edge_chunk =
+            (i > 0) && (((i + 1) % n_chunks_per_dim == 0) || (i / n_chunks_per_dim == n_chunks_per_dim - 1));
+
+        for (j = 0; j < DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK; j++) {
+            if (j == 0)
+                start[j] = (i / n_chunks_per_dim) * chunk_dims[j];
+            else
+                start[j] = (i % n_chunks_per_dim) * chunk_dims[j];
+
+            if (on_partial_edge_chunk) {
+                /*
+                 * Partial edge chunk
+                 */
+                if (j == 0) {
+                    if (i / n_chunks_per_dim == n_chunks_per_dim - 1)
+                        /* This partial edge chunk spans the remainder of the first dimension */
+                        count[j] = dims[j] - ((i / n_chunks_per_dim) * chunk_dims[j]);
+                    else
+                        /* This partial edge chunk spans the whole first dimension */
+                        count[j] = chunk_dims[j];
+                }
+                else {
+                    if (i % n_chunks_per_dim == n_chunks_per_dim - 1)
+                        /* This partial edge chunk spans the remainder of the second dimension */
+                        count[j] = dims[j] - ((i % n_chunks_per_dim) * chunk_dims[j]);
+                    else
+                        /* This partial edge chunk spans the whole second dimension */
+                        count[j] = chunk_dims[j];
+                }
+            }
+            else
+                count[j] = chunk_dims[j];
+        }
+
+        if (on_partial_edge_chunk) {
+            hsize_t m_start[DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK] = {0, 0};
+
+            if (H5Sselect_hyperslab(mspace_id, H5S_SELECT_SET, m_start, NULL, count, NULL) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to select hyperslab in memory dataspace\n");
+                goto error;
+            }
+        }
+        else {
+            if (H5Sselect_all(mspace_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to select entire memory dataspace\n");
+                goto error;
+            }
+        }
+
+        if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, NULL, count, NULL) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to select hyperslab\n");
+            goto error;
+        }
+
+        HDprintf("\r Writing chunk %zu", i);
+
+        if (H5Dwrite(dset_id, DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_DTYPE, mspace_id, fspace_id,
+                     H5P_DEFAULT, write_buf) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to write to dataset\n");
+            goto error;
+        }
+
+        /*
+         * Close and re-open the dataset to ensure the data gets written.
+         */
+        if (H5Sclose(fspace_id) < 0)
+            TEST_ERROR;
+        if (H5Dclose(dset_id) < 0)
+            TEST_ERROR;
+        if ((dset_id = H5Dopen2(group_id, DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_NAME, H5P_DEFAULT)) <
+            0) {
+            H5_FAILED();
+            HDprintf("    failed to re-open dataset\n");
+            goto error;
+        }
+
+        if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to retrieve dataspace from dataset\n");
+            goto error;
+        }
+
+        if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, NULL, count, NULL) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to select hyperslab\n");
+            goto error;
+        }
+
+        HDprintf("\r Reading chunk %zu", i);
+
+        if (H5Dread(dset_id, DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_DTYPE, mspace_id, fspace_id,
+                    H5P_DEFAULT, read_buf) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to read from dataset\n");
+            goto error;
+        }
+
+        /* NOTE(review): the full chunk-sized buffer is verified even for
+         * partial chunks; elements outside the partial selection retain the
+         * previous iteration's values, which happen to match the expected
+         * ramp because every chunk is written with the same pattern.
+         */
+        for (j = 0; j < FIXED_CHUNK_DIMSIZE; j++)
+            for (k = 0; k < FIXED_CHUNK_DIMSIZE; k++)
+                if (read_buf[j][k] != (int)((j * FIXED_CHUNK_DIMSIZE) + k)) {
+                    H5_FAILED();
+                    HDprintf("    data verification failed for read buffer element %lld: expected %lld but "
+                             "was %lld\n",
+                             (long long)((j * FIXED_CHUNK_DIMSIZE) + k),
+                             (long long)((j * FIXED_CHUNK_DIMSIZE) + k), (long long)read_buf[j][k]);
+                    goto error;
+                }
+    }
+
+    if (H5Pclose(dcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; failures here are intentionally suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(dcpl_id);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+#undef FIXED_DIMSIZE
+#undef FIXED_CHUNK_DIMSIZE
+#undef FIXED_NCHUNKS
+
+/*
+ * A test to ensure that a partial chunk can be written and
+ * then read correctly when the selection used in a chunked
+ * dataset's file dataspace is a point selection.
+ */
+#define FIXED_DIMSIZE 25
+#define FIXED_CHUNK_DIMSIZE 10
+static int
+test_read_partial_chunk_point_selection(void)
+{
+    /* Placeholder: the point-selection variant of the partial-chunk read test
+     * is not implemented yet; the test only announces itself and skips.
+     */
+    TESTING("reading a partial chunk using a point selection in file dataspace");
+    SKIPPED();
+
+    /* NOTE(review): every other SKIPPED path in this file returns 0 (success),
+     * but this stub returns 1, so the skip is counted as a failure by
+     * H5_api_dataset_test() — confirm this is intentional (e.g. to flag the
+     * missing implementation).
+     */
+    return 1;
+}
+#undef FIXED_DIMSIZE
+#undef FIXED_CHUNK_DIMSIZE
+
+/*
+ * A test to verify that H5Dvlen_get_buf_size returns
+ * correct size
+ */
+static int
+test_get_vlen_buf_size(void)
+{
+    /* Writes a variable-length (hvl_t) dataset whose element i holds i+1
+     * unsigned ints, then checks that H5Dvlen_get_buf_size reports the exact
+     * total payload size (sum 1..DIM elements * sizeof(unsigned int)).
+     * Returns 0 on success, 1 on failure.
+     */
+    hvl_t   wdata[DATASET_GET_VLEN_BUF_SIZE_DSET_SPACE_DIM]; /* Information to write */
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID;
+    hid_t   group_id        = H5I_INVALID_HID;
+    hid_t   dataset         = H5I_INVALID_HID;
+    hid_t   dspace_id       = H5I_INVALID_HID;
+    hid_t   dtype_id        = H5I_INVALID_HID;
+    hbool_t freed_wdata     = FALSE;
+    hsize_t dims1[]         = {DATASET_GET_VLEN_BUF_SIZE_DSET_SPACE_DIM};
+    hsize_t size; /* Number of bytes which will be used */
+    unsigned i, j;
+
+    TESTING("H5Dvlen_get_buf_size");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or more aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    /* Zero wdata up front so the error path can safely H5Treclaim a
+     * partially-built buffer (NULL p / zero len entries are no-ops) */
+    memset(wdata, 0, sizeof(wdata));
+
+    /* Allocate and initialize VL data to write */
+    for (i = 0; i < DATASET_GET_VLEN_BUF_SIZE_DSET_SPACE_DIM; i++) {
+        /* Check the allocation instead of dereferencing a possible NULL */
+        if (NULL == (wdata[i].p = HDmalloc((i + 1) * sizeof(unsigned int)))) {
+            H5_FAILED();
+            HDprintf("    couldn't allocate VL write buffer element\n");
+            goto error;
+        }
+        wdata[i].len = i + 1;
+        for (j = 0; j < (i + 1); j++)
+            ((unsigned int *)wdata[i].p)[j] = i * 10 + j;
+    } /* end for */
+
+    /* Open the file */
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_GET_VLEN_BUF_SIZE_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATASET_GET_VLEN_BUF_SIZE_GROUP_NAME);
+        goto error;
+    }
+
+    /* Create dataspace for dataset */
+    if ((dspace_id = H5Screate_simple(DATASET_GET_VLEN_BUF_SIZE_DSET_SPACE_RANK, dims1, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create a datatype to refer to */
+    if ((dtype_id = H5Tvlen_create(H5T_NATIVE_UINT)) < 0)
+        TEST_ERROR;
+
+    /* Create a dataset */
+    if ((dataset = H5Dcreate2(group_id, DATASET_GET_VLEN_BUF_SIZE_DSET_NAME, dtype_id, dspace_id, H5P_DEFAULT,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0)
+        TEST_ERROR;
+
+    /* Write dataset to disk */
+    if (H5Dwrite(dataset, dtype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata) < 0)
+        TEST_ERROR;
+
+    /* Make certain the correct amount of memory will be used */
+    if (H5Dvlen_get_buf_size(dataset, dtype_id, dspace_id, &size) < 0)
+        TEST_ERROR;
+
+    /* 10 elements allocated = 1 + 2 + 3 + 4 elements for each array position */
+    if (size !=
+        ((DATASET_GET_VLEN_BUF_SIZE_DSET_SPACE_DIM * (DATASET_GET_VLEN_BUF_SIZE_DSET_SPACE_DIM + 1)) / 2) *
+            sizeof(unsigned int)) {
+        H5_FAILED();
+        /* FIX: size is hsize_t and the expected value is size_t-typed; "%lu"
+         * mismatches both on LLP64 platforms (undefined behavior).  Cast to
+         * unsigned long long and print with "%llu". */
+        HDprintf("    H5Dvlen_get_buf_size returned wrong size (%llu), compared to the correct size (%llu)\n",
+                 (unsigned long long)size,
+                 (unsigned long long)(((DATASET_GET_VLEN_BUF_SIZE_DSET_SPACE_DIM *
+                                        (DATASET_GET_VLEN_BUF_SIZE_DSET_SPACE_DIM + 1)) /
+                                       2) *
+                                      sizeof(unsigned int)));
+        goto error;
+    }
+
+    /* Free the library-owned copies of the VL payloads */
+    if (H5Treclaim(dtype_id, dspace_id, H5P_DEFAULT, wdata) < 0)
+        TEST_ERROR;
+    freed_wdata = TRUE;
+
+    if (H5Dclose(dataset) < 0)
+        TEST_ERROR;
+
+    if (H5Tclose(dtype_id) < 0)
+        TEST_ERROR;
+
+    if (H5Sclose(dspace_id) < 0)
+        TEST_ERROR;
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; failures here are intentionally suppressed */
+    H5E_BEGIN_TRY
+    {
+        if (!freed_wdata)
+            H5Treclaim(dtype_id, dspace_id, H5P_DEFAULT, wdata);
+        H5Sclose(dspace_id);
+        H5Tclose(dtype_id);
+        H5Dclose(dataset);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* end test_get_vlen_buf_size() */
+
+int
+H5_api_dataset_test(void)
+{
+    /* Entry point for the dataset API test suite: runs every registered
+     * test and returns the number of tests that failed (0 == all passed).
+     */
+    int    failed = 0;
+    size_t idx;
+
+    HDprintf("**********************************************\n");
+    HDprintf("*                                            *\n");
+    HDprintf("*             API Dataset Tests              *\n");
+    HDprintf("*                                            *\n");
+    HDprintf("**********************************************\n\n");
+
+    /* Each test function returns non-zero on failure */
+    for (idx = 0; idx < ARRAY_LENGTH(dataset_tests); idx++)
+        if (dataset_tests[idx]())
+            failed++;
+
+    HDprintf("\n");
+
+    return failed;
+}
diff --git a/test/API/H5_api_dataset_test.h b/test/API/H5_api_dataset_test.h
new file mode 100644
index 0000000..5a50a06
--- /dev/null
+++ b/test/API/H5_api_dataset_test.h
@@ -0,0 +1,331 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_DATASET_TEST_H
+#define H5_API_DATASET_TEST_H
+
+#include "H5_api_test.h"
+
+int H5_api_dataset_test(void);
+
+/************************************************
+ * *
+ * API Dataset test defines *
+ * *
+ ************************************************/
+
+#define DATASET_CREATE_UNDER_ROOT_DSET_NAME "/dset_under_root"
+#define DATASET_CREATE_UNDER_ROOT_SPACE_RANK 2
+
+#define DATASET_CREATE_UNDER_EXISTING_SPACE_RANK 2
+#define DATASET_CREATE_UNDER_EXISTING_GROUP_NAME "dset_under_group_test"
+#define DATASET_CREATE_UNDER_EXISTING_DSET_NAME "nested_dset"
+
+#define DATASET_CREATE_INVALID_PARAMS_SPACE_RANK 2
+#define DATASET_CREATE_INVALID_PARAMS_GROUP_NAME "dset_create_invalid_params_test"
+#define DATASET_CREATE_INVALID_PARAMS_DSET_NAME "invalid_params_dset"
+
+#define DATASET_CREATE_ANONYMOUS_DATASET_NAME "anon_dset"
+#define DATASET_CREATE_ANONYMOUS_GROUP_NAME "anon_dset_creation_test"
+#define DATASET_CREATE_ANONYMOUS_SPACE_RANK 2
+
+#define DATASET_CREATE_ANONYMOUS_INVALID_PARAMS_DATASET_NAME "invalid_params_anon_dset"
+#define DATASET_CREATE_ANONYMOUS_INVALID_PARAMS_GROUP_NAME "anon_dset_creation_invalid_params_test"
+#define DATASET_CREATE_ANONYMOUS_INVALID_PARAMS_SPACE_RANK 2
+
+#define DATASET_CREATE_NULL_DATASPACE_TEST_SUBGROUP_NAME "dataset_with_null_space_test"
+#define DATASET_CREATE_NULL_DATASPACE_TEST_DSET_NAME "dataset_with_null_space"
+
+#define DATASET_CREATE_SCALAR_DATASPACE_TEST_SUBGROUP_NAME "dataset_with_scalar_space_test"
+#define DATASET_CREATE_SCALAR_DATASPACE_TEST_DSET_NAME "dataset_with_scalar_space"
+
+#define ZERO_DIM_DSET_TEST_GROUP_NAME "zero_dim_dset_test"
+#define ZERO_DIM_DSET_TEST_SPACE_RANK 1
+#define ZERO_DIM_DSET_TEST_DSET_NAME "zero_dim_dset"
+
+#define DATASET_MANY_CREATE_GROUP_NAME "group_for_many_datasets"
+#define DSET_NAME_BUF_SIZE 64u
+#define DATASET_NUMB 100u
+
+#define DATASET_SHAPE_TEST_DSET_BASE_NAME "dataset_shape_test"
+#define DATASET_SHAPE_TEST_SUBGROUP_NAME "dataset_shape_test"
+#define DATASET_SHAPE_TEST_NUM_ITERATIONS 5
+#define DATASET_SHAPE_TEST_MAX_DIMS 5
+
+#define DATASET_PREDEFINED_TYPE_TEST_SPACE_RANK 2
+#define DATASET_PREDEFINED_TYPE_TEST_BASE_NAME "predefined_type_dset"
+#define DATASET_PREDEFINED_TYPE_TEST_SUBGROUP_NAME "predefined_type_dataset_test"
+
+#define DATASET_STRING_TYPE_TEST_STRING_LENGTH 40
+#define DATASET_STRING_TYPE_TEST_SPACE_RANK 2
+#define DATASET_STRING_TYPE_TEST_DSET_NAME1 "fixed_length_string_dset"
+#define DATASET_STRING_TYPE_TEST_DSET_NAME2 "variable_length_string_dset"
+#define DATASET_STRING_TYPE_TEST_SUBGROUP_NAME "string_type_dataset_test"
+
+#define DATASET_COMPOUND_TYPE_TEST_SUBGROUP_NAME "compound_type_dataset_test"
+#define DATASET_COMPOUND_TYPE_TEST_DSET_NAME "compound_type_test"
+#define DATASET_COMPOUND_TYPE_TEST_MAX_SUBTYPES 5
+#define DATASET_COMPOUND_TYPE_TEST_MAX_PASSES 5
+#define DATASET_COMPOUND_TYPE_TEST_DSET_RANK 2
+
+#define DATASET_ENUM_TYPE_TEST_VAL_BASE_NAME "INDEX"
+#define DATASET_ENUM_TYPE_TEST_SUBGROUP_NAME "enum_type_dataset_test"
+#define DATASET_ENUM_TYPE_TEST_NUM_MEMBERS 16
+#define DATASET_ENUM_TYPE_TEST_SPACE_RANK 2
+#define DATASET_ENUM_TYPE_TEST_DSET_NAME1 "enum_native_dset"
+#define DATASET_ENUM_TYPE_TEST_DSET_NAME2 "enum_non_native_dset"
+
+#define DATASET_ARRAY_TYPE_TEST_SUBGROUP_NAME "array_type_dataset_test"
+#define DATASET_ARRAY_TYPE_TEST_DSET_NAME1 "array_type_test1"
+#define DATASET_ARRAY_TYPE_TEST_DSET_NAME2 "array_type_test2"
+#define DATASET_ARRAY_TYPE_TEST_DSET_NAME3 "array_type_test3"
+#define DATASET_ARRAY_TYPE_TEST_SPACE_RANK 2
+#define DATASET_ARRAY_TYPE_TEST_RANK1 2
+#define DATASET_ARRAY_TYPE_TEST_RANK2 2
+#define DATASET_ARRAY_TYPE_TEST_RANK3 2
+
+#define DATASET_CREATION_PROPERTIES_TEST_TRACK_TIMES_YES_DSET_NAME "track_times_true_test"
+#define DATASET_CREATION_PROPERTIES_TEST_TRACK_TIMES_NO_DSET_NAME "track_times_false_test"
+#define DATASET_CREATION_PROPERTIES_TEST_PHASE_CHANGE_DSET_NAME "attr_phase_change_test"
+#define DATASET_CREATION_PROPERTIES_TEST_ALLOC_TIMES_BASE_NAME "alloc_time_test"
+#define DATASET_CREATION_PROPERTIES_TEST_FILL_TIMES_BASE_NAME "fill_times_test"
+#define DATASET_CREATION_PROPERTIES_TEST_CRT_ORDER_BASE_NAME "creation_order_test"
+#define DATASET_CREATION_PROPERTIES_TEST_LAYOUTS_BASE_NAME "layout_test"
+#define DATASET_CREATION_PROPERTIES_TEST_FILTERS_DSET_NAME "filters_test"
+#define DATASET_CREATION_PROPERTIES_TEST_GROUP_NAME "creation_properties_test"
+#define DATASET_CREATION_PROPERTIES_TEST_CHUNK_DIM_RANK DATASET_CREATION_PROPERTIES_TEST_SHAPE_RANK
+#define DATASET_CREATION_PROPERTIES_TEST_MAX_COMPACT 12
+#define DATASET_CREATION_PROPERTIES_TEST_MIN_DENSE 8
+#define DATASET_CREATION_PROPERTIES_TEST_SHAPE_RANK 3
+
+#define DATASET_OPEN_INVALID_PARAMS_SPACE_RANK 2
+#define DATASET_OPEN_INVALID_PARAMS_GROUP_NAME "dataset_open_test"
+#define DATASET_OPEN_INVALID_PARAMS_DSET_NAME "open_test_dataset"
+
+#define DATASET_GET_SPACE_TYPE_TEST_SPACE_RANK 2
+#define DATASET_GET_SPACE_TYPE_TEST_GROUP_NAME "get_dset_space_type_test"
+#define DATASET_GET_SPACE_TYPE_TEST_DSET_NAME "get_space_type_test_dset"
+
+#define DATASET_GET_SPACE_TYPE_INVALID_PARAMS_TEST_SPACE_RANK 2
+#define DATASET_GET_SPACE_TYPE_INVALID_PARAMS_TEST_GROUP_NAME "get_dset_space_type_invalid_params_test"
+#define DATASET_GET_SPACE_TYPE_INVALID_PARAMS_TEST_DSET_NAME "get_space_type_test_invalid_params_dset"
+
+#define DATASET_PROPERTY_LIST_TEST_SUBGROUP_NAME "dataset_property_list_test_group"
+#define DATASET_PROPERTY_LIST_TEST_SPACE_RANK 2
+#define DATASET_PROPERTY_LIST_TEST_DSET_NAME1 "property_list_test_dataset1"
+#define DATASET_PROPERTY_LIST_TEST_DSET_NAME2 "property_list_test_dataset2"
+#define DATASET_PROPERTY_LIST_TEST_DSET_NAME3 "property_list_test_dataset3"
+#define DATASET_PROPERTY_LIST_TEST_DSET_NAME4 "property_list_test_dataset4"
+
+#define DATASET_SMALL_READ_TEST_ALL_DSET_SPACE_RANK 3
+#define DATASET_SMALL_READ_TEST_ALL_DSET_DTYPESIZE sizeof(int)
+#define DATASET_SMALL_READ_TEST_ALL_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_SMALL_READ_TEST_ALL_GROUP_NAME "dataset_small_read_all_test"
+#define DATASET_SMALL_READ_TEST_ALL_DSET_NAME "dataset_small_read_all_dset"
+
+#define DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_SPACE_RANK 3
+#define DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_DTYPESIZE sizeof(int)
+#define DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_SMALL_READ_TEST_HYPERSLAB_GROUP_NAME "dataset_small_read_hyperslab_test"
+#define DATASET_SMALL_READ_TEST_HYPERSLAB_DSET_NAME "dataset_small_read_hyperslab_dset"
+
+#define DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK 3
+#define DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_DTYPESIZE sizeof(int)
+#define DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_SMALL_READ_TEST_POINT_SELECTION_NUM_POINTS 10
+#define DATASET_SMALL_READ_TEST_POINT_SELECTION_GROUP_NAME "dataset_small_read_point_selection_test"
+#define DATASET_SMALL_READ_TEST_POINT_SELECTION_DSET_NAME "dataset_small_read_point_selection_dset"
+
+#define DATASET_IO_POINT_GROUP_NAME "dataset_io_point_selection_test"
+#define DATASET_IO_POINT_DSET_NAME_NOCHUNK "dataset_io_point_selection_dset_nochunk"
+#define DATASET_IO_POINT_DSET_NAME_CHUNK "dataset_io_point_selection_dset_chunk"
+
+#ifndef NO_LARGE_TESTS
+#define DATASET_LARGE_READ_TEST_ALL_DSET_SPACE_RANK 3
+#define DATASET_LARGE_READ_TEST_ALL_DSET_DTYPESIZE sizeof(int)
+#define DATASET_LARGE_READ_TEST_ALL_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_LARGE_READ_TEST_ALL_GROUP_NAME "dataset_large_read_all_test"
+#define DATASET_LARGE_READ_TEST_ALL_DSET_NAME "dataset_large_read_all_dset"
+
+#define DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_SPACE_RANK 3
+#define DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_DTYPESIZE sizeof(int)
+#define DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_LARGE_READ_TEST_HYPERSLAB_GROUP_NAME "dataset_large_read_hyperslab_test"
+#define DATASET_LARGE_READ_TEST_HYPERSLAB_DSET_NAME "dataset_large_read_hyperslab_dset"
+
+#define DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_SPACE_RANK 1
+#define DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPESIZE sizeof(int)
+#define DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_LARGE_READ_TEST_POINT_SELECTION_GROUP_NAME "dataset_large_read_point_selection_test"
+#define DATASET_LARGE_READ_TEST_POINT_SELECTION_DSET_NAME "dataset_large_read_point_selection_dset"
+#endif
+
+#define DATASET_READ_INVALID_PARAMS_TEST_DSET_SPACE_RANK 3
+#define DATASET_READ_INVALID_PARAMS_TEST_DSET_DTYPESIZE sizeof(int)
+#define DATASET_READ_INVALID_PARAMS_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_READ_INVALID_PARAMS_TEST_GROUP_NAME "dataset_read_invalid_params_test"
+#define DATASET_READ_INVALID_PARAMS_TEST_DSET_NAME "dataset_read_invalid_params_dset"
+
+#define DATASET_SMALL_WRITE_TEST_ALL_DSET_SPACE_RANK 3
+#define DATASET_SMALL_WRITE_TEST_ALL_DSET_DTYPESIZE sizeof(int)
+#define DATASET_SMALL_WRITE_TEST_ALL_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_SMALL_WRITE_TEST_ALL_GROUP_NAME "dataset_small_write_all_test"
+#define DATASET_SMALL_WRITE_TEST_ALL_DSET_NAME "dataset_small_write_all_dset"
+
+#define DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK 3
+#define DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_DTYPESIZE sizeof(int)
+#define DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_SMALL_WRITE_TEST_HYPERSLAB_GROUP_NAME "dataset_small_write_hyperslab_test"
+#define DATASET_SMALL_WRITE_TEST_HYPERSLAB_DSET_NAME "dataset_small_write_hyperslab_dset"
+
+#define DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_SPACE_RANK 3
+#define DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_DTYPESIZE sizeof(int)
+#define DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_SMALL_WRITE_TEST_POINT_SELECTION_NUM_POINTS 10
+#define DATASET_SMALL_WRITE_TEST_POINT_SELECTION_GROUP_NAME "dataset_small_write_point_selection_test"
+#define DATASET_SMALL_WRITE_TEST_POINT_SELECTION_DSET_NAME "dataset_small_write_point_selection_dset"
+
+#ifndef NO_LARGE_TESTS
+#define DATASET_LARGE_WRITE_TEST_ALL_DSET_SPACE_RANK 3
+#define DATASET_LARGE_WRITE_TEST_ALL_DSET_DTYPESIZE sizeof(int)
+#define DATASET_LARGE_WRITE_TEST_ALL_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_LARGE_WRITE_TEST_ALL_GROUP_NAME "dataset_large_write_all_test"
+#define DATASET_LARGE_WRITE_TEST_ALL_DSET_NAME "dataset_large_write_all_dset"
+
+#define DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_SPACE_RANK 3
+#define DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_DTYPESIZE sizeof(int)
+#define DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_LARGE_WRITE_TEST_HYPERSLAB_GROUP_NAME "dataset_large_write_hyperslab_test"
+#define DATASET_LARGE_WRITE_TEST_HYPERSLAB_DSET_NAME "dataset_large_write_hyperslab_dset"
+
+#define DATASET_LARGE_WRITE_TEST_POINT_SELECTION_DSET_SPACE_RANK 3
+#define DATASET_LARGE_WRITE_TEST_POINT_SELECTION_DSET_DTYPESIZE sizeof(int)
+#define DATASET_LARGE_WRITE_TEST_POINT_SELECTION_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_LARGE_WRITE_TEST_POINT_SELECTION_GROUP_NAME "dataset_large_write_point_selection_test"
+#define DATASET_LARGE_WRITE_TEST_POINT_SELECTION_DSET_NAME "dataset_large_write_point_selection_dset"
+#endif
+
+#define DATASET_DATA_VERIFY_WRITE_TEST_DSET_SPACE_RANK 3
+#define DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPESIZE sizeof(int)
+#define DATASET_DATA_VERIFY_WRITE_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_DATA_VERIFY_WRITE_TEST_NUM_POINTS 10
+#define DATASET_DATA_VERIFY_WRITE_TEST_GROUP_NAME "dataset_data_write_verification_test"
+#define DATASET_DATA_VERIFY_WRITE_TEST_DSET_NAME "dataset_data_write_verification_dset"
+
+#define DATASET_WRITE_INVALID_PARAMS_TEST_DSET_SPACE_RANK 3
+#define DATASET_WRITE_INVALID_PARAMS_TEST_DSET_DTYPESIZE sizeof(int)
+#define DATASET_WRITE_INVALID_PARAMS_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_WRITE_INVALID_PARAMS_TEST_GROUP_NAME "dataset_write_invalid_params_test"
+#define DATASET_WRITE_INVALID_PARAMS_TEST_DSET_NAME "dataset_write_invalid_params_dset"
+
+#define DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_SPACE_RANK 3
+#define DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPESIZE sizeof(int)
+#define DATASET_DATA_BUILTIN_CONVERSION_TEST_MEM_DTYPE H5T_NATIVE_INT
+#define DATASET_DATA_BUILTIN_CONVERSION_TEST_NUM_POINTS 10
+#define DATASET_DATA_BUILTIN_CONVERSION_TEST_GROUP_NAME "dataset_builtin_conversion_verification_test"
+#define DATASET_DATA_BUILTIN_CONVERSION_TEST_DSET_NAME "dataset_builtin_conversion_verification_dset"
+
+#define DATASET_COMPOUND_PARTIAL_IO_DSET_DIMS 10
+#define DATASET_DATA_COMPOUND_PARTIAL_IO_TEST_GROUP_NAME "dataset_compound_partial_io_test"
+#define DATASET_DATA_COMPOUND_PARTIAL_IO_TEST_DSET_NAME "dataset_compound_partial_io_test"
+
+#define DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_SPACE_RANK 2
+#define DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_NUM_PASSES 3
+#define DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_GROUP_NAME "set_extent_chunked_unlimited_test"
+#define DATASET_SET_EXTENT_CHUNKED_UNLIMITED_TEST_DSET_NAME "set_extent_chunked_unlimited_test_dset"
+
+#define DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_SPACE_RANK 2
+#define DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_NUM_PASSES 3
+#define DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_GROUP_NAME "set_extent_chunked_fixed_test"
+#define DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_DSET_NAME "set_extent_chunked_fixed_test_dset"
+#define DATASET_SET_EXTENT_CHUNKED_FIXED_TEST_DSET_NAME2 "set_extent_chunked_fixed_test_dset2"
+
+#define DATASET_SET_EXTENT_DATA_TEST_SPACE_RANK 2
+#define DATASET_SET_EXTENT_DATA_TEST_GROUP_NAME "set_extent_chunked_data_test"
+#define DATASET_SET_EXTENT_DATA_TEST_DSET_NAME "set_extent_chunked_data_test_dset"
+#define DATASET_SET_EXTENT_DATA_TEST_SPACE_DIM 8
+
+#define DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_RANK 2
+#define DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_GROUP_NAME "set_extent_chunked_double_handles_test"
+#define DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_DSET_NAME "set_extent_chunked_double_handles_test_dset"
+#define DATASET_SET_EXTENT_DOUBLE_HANDLES_TEST_SPACE_DIM 8
+
+#define DATASET_SET_EXTENT_INVALID_PARAMS_TEST_SPACE_RANK 2
+#define DATASET_SET_EXTENT_INVALID_PARAMS_TEST_GROUP_NAME "set_extent_invalid_params_test"
+#define DATASET_SET_EXTENT_INVALID_PARAMS_TEST_DSET_NAME "set_extent_invalid_params_test_dset"
+#define DATASET_SET_EXTENT_INVALID_LAYOUT_TEST_COMPACT_DSET_NAME "set_extent_invalid_layout_test_compact_dset"
+#define DATASET_SET_EXTENT_INVALID_LAYOUT_TEST_CONTIGUOUS_DSET_NAME \
+ "set_extent_invalid_layout_test_contiguous_dset"
+
+#define DATASET_SINGLE_CHUNK_TEST_SPACE_RANK 2
+#define DATASET_SINGLE_CHUNK_TEST_GROUP_NAME "single_chunk_dataset_test"
+#define DATASET_SINGLE_CHUNK_TEST_DSET_NAME "single_chunk_dataset"
+
+#define DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_SPACE_RANK 2
+#define DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_DTYPESIZE sizeof(int)
+#define DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_SINGLE_CHUNK_WRITE_TEST_GROUP_NAME "single_chunk_dataset_write_test"
+#define DATASET_SINGLE_CHUNK_WRITE_TEST_DSET_NAME "single_chunk_dataset"
+
+#define DATASET_MULTI_CHUNK_TEST_SPACE_RANK 2
+#define DATASET_MULTI_CHUNK_TEST_GROUP_NAME "multi_chunk_dataset_test"
+#define DATASET_MULTI_CHUNK_TEST_DSET_NAME "multi_chunk_dataset"
+
+#define DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK 2
+#define DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE sizeof(int)
+#define DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_GROUP_NAME \
+ "multi_chunk_dataset_write_same_space_read_test"
+#define DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME "multi_chunk_dataset"
+
+#define DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK 2
+#define DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE sizeof(int)
+#define DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_GROUP_NAME \
+ "multi_chunk_dataset_write_diff_space_read_test"
+#define DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME "multi_chunk_dataset"
+
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK 2
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE sizeof(int)
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_GROUP_NAME \
+ "multi_chunk_dataset_same_space_overwrite_test"
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME "multi_chunk_dataset"
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_NITERS 10
+
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK 2
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE sizeof(int)
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_GROUP_NAME \
+ "multi_chunk_dataset_diff_space_overwrite_test"
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME "multi_chunk_dataset"
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_NITERS 10
+
+#define DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_SPACE_RANK 2
+#define DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_DTYPESIZE sizeof(int)
+#define DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_CTYPE int
+#define DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_GROUP_NAME "read_partial_chunk_all_sel_test"
+#define DATASET_PARTIAL_CHUNK_READ_ALL_SEL_TEST_DSET_NAME "read_partial_chunk_all_sel_dset"
+
+#define DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_SPACE_RANK 2
+#define DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_DTYPESIZE sizeof(int)
+#define DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_CTYPE int
+#define DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_GROUP_NAME "read_partial_chunk_hyper_sel_test"
+#define DATASET_PARTIAL_CHUNK_READ_HYPER_SEL_TEST_DSET_NAME "read_partial_chunk_hyper_sel_dset"
+
+#define DATASET_GET_VLEN_BUF_SIZE_DSET_SPACE_RANK 1
+#define DATASET_GET_VLEN_BUF_SIZE_DSET_SPACE_DIM 4
+#define DATASET_GET_VLEN_BUF_SIZE_GROUP_NAME "get_vlen_buffer_size_group"
+#define DATASET_GET_VLEN_BUF_SIZE_DSET_NAME "get_vlen_buffer_size_dset"
+#endif
diff --git a/test/API/H5_api_datatype_test.c b/test/API/H5_api_datatype_test.c
new file mode 100644
index 0000000..9d53292
--- /dev/null
+++ b/test/API/H5_api_datatype_test.c
@@ -0,0 +1,2693 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_datatype_test.h"
+
+/*
+ * Disable tests that currently compromise internal HDF5 integrity.
+ */
+#define PROBLEMATIC_TESTS
+
+static int test_create_committed_datatype(void);
+static int test_create_committed_datatype_invalid_params(void);
+static int test_create_anonymous_committed_datatype(void);
+static int test_create_anonymous_committed_datatype_invalid_params(void);
+#ifndef PROBLEMATIC_TESTS
+static int test_create_committed_datatype_empty_types(void);
+#endif
+static int test_recommit_committed_type(void);
+static int test_open_committed_datatype(void);
+static int test_open_committed_datatype_invalid_params(void);
+static int test_reopen_committed_datatype_indirect(void);
+static int test_close_committed_datatype_invalid_id(void);
+static int test_datatype_property_lists(void);
+static int test_create_dataset_with_committed_type(void);
+static int test_create_attribute_with_committed_type(void);
+static int test_delete_committed_type(void);
+static int test_resurrect_datatype(void);
+static int test_flush_committed_datatype(void);
+static int test_flush_committed_datatype_invalid_params(void);
+static int test_refresh_committed_datatype(void);
+static int test_refresh_committed_datatype_invalid_params(void);
+#ifndef PROBLEMATIC_TESTS
+static int test_cant_commit_predefined(void);
+#endif
+static int test_cant_modify_committed_type(void);
+
+/*
+ * The array of datatype tests to be performed.
+ *
+ * Each entry is a niladic test function returning 0 on success and 1 on
+ * failure; the driver iterates this table and tallies failures.
+ *
+ * NOTE: entries guarded by "#ifndef PROBLEMATIC_TESTS" are compiled out
+ * while PROBLEMATIC_TESTS is defined (see the #define near the top of
+ * this file) because those tests currently compromise internal HDF5
+ * integrity.
+ */
+static int (*datatype_tests[])(void) = {
+    test_create_committed_datatype,
+    test_create_committed_datatype_invalid_params,
+    test_create_anonymous_committed_datatype,
+    test_create_anonymous_committed_datatype_invalid_params,
+#ifndef PROBLEMATIC_TESTS
+    test_create_committed_datatype_empty_types,
+#endif
+    test_recommit_committed_type,
+    test_open_committed_datatype,
+    test_open_committed_datatype_invalid_params,
+    test_reopen_committed_datatype_indirect,
+    test_close_committed_datatype_invalid_id,
+    test_datatype_property_lists,
+    test_create_dataset_with_committed_type,
+    test_create_attribute_with_committed_type,
+    test_delete_committed_type,
+    test_resurrect_datatype,
+    test_flush_committed_datatype,
+    test_flush_committed_datatype_invalid_params,
+    test_refresh_committed_datatype,
+    test_refresh_committed_datatype_invalid_params,
+#ifndef PROBLEMATIC_TESTS
+    test_cant_commit_predefined,
+#endif
+    test_cant_modify_committed_type,
+};
+
+/*
+ * A test to check that a committed datatype can be created.
+ *
+ * Opens the shared API test file, creates a fresh sub-group under the
+ * datatype container group, generates a random datatype and commits it
+ * there with H5Tcommit2().
+ *
+ * Returns 0 on success (or skip), 1 on failure.
+ */
+static int
+test_create_committed_datatype(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t type_id = H5I_INVALID_HID;
+
+    TESTING("creation of a committed datatype");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or stored datatype aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATATYPE_CREATE_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATATYPE_CREATE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Generate an arbitrary datatype to commit (H5T_NO_CLASS presumably
+     * means "any class" — see generate_random_datatype in H5_api_test_util.c) */
+    if ((type_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create datatype to commit\n");
+        goto error;
+    }
+
+    if (H5Tcommit2(group_id, DATATYPE_CREATE_TEST_TYPE_NAME, type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't commit datatype '%s'\n", DATATYPE_CREATE_TEST_TYPE_NAME);
+        goto error;
+    }
+
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of any IDs still open; errors are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a committed datatype can't be
+ * created when H5Tcommit2 is passed invalid parameters.
+ *
+ * Each multipart sub-test feeds one invalid argument (loc_id, name,
+ * type ID, LCPL, TCPL, TAPL) to H5Tcommit2 inside H5E_BEGIN_TRY and
+ * fails if the call unexpectedly succeeds.
+ *
+ * Returns 0 on success (or skip), 1 on failure.
+ */
+static int
+test_create_committed_datatype_invalid_params(void)
+{
+    herr_t err_ret = -1;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t type_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("H5Tcommit2 with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or stored datatype aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATATYPE_CREATE_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATATYPE_CREATE_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((type_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create datatype to commit\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Tcommit2_invalid_loc_id)
+        {
+            TESTING_2("H5Tcommit2 with an invalid loc_id");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Tcommit2(H5I_INVALID_HID, DATATYPE_CREATE_INVALID_PARAMS_TEST_TYPE_NAME, type_id,
+                                     H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Tcommit2 succeeded with an invalid loc_id!\n");
+                PART_ERROR(H5Tcommit2_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tcommit2_invalid_loc_id);
+
+        PART_BEGIN(H5Tcommit2_invalid_type_name)
+        {
+            TESTING_2("H5Tcommit2 with an invalid datatype name");
+
+            /* Both a NULL name and an empty name must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Tcommit2(group_id, NULL, type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Tcommit2 succeeded with an invalid datatype name!\n");
+                PART_ERROR(H5Tcommit2_invalid_type_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Tcommit2(group_id, "", type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Tcommit2 succeeded with an invalid datatype name!\n");
+                PART_ERROR(H5Tcommit2_invalid_type_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tcommit2_invalid_type_name);
+
+        PART_BEGIN(H5Tcommit2_invalid_type_id)
+        {
+            TESTING_2("H5Tcommit2 with an invalid datatype ID");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Tcommit2(group_id, DATATYPE_CREATE_INVALID_PARAMS_TEST_TYPE_NAME, H5I_INVALID_HID,
+                                     H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Tcommit2 succeeded with an invalid datatype ID!\n");
+                PART_ERROR(H5Tcommit2_invalid_type_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tcommit2_invalid_type_id);
+
+        PART_BEGIN(H5Tcommit2_invalid_lcpl)
+        {
+            TESTING_2("H5Tcommit2 with an invalid LCPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Tcommit2(group_id, DATATYPE_CREATE_INVALID_PARAMS_TEST_TYPE_NAME, type_id,
+                                     H5I_INVALID_HID, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Tcommit2 succeeded with an invalid LCPL!\n");
+                PART_ERROR(H5Tcommit2_invalid_lcpl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tcommit2_invalid_lcpl);
+
+        PART_BEGIN(H5Tcommit2_invalid_tcpl)
+        {
+            TESTING_2("H5Tcommit2 with an invalid TCPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Tcommit2(group_id, DATATYPE_CREATE_INVALID_PARAMS_TEST_TYPE_NAME, type_id,
+                                     H5P_DEFAULT, H5I_INVALID_HID, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Tcommit2 succeeded with an invalid TCPL!\n");
+                PART_ERROR(H5Tcommit2_invalid_tcpl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tcommit2_invalid_tcpl);
+
+        PART_BEGIN(H5Tcommit2_invalid_tapl)
+        {
+            TESTING_2("H5Tcommit2 with an invalid TAPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Tcommit2(group_id, DATATYPE_CREATE_INVALID_PARAMS_TEST_TYPE_NAME, type_id,
+                                     H5P_DEFAULT, H5P_DEFAULT, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Tcommit2 succeeded with an invalid TAPL!\n");
+                PART_ERROR(H5Tcommit2_invalid_tapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tcommit2_invalid_tapl);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of any IDs still open; errors are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that an anonymous committed datatype
+ * can be created with H5Tcommit_anon.
+ *
+ * Same setup as test_create_committed_datatype, but the datatype is
+ * committed without a link name via H5Tcommit_anon().
+ *
+ * Returns 0 on success (or skip), 1 on failure.
+ */
+static int
+test_create_anonymous_committed_datatype(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t type_id = H5I_INVALID_HID;
+
+    TESTING("creation of anonymous committed datatype");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or stored datatype aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATATYPE_CREATE_ANONYMOUS_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATATYPE_CREATE_ANONYMOUS_GROUP_NAME);
+        goto error;
+    }
+
+    if ((type_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create datatype\n");
+        goto error;
+    }
+
+    /* Commit without linking the datatype into the group hierarchy */
+    if (H5Tcommit_anon(group_id, type_id, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't commit anonymous datatype\n");
+        goto error;
+    }
+
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of any IDs still open; errors are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a committed datatype can't be
+ * created when H5Tcommit_anon is passed invalid parameters.
+ *
+ * Each multipart sub-test feeds one invalid argument (loc_id, type ID,
+ * TCPL, TAPL) to H5Tcommit_anon inside H5E_BEGIN_TRY and fails if the
+ * call unexpectedly succeeds.
+ *
+ * Returns 0 on success (or skip), 1 on failure.
+ */
+static int
+test_create_anonymous_committed_datatype_invalid_params(void)
+{
+    herr_t err_ret = -1;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t type_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("H5Tcommit_anon with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or stored datatype aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATATYPE_CREATE_ANONYMOUS_INVALID_PARAMS_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATATYPE_CREATE_ANONYMOUS_INVALID_PARAMS_GROUP_NAME);
+        goto error;
+    }
+
+    if ((type_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create datatype\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Tcommit_anon_invalid_loc_id)
+        {
+            TESTING_2("H5Tcommit_anon with an invalid loc_id");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Tcommit_anon(H5I_INVALID_HID, type_id, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Tcommit_anon succeeded with an invalid loc_id!\n");
+                PART_ERROR(H5Tcommit_anon_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tcommit_anon_invalid_loc_id);
+
+        PART_BEGIN(H5Tcommit_anon_invalid_type_id)
+        {
+            TESTING_2("H5Tcommit_anon with an invalid datatype ID");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Tcommit_anon(group_id, H5I_INVALID_HID, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Tcommit_anon succeeded with an invalid datatype ID!\n");
+                PART_ERROR(H5Tcommit_anon_invalid_type_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tcommit_anon_invalid_type_id);
+
+        PART_BEGIN(H5Tcommit_anon_invalid_tcpl)
+        {
+            TESTING_2("H5Tcommit_anon with an invalid TCPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Tcommit_anon(group_id, type_id, H5I_INVALID_HID, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Tcommit_anon succeeded with an invalid TCPL!\n");
+                PART_ERROR(H5Tcommit_anon_invalid_tcpl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tcommit_anon_invalid_tcpl);
+
+        PART_BEGIN(H5Tcommit_anon_invalid_tapl)
+        {
+            TESTING_2("H5Tcommit_anon with an invalid TAPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Tcommit_anon(group_id, type_id, H5P_DEFAULT, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Tcommit_anon succeeded with an invalid TAPL!\n");
+                PART_ERROR(H5Tcommit_anon_invalid_tapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tcommit_anon_invalid_tapl);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of any IDs still open; errors are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that committing a datatype fails with empty
+ * compound and enum datatypes.
+ *
+ * For each of the two type classes the test first attempts to commit an
+ * empty (member-less) type — which must fail — then inserts a single
+ * member and verifies that the now non-empty type commits successfully.
+ *
+ * Returns 0 on success (or skip), 1 on failure.
+ *
+ * FIX(review): the success path previously fell straight through into
+ * the "error:" label, so the test returned 1 (failure) even when every
+ * check passed, and then attempted to re-close the already-closed IDs.
+ * The standard "PASSED(); return 0;" epilogue used by every other test
+ * in this file has been restored before the error label.
+ */
+#ifndef PROBLEMATIC_TESTS
+static int
+test_create_committed_datatype_empty_types(void)
+{
+    herr_t err_ret = FAIL;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t type_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("creation of committed datatype with empty types");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or stored datatype aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATATYPE_CREATE_EMPTY_TYPES_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATATYPE_CREATE_EMPTY_TYPES_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Tcommit_empty_compound_type)
+        {
+            TESTING_2("creation of committed datatype with empty compound type");
+
+            if ((type_id = H5Tcreate(H5T_COMPOUND, (size_t)32)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create compound type\n");
+                PART_ERROR(H5Tcommit_empty_compound_type);
+            }
+
+            /* Committing a compound type with no members must fail */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Tcommit2(group_id, DATATYPE_CREATE_EMPTY_TYPES_TEST_CMPD_TYPE_NAME, type_id,
+                                     H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    committed empty compound datatype!\n");
+                PART_ERROR(H5Tcommit_empty_compound_type);
+            }
+
+            /* Add a field to the compound datatype */
+            if (H5Tinsert(type_id, "a", (size_t)0, H5T_NATIVE_INT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to insert field into compound datatype\n");
+                PART_ERROR(H5Tcommit_empty_compound_type);
+            }
+
+            /* Attempt to commit the now non-empty compound datatype */
+            if (H5Tcommit2(group_id, DATATYPE_CREATE_EMPTY_TYPES_TEST_CMPD_TYPE_NAME, type_id, H5P_DEFAULT,
+                           H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to commit non-empty compound datatype\n");
+                PART_ERROR(H5Tcommit_empty_compound_type);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tcommit_empty_compound_type);
+
+        /* Release the compound type before re-using type_id for the enum part */
+        if (type_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Tclose(type_id);
+            }
+            H5E_END_TRY;
+            type_id = H5I_INVALID_HID;
+        }
+
+        PART_BEGIN(H5Tcommit_empty_enum_type)
+        {
+            int enum_val = 1;
+
+            TESTING_2("creation of committed datatype with empty enum type");
+
+            if ((type_id = H5Tenum_create(H5T_NATIVE_INT)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create enum type\n");
+                PART_ERROR(H5Tcommit_empty_enum_type);
+            }
+
+            /* Committing an enum type with no members must fail */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Tcommit2(group_id, DATATYPE_CREATE_EMPTY_TYPES_TEST_ENUM_TYPE_NAME, type_id,
+                                     H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    committed empty enum datatype!\n");
+                PART_ERROR(H5Tcommit_empty_enum_type);
+            }
+
+            /* Add a field to the enum datatype */
+            if (H5Tenum_insert(type_id, "a", &enum_val) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to insert field into enum datatype\n");
+                PART_ERROR(H5Tcommit_empty_enum_type);
+            }
+
+            /* Attempt to commit the now non-empty enum datatype */
+            if (H5Tcommit2(group_id, DATATYPE_CREATE_EMPTY_TYPES_TEST_ENUM_TYPE_NAME, type_id, H5P_DEFAULT,
+                           H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to commit non-empty enum datatype\n");
+                PART_ERROR(H5Tcommit_empty_enum_type);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tcommit_empty_enum_type);
+
+        if (type_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Tclose(type_id);
+            }
+            H5E_END_TRY;
+            type_id = H5I_INVALID_HID;
+        }
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of any IDs still open; errors are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+#endif
+
+/*
+ * A test to check that a committed datatype can't be re-committed.
+ *
+ * Copies H5T_NATIVE_INT, commits the copy, confirms H5Tcommitted()
+ * reports it as committed, then verifies that a second H5Tcommit2 on
+ * the same (already-committed) type ID fails.
+ *
+ * Returns 0 on success (or skip), 1 on failure.
+ */
+static int
+test_recommit_committed_type(void)
+{
+    htri_t is_committed = FALSE;
+    herr_t err_ret;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t type_id = H5I_INVALID_HID;
+
+    TESTING("inability to re-commit a committed datatype");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or stored datatype aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, RECOMMIT_COMMITTED_TYPE_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", RECOMMIT_COMMITTED_TYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Copy a predefined datatype and commit the copy */
+    if ((type_id = H5Tcopy(H5T_NATIVE_INT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to copy predefined integer datatype\n");
+        goto error;
+    }
+
+    if (H5Tcommit2(group_id, "native_int", type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to commit datatype\n");
+        goto error;
+    }
+
+    if ((is_committed = H5Tcommitted(type_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to determine if datatype is committed\n");
+        goto error;
+    }
+
+    if (!is_committed) {
+        H5_FAILED();
+        HDprintf("    H5Tcommitted() returned false!\n");
+        goto error;
+    }
+
+    /* We should not be able to re-commit a committed type */
+    H5E_BEGIN_TRY
+    {
+        err_ret = H5Tcommit2(group_id, "native_int", type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+
+    if (err_ret >= 0) {
+        H5_FAILED();
+        HDprintf("    re-committed an already committed datatype!\n");
+        goto error;
+    }
+
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of any IDs still open; errors are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a committed datatype
+ * can be opened using H5Topen2.
+ *
+ * Commits a random datatype, closes it, then re-opens it by name with
+ * H5Topen2() to verify the round trip.
+ *
+ * Returns 0 on success (or skip), 1 on failure.
+ */
+static int
+test_open_committed_datatype(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t type_id = H5I_INVALID_HID;
+
+    TESTING("H5Topen2");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or stored datatype aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATATYPE_OPEN_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATATYPE_OPEN_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((type_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create datatype to commit\n");
+        goto error;
+    }
+
+    if (H5Tcommit2(group_id, DATATYPE_OPEN_TEST_TYPE_NAME, type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't commit datatype '%s'\n", DATATYPE_OPEN_TEST_TYPE_NAME);
+        goto error;
+    }
+
+    /* Close, then re-open the committed type by name to exercise H5Topen2 */
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+
+    if ((type_id = H5Topen2(group_id, DATATYPE_OPEN_TEST_TYPE_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open committed datatype '%s'\n", DATATYPE_OPEN_TEST_TYPE_NAME);
+        goto error;
+    }
+
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of any IDs still open; errors are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a committed datatype can't
+ * be opened when H5Topen2 is passed invalid parameters.
+ *
+ * Setup commits one valid datatype; the parts then verify that
+ * H5Topen2 fails for an invalid location ID, an invalid (NULL or
+ * empty) datatype name, and an invalid type access property list.
+ */
+static int
+test_open_committed_datatype_invalid_params(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t type_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("H5Topen2 with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or stored datatype aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    /* Setup: create and commit a datatype that the invalid H5Topen2 calls below will target */
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATATYPE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATATYPE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((type_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create datatype to commit\n");
+        goto error;
+    }
+
+    if (H5Tcommit2(group_id, DATATYPE_OPEN_INVALID_PARAMS_TEST_TYPE_NAME, type_id, H5P_DEFAULT, H5P_DEFAULT,
+                   H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't commit datatype '%s'\n", DATATYPE_OPEN_INVALID_PARAMS_TEST_TYPE_NAME);
+        goto error;
+    }
+
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Topen2_invalid_loc_id)
+        {
+            TESTING_2("H5Topen2 with an invalid location ID");
+
+            H5E_BEGIN_TRY
+            {
+                type_id = H5Topen2(H5I_INVALID_HID, DATATYPE_OPEN_INVALID_PARAMS_TEST_TYPE_NAME, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            /* A non-negative ID means the open unexpectedly succeeded */
+            if (type_id >= 0) {
+                H5_FAILED();
+                HDprintf("    opened committed datatype with an invalid location ID!\n");
+                H5Tclose(type_id);
+                PART_ERROR(H5Topen2_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Topen2_invalid_loc_id);
+
+        PART_BEGIN(H5Topen2_invalid_type_name)
+        {
+            TESTING_2("H5Topen2 with an invalid datatype name");
+
+            H5E_BEGIN_TRY
+            {
+                type_id = H5Topen2(group_id, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (type_id >= 0) {
+                H5_FAILED();
+                HDprintf("    opened committed datatype with an invalid datatype name!\n");
+                H5Tclose(type_id);
+                PART_ERROR(H5Topen2_invalid_type_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                type_id = H5Topen2(group_id, "", H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (type_id >= 0) {
+                H5_FAILED();
+                HDprintf("    opened committed datatype with an invalid datatype name!\n");
+                H5Tclose(type_id);
+                PART_ERROR(H5Topen2_invalid_type_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Topen2_invalid_type_name);
+
+        PART_BEGIN(H5Topen2_invalid_tapl)
+        {
+            TESTING_2("H5Topen2 with an invalid TAPL");
+
+            H5E_BEGIN_TRY
+            {
+                type_id = H5Topen2(group_id, DATATYPE_OPEN_INVALID_PARAMS_TEST_TYPE_NAME, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (type_id >= 0) {
+                H5_FAILED();
+                HDprintf("    opened committed datatype with an invalid TAPL!\n");
+                H5Tclose(type_id);
+                PART_ERROR(H5Topen2_invalid_tapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Topen2_invalid_tapl);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that open named datatypes can be reopened indirectly
+ * through H5Dget_type without causing problems.
+ *
+ * One part per datatype class (compound, enum, variable-length, opaque,
+ * array): each commits the type, verifies committing did not change its
+ * size, creates a dataset with it, reopens the type via H5Dget_type and
+ * checks the reopened type's size matches the original.
+ */
+static int
+test_reopen_committed_datatype_indirect(void)
+{
+    size_t dt_size = 0;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID;
+    hid_t group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t type_id = H5I_INVALID_HID, reopened_type_id = H5I_INVALID_HID;
+    hid_t strtype = H5I_INVALID_HID;
+    hid_t space_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("reopening open committed datatypes using H5Dget_type");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or stored datatype aren't supported with "
+                 "this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATATYPE_REOPEN_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATATYPE_REOPEN_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Dataspace shared by the datasets created in every part below */
+    if ((space_id = generate_random_dataspace(DATATYPE_REOPEN_TEST_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(reopen_compound_type)
+        {
+            TESTING_2("re-open of compound datatype");
+
+            if ((strtype = H5Tcopy(H5T_C_S1)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to copy C-string datatype\n");
+                PART_ERROR(reopen_compound_type);
+            }
+
+            if (H5Tset_size(strtype, H5T_VARIABLE) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to set string datatype's size to variable\n");
+                PART_ERROR(reopen_compound_type);
+            }
+
+            if ((type_id = H5Tcreate(H5T_COMPOUND, sizeof(char *))) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create compound datatype\n");
+                PART_ERROR(reopen_compound_type);
+            }
+
+            if (H5Tinsert(type_id, "vlstr", (size_t)0, strtype) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to insert field into compound datatype\n");
+                PART_ERROR(reopen_compound_type);
+            }
+
+            if (H5Tclose(strtype) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close string datatype\n");
+                PART_ERROR(reopen_compound_type);
+            }
+
+            /* Get size of compound type */
+            if ((dt_size = H5Tget_size(type_id)) == 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve size of compound datatype\n");
+                PART_ERROR(reopen_compound_type);
+            }
+
+            /* Commit compound type and verify the size doesn't change */
+            if (H5Tcommit2(group_id, "cmpd_type", type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to commit compound datatype\n");
+                PART_ERROR(reopen_compound_type);
+            }
+
+            if (dt_size != H5Tget_size(type_id)) {
+                H5_FAILED();
+                HDprintf("    committing datatype caused its size to change!\n");
+                PART_ERROR(reopen_compound_type);
+            }
+
+            /* Create dataset with compound type */
+            if ((dset_id = H5Dcreate2(group_id, "cmpd_dset", type_id, space_id, H5P_DEFAULT, H5P_DEFAULT,
+                                      H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create dataset using committed datatype\n");
+                PART_ERROR(reopen_compound_type);
+            }
+
+            /* Indirectly reopen type and verify that the size doesn't change */
+            if ((reopened_type_id = H5Dget_type(dset_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to re-open committed datatype using H5Dget_type\n");
+                PART_ERROR(reopen_compound_type);
+            }
+
+            if (dt_size != H5Tget_size(reopened_type_id)) {
+                H5_FAILED();
+                HDprintf("    size of re-opened datatype didn't match size of original datatype\n");
+                PART_ERROR(reopen_compound_type);
+            }
+
+            PASSED();
+        }
+        PART_END(reopen_compound_type);
+
+        /* Release this part's IDs (errors ignored) so the next part starts clean */
+        H5E_BEGIN_TRY
+        {
+            H5Tclose(strtype);
+            strtype = H5I_INVALID_HID;
+            H5Tclose(type_id);
+            type_id = H5I_INVALID_HID;
+            H5Tclose(reopened_type_id);
+            reopened_type_id = H5I_INVALID_HID;
+            H5Dclose(dset_id);
+            dset_id = H5I_INVALID_HID;
+        }
+        H5E_END_TRY;
+
+        PART_BEGIN(reopen_enum_type)
+        {
+            int enum_value;
+
+            TESTING_2("re-open of enum datatype");
+
+            if ((type_id = H5Tenum_create(H5T_NATIVE_INT)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create enum datatype\n");
+                PART_ERROR(reopen_enum_type);
+            }
+
+            enum_value = 0;
+            if (H5Tenum_insert(type_id, "val1", &enum_value) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to insert value into enum datatype\n");
+                PART_ERROR(reopen_enum_type);
+            }
+
+            enum_value = 1;
+            if (H5Tenum_insert(type_id, "val2", &enum_value) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to insert value into enum datatype\n");
+                PART_ERROR(reopen_enum_type);
+            }
+
+            /* Get size of enum type */
+            if ((dt_size = H5Tget_size(type_id)) == 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve size of enum datatype\n");
+                PART_ERROR(reopen_enum_type);
+            }
+
+            /* Commit enum type and verify the size doesn't change */
+            if (H5Tcommit2(group_id, "enum_type", type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to commit enum datatype\n");
+                PART_ERROR(reopen_enum_type);
+            }
+
+            if (dt_size != H5Tget_size(type_id)) {
+                H5_FAILED();
+                HDprintf("    committing datatype caused its size to change!\n");
+                PART_ERROR(reopen_enum_type);
+            }
+
+            /* Create dataset with enum type */
+            if ((dset_id = H5Dcreate2(group_id, "enum_dset", type_id, space_id, H5P_DEFAULT, H5P_DEFAULT,
+                                      H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create dataset using committed datatype\n");
+                PART_ERROR(reopen_enum_type);
+            }
+
+            /* Indirectly reopen type and verify that the size doesn't change */
+            if ((reopened_type_id = H5Dget_type(dset_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to re-open committed datatype using H5Dget_type\n");
+                PART_ERROR(reopen_enum_type);
+            }
+
+            if (dt_size != H5Tget_size(reopened_type_id)) {
+                H5_FAILED();
+                HDprintf("    size of re-opened datatype didn't match size of original datatype\n");
+                PART_ERROR(reopen_enum_type);
+            }
+
+            PASSED();
+        }
+        PART_END(reopen_enum_type);
+
+        H5E_BEGIN_TRY
+        {
+            H5Tclose(type_id);
+            type_id = H5I_INVALID_HID;
+            H5Tclose(reopened_type_id);
+            reopened_type_id = H5I_INVALID_HID;
+            H5Dclose(dset_id);
+            dset_id = H5I_INVALID_HID;
+        }
+        H5E_END_TRY;
+
+        PART_BEGIN(reopen_vlen_type)
+        {
+            TESTING_2("reopen of a variable-length datatype");
+
+            if ((type_id = H5Tvlen_create(H5T_NATIVE_INT)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create variable-length datatype\n");
+                PART_ERROR(reopen_vlen_type);
+            }
+
+            /* Get size of variable-length type */
+            if ((dt_size = H5Tget_size(type_id)) == 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve size of variable-length datatype\n");
+                PART_ERROR(reopen_vlen_type);
+            }
+
+            /* Commit variable-length type and verify the size doesn't change */
+            if (H5Tcommit2(group_id, "vlen_type", type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to commit variable-length datatype\n");
+                PART_ERROR(reopen_vlen_type);
+            }
+
+            if (dt_size != H5Tget_size(type_id)) {
+                H5_FAILED();
+                HDprintf("    committing datatype caused its size to change!\n");
+                PART_ERROR(reopen_vlen_type);
+            }
+
+            /* Create dataset with variable-length type */
+            if ((dset_id = H5Dcreate2(group_id, "vlen_dset", type_id, space_id, H5P_DEFAULT, H5P_DEFAULT,
+                                      H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create dataset using committed datatype\n");
+                PART_ERROR(reopen_vlen_type);
+            }
+
+            /* Indirectly reopen type and verify that the size doesn't change */
+            if ((reopened_type_id = H5Dget_type(dset_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to re-open committed datatype using H5Dget_type\n");
+                PART_ERROR(reopen_vlen_type);
+            }
+
+            if (dt_size != H5Tget_size(reopened_type_id)) {
+                H5_FAILED();
+                HDprintf("    size of re-opened datatype didn't match size of original datatype\n");
+                PART_ERROR(reopen_vlen_type);
+            }
+
+            PASSED();
+        }
+        PART_END(reopen_vlen_type);
+
+        H5E_BEGIN_TRY
+        {
+            H5Tclose(type_id);
+            type_id = H5I_INVALID_HID;
+            H5Tclose(reopened_type_id);
+            reopened_type_id = H5I_INVALID_HID;
+            H5Dclose(dset_id);
+            dset_id = H5I_INVALID_HID;
+        }
+        H5E_END_TRY;
+
+        PART_BEGIN(reopen_opaque_type)
+        {
+            const char *tag = "opaque_tag";
+
+            TESTING_2("reopen of an opaque datatype");
+
+            if ((type_id = H5Tcreate(H5T_OPAQUE, (size_t)13)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create opaque datatype\n");
+                PART_ERROR(reopen_opaque_type);
+            }
+
+            if (H5Tset_tag(type_id, tag) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to set tag on opaque datatype\n");
+                PART_ERROR(reopen_opaque_type);
+            }
+
+            /* Get size of opaque type */
+            if ((dt_size = H5Tget_size(type_id)) == 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve size of opaque datatype\n");
+                PART_ERROR(reopen_opaque_type);
+            }
+
+            /* Commit opaque type and verify the size doesn't change */
+            if (H5Tcommit2(group_id, "opaque_type", type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to commit opaque datatype\n");
+                PART_ERROR(reopen_opaque_type);
+            }
+
+            if (dt_size != H5Tget_size(type_id)) {
+                H5_FAILED();
+                HDprintf("    committing datatype caused its size to change!\n");
+                PART_ERROR(reopen_opaque_type);
+            }
+
+            /* Create dataset with opaque type */
+            if ((dset_id = H5Dcreate2(group_id, "opaque_dset", type_id, space_id, H5P_DEFAULT, H5P_DEFAULT,
+                                      H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create dataset using committed datatype\n");
+                PART_ERROR(reopen_opaque_type);
+            }
+
+            /* Indirectly reopen type and verify that the size doesn't change */
+            if ((reopened_type_id = H5Dget_type(dset_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to re-open committed datatype using H5Dget_type\n");
+                PART_ERROR(reopen_opaque_type);
+            }
+
+            if (dt_size != H5Tget_size(reopened_type_id)) {
+                H5_FAILED();
+                HDprintf("    size of re-opened datatype didn't match size of original datatype\n");
+                PART_ERROR(reopen_opaque_type);
+            }
+
+            PASSED();
+        }
+        PART_END(reopen_opaque_type);
+
+        H5E_BEGIN_TRY
+        {
+            H5Tclose(type_id);
+            type_id = H5I_INVALID_HID;
+            H5Tclose(reopened_type_id);
+            reopened_type_id = H5I_INVALID_HID;
+            H5Dclose(dset_id);
+            dset_id = H5I_INVALID_HID;
+        }
+        H5E_END_TRY;
+
+        PART_BEGIN(reopen_array_type)
+        {
+            hsize_t array_dims[] = {2, 3};
+
+            TESTING_2("reopen of an array datatype");
+
+            if ((type_id = H5Tarray_create2(H5T_NATIVE_INT, 1, array_dims)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create array datatype\n");
+                PART_ERROR(reopen_array_type);
+            }
+
+            /* Get size of array type */
+            if ((dt_size = H5Tget_size(type_id)) == 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve size of array datatype\n");
+                PART_ERROR(reopen_array_type);
+            }
+
+            /* Commit array type and verify the size doesn't change */
+            if (H5Tcommit2(group_id, "array_type", type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to commit array datatype\n");
+                PART_ERROR(reopen_array_type);
+            }
+
+            if (dt_size != H5Tget_size(type_id)) {
+                H5_FAILED();
+                HDprintf("    committing datatype caused its size to change!\n");
+                PART_ERROR(reopen_array_type);
+            }
+
+            /* Create dataset with array type */
+            if ((dset_id = H5Dcreate2(group_id, "array_dset", type_id, space_id, H5P_DEFAULT, H5P_DEFAULT,
+                                      H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create dataset using committed datatype\n");
+                PART_ERROR(reopen_array_type);
+            }
+
+            /* Indirectly reopen type and verify that the size doesn't change */
+            if ((reopened_type_id = H5Dget_type(dset_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to re-open committed datatype using H5Dget_type\n");
+                PART_ERROR(reopen_array_type);
+            }
+
+            if (dt_size != H5Tget_size(reopened_type_id)) {
+                H5_FAILED();
+                HDprintf("    size of re-opened datatype didn't match size of original datatype\n");
+                PART_ERROR(reopen_array_type);
+            }
+
+            PASSED();
+        }
+        PART_END(reopen_array_type);
+
+        H5E_BEGIN_TRY
+        {
+            H5Tclose(type_id);
+            type_id = H5I_INVALID_HID;
+            H5Tclose(reopened_type_id);
+            reopened_type_id = H5I_INVALID_HID;
+            H5Dclose(dset_id);
+            dset_id = H5I_INVALID_HID;
+        }
+        H5E_END_TRY;
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(space_id);
+        H5Tclose(strtype);
+        H5Tclose(type_id);
+        H5Tclose(reopened_type_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that H5Tclose fails when
+ * it is passed an invalid datatype ID.
+ */
+static int
+test_close_committed_datatype_invalid_id(void)
+{
+    herr_t err_ret = -1;
+    hid_t file_id = H5I_INVALID_HID;
+
+    TESTING("H5Tclose with an invalid committed datatype ID");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file or stored datatype aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    /* H5Tclose on an invalid ID must fail; suppress the expected error stack */
+    H5E_BEGIN_TRY
+    {
+        err_ret = H5Tclose(H5I_INVALID_HID);
+    }
+    H5E_END_TRY;
+
+    if (err_ret >= 0) {
+        H5_FAILED();
+        HDprintf("    H5Tclose succeeded with an invalid committed datatype ID!\n");
+        goto error;
+    }
+
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a TCPL used for datatype creation
+ * can be persisted and that a valid copy of that TCPL can
+ * be retrieved later with a call to H5Tget_create_plist.
+ */
+static int
+test_datatype_property_lists(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t type_id1 = H5I_INVALID_HID, type_id2 = H5I_INVALID_HID;
+    hid_t tcpl_id1 = H5I_INVALID_HID, tcpl_id2 = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("datatype property list operations");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, stored datatype, or getting property list aren't "
+                 "supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATATYPE_PROPERTY_LIST_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATATYPE_PROPERTY_LIST_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Create two datatypes: one committed with an explicit TCPL, the other with H5P_DEFAULT */
+    if ((type_id1 = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create datatype\n");
+        goto error;
+    }
+
+    if ((type_id2 = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create datatype\n");
+        goto error;
+    }
+
+    if ((tcpl_id1 = H5Pcreate(H5P_DATATYPE_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create TCPL\n");
+        goto error;
+    }
+
+    /* Currently no TCPL routines are defined */
+
+    if (H5Tcommit2(group_id, DATATYPE_PROPERTY_LIST_TEST_DATATYPE_NAME1, type_id1, H5P_DEFAULT, tcpl_id1,
+                   H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't commit datatype '%s'\n", DATATYPE_PROPERTY_LIST_TEST_DATATYPE_NAME1);
+        goto error;
+    }
+
+    if (H5Tcommit2(group_id, DATATYPE_PROPERTY_LIST_TEST_DATATYPE_NAME2, type_id2, H5P_DEFAULT, H5P_DEFAULT,
+                   H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't commit datatype '%s'\n", DATATYPE_PROPERTY_LIST_TEST_DATATYPE_NAME2);
+        goto error;
+    }
+
+    if (H5Pclose(tcpl_id1) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Tget_create_plist)
+        {
+            TESTING_2("H5Tget_create_plist");
+
+            /* Try to receive copies for the two property lists */
+            if ((tcpl_id1 = H5Tget_create_plist(type_id1)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Tget_create_plist);
+            }
+
+            if ((tcpl_id2 = H5Tget_create_plist(type_id2)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Tget_create_plist);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tget_create_plist);
+
+        /* Now close the property lists and datatypes and see if we can still retrieve copies of
+         * the property lists upon opening (instead of creating) a datatype
+         */
+        if (tcpl_id1 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Pclose(tcpl_id1);
+            }
+            H5E_END_TRY;
+            tcpl_id1 = H5I_INVALID_HID;
+        }
+        if (tcpl_id2 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Pclose(tcpl_id2);
+            }
+            H5E_END_TRY;
+            tcpl_id2 = H5I_INVALID_HID;
+        }
+        if (type_id1 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Tclose(type_id1);
+            }
+            H5E_END_TRY;
+            type_id1 = H5I_INVALID_HID;
+        }
+        if (type_id2 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Tclose(type_id2);
+            }
+            H5E_END_TRY;
+            type_id2 = H5I_INVALID_HID;
+        }
+
+        PART_BEGIN(H5Tget_create_plist_reopened)
+        {
+            TESTING_2("H5Tget_create_plist after re-opening committed datatype");
+
+            if ((type_id1 = H5Topen2(group_id, DATATYPE_PROPERTY_LIST_TEST_DATATYPE_NAME1, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't open datatype '%s'\n", DATATYPE_PROPERTY_LIST_TEST_DATATYPE_NAME1);
+                PART_ERROR(H5Tget_create_plist_reopened);
+            }
+
+            if ((type_id2 = H5Topen2(group_id, DATATYPE_PROPERTY_LIST_TEST_DATATYPE_NAME2, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't open datatype '%s'\n", DATATYPE_PROPERTY_LIST_TEST_DATATYPE_NAME2);
+                PART_ERROR(H5Tget_create_plist_reopened);
+            }
+
+            if ((tcpl_id1 = H5Tget_create_plist(type_id1)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Tget_create_plist_reopened);
+            }
+
+            if ((tcpl_id2 = H5Tget_create_plist(type_id2)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get property list\n");
+                PART_ERROR(H5Tget_create_plist_reopened);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Tget_create_plist_reopened);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(tcpl_id1) < 0)
+        TEST_ERROR;
+    if (H5Pclose(tcpl_id2) < 0)
+        TEST_ERROR;
+    if (H5Tclose(type_id1) < 0)
+        TEST_ERROR;
+    if (H5Tclose(type_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(tcpl_id1);
+        H5Pclose(tcpl_id2);
+        H5Tclose(type_id1);
+        H5Tclose(type_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a dataset can be created using
+ * a committed datatype.
+ *
+ * The file is closed and re-opened between committing the type and
+ * creating the dataset, so the dataset is built from the type as
+ * read back from storage.
+ */
+static int
+test_create_dataset_with_committed_type(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t type_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+
+    TESTING("dataset creation with a committed datatype");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or stored datatype aren't supported with "
+                 "this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_CREATE_WITH_DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container group '%s'\n", DATASET_CREATE_WITH_DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((type_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create datatype\n");
+        goto error;
+    }
+
+    if (H5Tcommit2(group_id, DATASET_CREATE_WITH_DATATYPE_TEST_TYPE_NAME, type_id, H5P_DEFAULT, H5P_DEFAULT,
+                   H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't commit datatype '%s'\n", DATASET_CREATE_WITH_DATATYPE_TEST_TYPE_NAME);
+        goto error;
+    }
+
+    /* Close everything and re-open the file so the committed datatype is read back from disk */
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gopen2(container_group, DATASET_CREATE_WITH_DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_CREATE_WITH_DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((type_id = H5Topen2(group_id, DATASET_CREATE_WITH_DATATYPE_TEST_TYPE_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open committed datatype '%s'\n", DATASET_CREATE_WITH_DATATYPE_TEST_TYPE_NAME);
+        goto error;
+    }
+
+    /* NOTE(review): rank macro is named after the datatype-create test's constants rather than
+     * this test's DATASET_CREATE_WITH_DATATYPE_* family -- confirm it is the intended rank */
+    if ((fspace_id = generate_random_dataspace(DATATYPE_CREATE_TEST_DATASET_DIMS, NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_CREATE_WITH_DATATYPE_TEST_DSET_NAME, type_id, fspace_id,
+                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s' using committed datatype\n",
+                 DATASET_CREATE_WITH_DATATYPE_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_CREATE_WITH_DATATYPE_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to open dataset '%s'\n", DATASET_CREATE_WITH_DATATYPE_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that an attribute can be created
+ * using a committed datatype.
+ */
+static int
+test_create_attribute_with_committed_type(void)
+{
+    htri_t attr_exists;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t attr_id = H5I_INVALID_HID;
+    hid_t type_id = H5I_INVALID_HID;
+    hid_t space_id = H5I_INVALID_HID;
+
+    TESTING("attribute creation with a committed datatype");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, attribute, or stored datatype aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container group '%s'\n",
+                 ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((type_id = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create datatype\n");
+        goto error;
+    }
+
+    if (H5Tcommit2(group_id, ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_DTYPE_NAME, type_id, H5P_DEFAULT,
+                   H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't commit datatype '%s'\n", ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_DTYPE_NAME);
+        goto error;
+    }
+
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+
+    /* Re-open the committed datatype so the attribute is created against the stored type */
+    if ((type_id = H5Topen2(group_id, ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_DTYPE_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open committed datatype '%s'\n",
+                 ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_DTYPE_NAME);
+        goto error;
+    }
+
+    if ((space_id =
+             generate_random_dataspace(ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((attr_id = H5Acreate2(group_id, ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_ATTR_NAME, type_id, space_id,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create attribute '%s'\n", ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_ATTR_NAME);
+        goto error;
+    }
+
+    /* Verify the attribute has been created */
+    if ((attr_exists = H5Aexists(group_id, ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_ATTR_NAME)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if attribute '%s' exists\n",
+                 ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_ATTR_NAME);
+        goto error;
+    }
+
+    if (!attr_exists) {
+        H5_FAILED();
+        HDprintf("    attribute did not exist\n");
+        goto error;
+    }
+
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+
+    if ((attr_id = H5Aopen(group_id, ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_ATTR_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open attribute '%s'\n", ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_ATTR_NAME);
+        goto error;
+    }
+
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Aclose(attr_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id);
+        H5Sclose(space_id);
+        H5Aclose(attr_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a committed datatype can
+ * be deleted.
+ *
+ * Commits a datatype, verifies its link exists with H5Lexists,
+ * deletes the link with H5Ldelete, then verifies the link is gone.
+ */
+static int
+test_delete_committed_type(void)
+{
+    htri_t type_exists;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t type_id = H5I_INVALID_HID;
+
+    TESTING("committed datatype deletion");
+
+    /* Make sure the connector supports the API functions being tested.
+     * Link functions (H5Lexists/H5Ldelete) are used to check for and
+     * delete the committed datatype, so LINK_BASIC is required.
+     */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, link, or stored datatype aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATATYPE_DELETE_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container group '%s'\n", DATATYPE_DELETE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((type_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create datatype\n");
+        goto error;
+    }
+
+    if (H5Tcommit2(group_id, DATATYPE_DELETE_TEST_DTYPE_NAME, type_id, H5P_DEFAULT, H5P_DEFAULT,
+                   H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't commit datatype '%s'\n", DATATYPE_DELETE_TEST_DTYPE_NAME);
+        goto error;
+    }
+
+    /* Verify the committed datatype's link exists before deletion */
+    if ((type_exists = H5Lexists(group_id, DATATYPE_DELETE_TEST_DTYPE_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if datatype '%s' exists\n", DATATYPE_DELETE_TEST_DTYPE_NAME);
+        goto error;
+    }
+
+    if (!type_exists) {
+        H5_FAILED();
+        HDprintf("    datatype didn't exist\n");
+        goto error;
+    }
+
+    /* Delete the link to the committed datatype */
+    if (H5Ldelete(group_id, DATATYPE_DELETE_TEST_DTYPE_NAME, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't delete datatype '%s'\n", DATATYPE_DELETE_TEST_DTYPE_NAME);
+        goto error;
+    }
+
+    /* The link must no longer exist after deletion */
+    if ((type_exists = H5Lexists(group_id, DATATYPE_DELETE_TEST_DTYPE_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if datatype '%s' exists\n", DATATYPE_DELETE_TEST_DTYPE_NAME);
+        goto error;
+    }
+
+    if (type_exists) {
+        H5_FAILED();
+        HDprintf("    datatype exists\n");
+        goto error;
+    }
+
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a committed datatype can still be opened when
+ * the link to the datatype is deleted and then a new one is created.
+ *
+ * Relies on the library keeping an unlinked object alive while an ID
+ * for it remains open; connectors without that behavior compile this
+ * test out via NO_ID_PREVENTS_OBJ_DELETE and skip it.
+ *
+ * Returns 0 on success (or skip), 1 on failure.
+ */
+static int
+test_resurrect_datatype(void)
+{
+#ifndef NO_ID_PREVENTS_OBJ_DELETE
+    hid_t file_id         = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID;
+    hid_t group_id        = H5I_INVALID_HID;
+    hid_t type_id         = H5I_INVALID_HID;
+#endif /* NO_ID_PREVENTS_OBJ_DELETE */
+
+    TESTING("resurrecting datatype after deletion");
+
+#ifndef NO_ID_PREVENTS_OBJ_DELETE
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, link, hard link, or stored datatype aren't "
+                 "supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATATYPE_RESURRECT_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATATYPE_RESURRECT_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Create a named datatype in the file */
+    if ((type_id = H5Tcopy(H5T_NATIVE_INT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to copy predefined integer type\n");
+        goto error;
+    }
+
+    if (H5Tcommit2(group_id, DATATYPE_RESURRECT_TEST_DTYPE_NAME, type_id, H5P_DEFAULT, H5P_DEFAULT,
+                   H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to commit datatype\n");
+        goto error;
+    }
+
+    /* Unlink the datatype while it's open (will mark it for deletion when closed) */
+    if (H5Ldelete(group_id, DATATYPE_RESURRECT_TEST_DTYPE_NAME, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to delete datatype\n");
+        goto error;
+    }
+#ifndef NO_OBJECT_GET_NAME
+    /* Check that datatype name is NULL */
+    /* An unlinked object has no path; H5Iget_name() is expected to report a
+     * name length of 0 here */
+    if (H5Iget_name(type_id, NULL, (size_t)0) != 0) {
+        H5_FAILED();
+        HDprintf("    deleted datatype name was not NULL!\n");
+        goto error;
+    }
+#endif
+
+    /* Re-link the datatype to the group hierarchy (shouldn't get deleted now) */
+    if (H5Lcreate_hard(type_id, ".", group_id, DATATYPE_RESURRECT_TEST_DTYPE_NAME2, H5P_DEFAULT,
+                       H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to create new link for deleted datatype\n");
+        goto error;
+    }
+
+    /* Close things */
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    /* Re-open the file to prove the resurrected datatype persisted to disk */
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gopen2(container_group, DATATYPE_RESURRECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container sub-group '%s'\n", DATATYPE_RESURRECT_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Attempt to open the datatype under the new name */
+    if ((type_id = H5Topen2(group_id, DATATYPE_RESURRECT_TEST_DTYPE_NAME2, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to open resurrected datatype\n");
+        goto error;
+    }
+
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+#else  /* NO_ID_PREVENTS_OBJ_DELETE */
+    /* Connector deletes objects immediately on unlink; test not applicable */
+    SKIPPED();
+#endif /* NO_ID_PREVENTS_OBJ_DELETE */
+
+    return 0;
+
+#ifndef NO_ID_PREVENTS_OBJ_DELETE
+error:
+    /* Best-effort cleanup; errors are suppressed since we're already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+#endif /* NO_ID_PREVENTS_OBJ_DELETE */
+}
+
+/* Placeholder for H5Tflush coverage; not yet implemented in the API test
+ * framework, so the test is unconditionally skipped. */
+static int
+test_flush_committed_datatype(void)
+{
+    TESTING("H5Tflush");
+
+    SKIPPED();
+
+    return 0;
+}
+
+/* Placeholder for H5Tflush invalid-parameter coverage; not yet implemented,
+ * so the test is unconditionally skipped. */
+static int
+test_flush_committed_datatype_invalid_params(void)
+{
+    TESTING("H5Tflush with invalid parameters");
+
+    SKIPPED();
+
+    return 0;
+}
+
+/* Placeholder for H5Trefresh coverage; not yet implemented, so the test is
+ * unconditionally skipped. */
+static int
+test_refresh_committed_datatype(void)
+{
+    TESTING("H5Trefresh");
+
+    SKIPPED();
+
+    return 0;
+}
+
+/* Placeholder for H5Trefresh invalid-parameter coverage; not yet implemented,
+ * so the test is unconditionally skipped. */
+static int
+test_refresh_committed_datatype_invalid_params(void)
+{
+    TESTING("H5Trefresh with invalid parameters");
+
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to check that predefined HDF5 datatypes can't be directly committed.
+ * An application should first copy the type with H5Tcopy and then commit the
+ * copied datatype.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+#ifndef PROBLEMATIC_TESTS
+static int
+test_cant_commit_predefined(void)
+{
+    herr_t err_ret;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID;
+    hid_t  group_id        = H5I_INVALID_HID;
+
+    TESTING("inability to commit predefined types directly");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or stored datatype aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, PREDEFINED_TYPE_COMMIT_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", PREDEFINED_TYPE_COMMIT_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Committing a predefined type must fail; suppress the expected error */
+    H5E_BEGIN_TRY
+    {
+        err_ret = H5Tcommit2(group_id, "committed_predefined_type", H5T_NATIVE_INT, H5P_DEFAULT, H5P_DEFAULT,
+                             H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+
+    /* Success (>= 0) here means the library wrongly accepted the commit */
+    if (err_ret >= 0) {
+        H5_FAILED();
+        HDprintf("    committed a predefined datatype directly (without copying it)!\n");
+        goto error;
+    }
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we're already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+#endif
+
+/*
+ * A test to check that a datatype cannot be modified once it has been committed.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_cant_modify_committed_type(void)
+{
+    htri_t is_committed = FALSE;
+    herr_t err_ret;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID;
+    hid_t  group_id        = H5I_INVALID_HID;
+    hid_t  type_id         = H5I_INVALID_HID;
+
+    TESTING("inability to modify a committed datatype");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or stored datatype aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATATYPE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATATYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, MODIFY_COMMITTED_TYPE_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", MODIFY_COMMITTED_TYPE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Copy a predefined datatype and commit the copy */
+    if ((type_id = H5Tcopy(H5T_NATIVE_INT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to copy predefined integer datatype\n");
+        goto error;
+    }
+
+    if (H5Tcommit2(group_id, "native_int", type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to commit datatype\n");
+        goto error;
+    }
+
+    /* Sanity-check that H5Tcommitted() agrees the type is now committed */
+    if ((is_committed = H5Tcommitted(type_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to determine if datatype is committed\n");
+        goto error;
+    }
+
+    if (!is_committed) {
+        H5_FAILED();
+        HDprintf("    H5Tcommitted() returned false!\n");
+        goto error;
+    }
+
+    /* We should not be able to modify a type after it has been committed. */
+    H5E_BEGIN_TRY
+    {
+        err_ret = H5Tset_precision(type_id, (size_t)256);
+    }
+    H5E_END_TRY;
+
+    /* Success (>= 0) here means the library wrongly allowed the modification */
+    if (err_ret >= 0) {
+        H5_FAILED();
+        HDprintf("    modified committed datatype!\n");
+        goto error;
+    }
+
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we're already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Tclose(type_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * Entry point for the datatype API tests: runs every test in the
+ * datatype_tests function-pointer array (defined earlier in this file)
+ * and returns the number of tests that failed (0 on full success).
+ */
+int
+H5_api_datatype_test(void)
+{
+    size_t i;
+    int    nerrors;
+
+    HDprintf("**********************************************\n");
+    HDprintf("*                                            *\n");
+    HDprintf("*             API Datatype Tests             *\n");
+    HDprintf("*                                            *\n");
+    HDprintf("**********************************************\n\n");
+
+    /* Each test returns nonzero on failure; tally failures across the array */
+    for (i = 0, nerrors = 0; i < ARRAY_LENGTH(datatype_tests); i++) {
+        nerrors += (*datatype_tests[i])() ? 1 : 0;
+    }
+
+    HDprintf("\n");
+
+    return nerrors;
+}
diff --git a/test/API/H5_api_datatype_test.h b/test/API/H5_api_datatype_test.h
new file mode 100644
index 0000000..753f9b2
--- /dev/null
+++ b/test/API/H5_api_datatype_test.h
@@ -0,0 +1,79 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF5. The full HDF5 copyright notice, including      *
+ * terms governing use, modification, and redistribution, is contained in    *
+ * the COPYING file, which can be found at the root of the source code       *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.  *
+ * If you do not have access to either file, you may request a copy from     *
+ * help@hdfgroup.org.                                                        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_DATATYPE_TEST_H
+#define H5_API_DATATYPE_TEST_H
+
+#include "H5_api_test.h"
+
+/* Runs all datatype API tests; returns the number of failed tests */
+int H5_api_datatype_test(void);
+
+/*************************************************
+ *                                               *
+ *      API Datatype test defines                *
+ *                                               *
+ *************************************************/
+
+/* Group/object names used by the datatype tests; each test creates its own
+ * sub-group under the shared datatype test container group */
+
+#define DATATYPE_CREATE_TEST_DATASET_DIMS 2
+#define DATATYPE_CREATE_TEST_GROUP_NAME   "committed_datatype_creation_test"
+#define DATATYPE_CREATE_TEST_TYPE_NAME    "test_type"
+
+#define DATATYPE_CREATE_INVALID_PARAMS_TEST_SPACE_RANK 2
+#define DATATYPE_CREATE_INVALID_PARAMS_TEST_GROUP_NAME "committed_datatype_creation_invalid_params_test"
+#define DATATYPE_CREATE_INVALID_PARAMS_TEST_TYPE_NAME  "committed_datatype_creation_invalid_params_datatype"
+
+#define DATATYPE_CREATE_ANONYMOUS_GROUP_NAME "anonymous_type_creation_test"
+#define DATATYPE_CREATE_ANONYMOUS_TYPE_NAME  "anon_type"
+
+#define DATATYPE_CREATE_ANONYMOUS_INVALID_PARAMS_GROUP_NAME "anonymous_type_creation_invalid_params_test"
+
+#define DATATYPE_CREATE_EMPTY_TYPES_TEST_CMPD_TYPE_NAME "compound_type"
+#define DATATYPE_CREATE_EMPTY_TYPES_TEST_ENUM_TYPE_NAME "enum_type"
+#define DATATYPE_CREATE_EMPTY_TYPES_TEST_GROUP_NAME     "committed_datatype_empty_types_test"
+
+#define RECOMMIT_COMMITTED_TYPE_TEST_GROUP_NAME "recommit_committed_type_test"
+
+#define DATATYPE_OPEN_TEST_GROUP_NAME "datatype_open_test"
+#define DATATYPE_OPEN_TEST_TYPE_NAME  "open_test_datatype"
+
+#define DATATYPE_OPEN_INVALID_PARAMS_TEST_GROUP_NAME "datatype_open_invalid_params_test"
+#define DATATYPE_OPEN_INVALID_PARAMS_TEST_TYPE_NAME  "open_invalid_params_test_datatype"
+
+#define DATATYPE_REOPEN_TEST_SPACE_RANK 2
+#define DATATYPE_REOPEN_TEST_GROUP_NAME "datatype_reopen_test"
+
+#define DATASET_CREATE_WITH_DATATYPE_TEST_DATASET_DIMS 2
+#define DATASET_CREATE_WITH_DATATYPE_TEST_GROUP_NAME   "dataset_create_with_committed_type_test"
+#define DATASET_CREATE_WITH_DATATYPE_TEST_TYPE_NAME    "committed_type_test_dtype1"
+#define DATASET_CREATE_WITH_DATATYPE_TEST_DSET_NAME    "committed_type_test_dset"
+
+#define ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_SPACE_RANK 2
+#define ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_GROUP_NAME "attribute_create_with_committed_type_test"
+#define ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_DTYPE_NAME "committed_type_test_dtype2"
+#define ATTRIBUTE_CREATE_WITH_DATATYPE_TEST_ATTR_NAME  "committed_type_test_attr"
+
+#define DATATYPE_DELETE_TEST_GROUP_NAME "datatype_deletion_test"
+#define DATATYPE_DELETE_TEST_DTYPE_NAME "delete_test_dtype"
+
+/* NOTE(review): DATATYPE_RESURRECT_TEST_DTYPE_NAME reuses the same string as
+ * DATATYPE_DELETE_TEST_DTYPE_NAME; the two live in different groups so there
+ * is no collision, but confirm the duplication is intentional */
+#define DATATYPE_RESURRECT_TEST_GROUP_NAME  "datatype_resurrection_test"
+#define DATATYPE_RESURRECT_TEST_DTYPE_NAME  "delete_test_dtype"
+#define DATATYPE_RESURRECT_TEST_DTYPE_NAME2 "resurrected_dtype"
+
+#define DATATYPE_PROPERTY_LIST_TEST_SUBGROUP_NAME  "datatype_property_list_test_group"
+#define DATATYPE_PROPERTY_LIST_TEST_DATATYPE_NAME1 "property_list_test_datatype1"
+#define DATATYPE_PROPERTY_LIST_TEST_DATATYPE_NAME2 "property_list_test_datatype2"
+
+#define PREDEFINED_TYPE_COMMIT_TEST_GROUP_NAME "predefined_type_commit_test"
+
+#define MODIFY_COMMITTED_TYPE_TEST_GROUP_NAME "modify_committed_type_test"
+
+#endif /* H5_API_DATATYPE_TEST_H */
diff --git a/test/API/H5_api_file_test.c b/test/API/H5_api_file_test.c
new file mode 100644
index 0000000..279e9e7
--- /dev/null
+++ b/test/API/H5_api_file_test.c
@@ -0,0 +1,2564 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_file_test.h"
+
+/* Forward declarations for the individual file API tests below; each
+ * returns 0 on success/skip and nonzero on failure */
+static int test_create_file(void);
+static int test_create_file_invalid_params(void);
+static int test_create_file_excl(void);
+static int test_open_file(void);
+static int test_open_file_invalid_params(void);
+static int test_open_nonexistent_file(void);
+static int test_file_open_overlap(void);
+static int test_file_permission(void);
+static int test_reopen_file(void);
+static int test_close_file_invalid_id(void);
+static int test_flush_file(void);
+static int test_file_is_accessible(void);
+static int test_file_property_lists(void);
+static int test_get_file_intent(void);
+static int test_get_file_obj_count(void);
+static int test_file_mounts(void);
+static int test_get_file_name(void);
+
+/*
+ * The array of file tests to be performed.
+ * Presumably iterated in order by this module's driver function --
+ * one entry per test declared above.
+ */
+static int (*file_tests[])(void) = {
+    test_create_file,
+    test_create_file_invalid_params,
+    test_create_file_excl,
+    test_open_file,
+    test_open_file_invalid_params,
+    test_open_nonexistent_file,
+    test_file_open_overlap,
+    test_file_permission,
+    test_reopen_file,
+    test_close_file_invalid_id,
+    test_flush_file,
+    test_file_is_accessible,
+    test_file_property_lists,
+    test_get_file_intent,
+    test_get_file_obj_count,
+    test_file_mounts,
+    test_get_file_name,
+};
+
+/*
+ * Tests that a file can be created.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_create_file(void)
+{
+    hid_t file_id           = H5I_INVALID_HID;
+    char *prefixed_filename = NULL;
+
+    TESTING("H5Fcreate");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* prefix_filename() allocates; the caller (us) owns and frees the result */
+    if (prefix_filename(test_path_prefix, FILE_CREATE_TEST_FILENAME, &prefixed_filename) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't prefix filename\n");
+        goto error;
+    }
+
+    if ((file_id = H5Fcreate(prefixed_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create file '%s'\n", prefixed_filename);
+        goto error;
+    }
+
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    HDfree(prefixed_filename);
+    prefixed_filename = NULL;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we're already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    HDfree(prefixed_filename);
+
+    return 1;
+}
+
+/*
+ * Tests that a file can't be created when H5Fcreate is passed
+ * invalid parameters (bad name, bad flags, bad FCPL).
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_create_file_invalid_params(void)
+{
+    hid_t file_id           = H5I_INVALID_HID;
+    char *prefixed_filename = NULL;
+
+    TESTING_MULTIPART("H5Fcreate with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file aren't supported with this connector\n");
+        return 0;
+    }
+
+    if (prefix_filename(test_path_prefix, FILE_CREATE_INVALID_PARAMS_FILE_NAME, &prefixed_filename) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't prefix filename\n");
+        goto error;
+    }
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Fcreate_invalid_name)
+        {
+            TESTING_2("H5Fcreate with invalid file name");
+
+            /* Each call below must fail; a non-negative ID means H5Fcreate
+             * wrongly accepted the invalid argument */
+            H5E_BEGIN_TRY
+            {
+                file_id = H5Fcreate(NULL, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (file_id >= 0) {
+                H5_FAILED();
+                HDprintf("    file was created with a NULL name!\n");
+                H5Fclose(file_id);
+                PART_ERROR(H5Fcreate_invalid_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                file_id = H5Fcreate("", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (file_id >= 0) {
+                H5_FAILED();
+                HDprintf("    file was created with an invalid name of ''!\n");
+                H5Fclose(file_id);
+                PART_ERROR(H5Fcreate_invalid_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Fcreate_invalid_name);
+
+        PART_BEGIN(H5Fcreate_invalid_flags)
+        {
+            TESTING_2("H5Fcreate with invalid flags");
+
+            /* RDWR, CREAT, and SWMR_READ are open/read flags, not valid
+             * creation flags */
+            H5E_BEGIN_TRY
+            {
+                file_id = H5Fcreate(prefixed_filename, H5F_ACC_RDWR, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (file_id >= 0) {
+                H5_FAILED();
+                HDprintf("    file was created with invalid flag H5F_ACC_RDWR!\n");
+                H5Fclose(file_id);
+                PART_ERROR(H5Fcreate_invalid_flags);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                file_id = H5Fcreate(prefixed_filename, H5F_ACC_CREAT, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (file_id >= 0) {
+                H5_FAILED();
+                HDprintf("    file was created with invalid flag H5F_ACC_CREAT!\n");
+                H5Fclose(file_id);
+                PART_ERROR(H5Fcreate_invalid_flags);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                file_id = H5Fcreate(prefixed_filename, H5F_ACC_SWMR_READ, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (file_id >= 0) {
+                H5_FAILED();
+                HDprintf("    file was created with invalid flag H5F_ACC_SWMR_READ!\n");
+                H5Fclose(file_id);
+                PART_ERROR(H5Fcreate_invalid_flags);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Fcreate_invalid_flags);
+
+        PART_BEGIN(H5Fcreate_invalid_fcpl)
+        {
+            TESTING_2("H5Fcreate with invalid FCPL");
+
+            H5E_BEGIN_TRY
+            {
+                file_id = H5Fcreate(prefixed_filename, H5F_ACC_TRUNC, H5I_INVALID_HID, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (file_id >= 0) {
+                H5_FAILED();
+                HDprintf("    file was created with invalid FCPL!\n");
+                H5Fclose(file_id);
+                PART_ERROR(H5Fcreate_invalid_fcpl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Fcreate_invalid_fcpl);
+    }
+    END_MULTIPART;
+
+    HDfree(prefixed_filename);
+    prefixed_filename = NULL;
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        /* Attempt to remove the file if it ended up being created. */
+        H5Fdelete(prefixed_filename, H5P_DEFAULT);
+
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    HDfree(prefixed_filename);
+
+    return 1;
+}
+
+/*
+ * Tests that file creation will fail when a file is created
+ * using the H5F_ACC_EXCL flag while the file already exists,
+ * and that H5F_ACC_TRUNC can then truncate the existing file.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_create_file_excl(void)
+{
+    hid_t file_id           = H5I_INVALID_HID;
+    char *prefixed_filename = NULL;
+
+    TESTING("H5Fcreate with H5F_ACC_EXCL/H5F_ACC_TRUNC flag");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file aren't supported with this connector\n");
+        return 0;
+    }
+
+    if (prefix_filename(test_path_prefix, FILE_CREATE_EXCL_FILE_NAME, &prefixed_filename) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't prefix filename\n");
+        goto error;
+    }
+
+    if ((file_id = H5Fcreate(prefixed_filename, H5F_ACC_EXCL, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create first file\n");
+        goto error;
+    }
+
+    /* Close the file */
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    /* Try again with H5F_ACC_EXCL. This should fail because the file already
+     * exists on disk from the previous steps.
+     */
+    H5E_BEGIN_TRY
+    {
+        file_id = H5Fcreate(prefixed_filename, H5F_ACC_EXCL, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+
+    if (file_id >= 0) {
+        H5_FAILED();
+        HDprintf("    created already existing file using H5F_ACC_EXCL flag!\n");
+        goto error;
+    }
+
+    /* Test creating with H5F_ACC_TRUNC. This will truncate the existing file on disk. */
+    if ((file_id = H5Fcreate(prefixed_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't truncate the existing file\n");
+        goto error;
+    }
+
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    HDfree(prefixed_filename);
+    prefixed_filename = NULL;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we're already failing.
+     * (The unused second file ID the original declared has been removed.) */
+    H5E_BEGIN_TRY
+    {
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    HDfree(prefixed_filename);
+
+    return 1;
+}
+
+/*
+ * Tests that a file can be opened, in both read-only and
+ * read-write modes.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_open_file(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("H5Fopen");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file aren't supported with this connector\n");
+        return 0;
+    }
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Fopen_rdonly)
+        {
+            TESTING_2("H5Fopen in read-only mode");
+
+            if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    unable to open file '%s' in read-only mode\n", H5_api_test_filename);
+                PART_ERROR(H5Fopen_rdonly);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Fopen_rdonly);
+
+        /* Close between parts so each part opens the file fresh */
+        if (file_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Fclose(file_id);
+            }
+            H5E_END_TRY;
+            file_id = H5I_INVALID_HID;
+        }
+
+        PART_BEGIN(H5Fopen_rdwrite)
+        {
+            TESTING_2("H5Fopen in read-write mode");
+
+            if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    unable to open file '%s' in read-write mode\n", H5_api_test_filename);
+                PART_ERROR(H5Fopen_rdwrite);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Fopen_rdwrite);
+
+        if (file_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Fclose(file_id);
+            }
+            H5E_END_TRY;
+            file_id = H5I_INVALID_HID;
+        }
+
+        /*
+         * XXX: SWMR open flags
+         */
+    }
+    END_MULTIPART;
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we're already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * Tests that a file can't be opened when H5Fopen is given
+ * invalid parameters (bad name, creation-only flags).
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_open_file_invalid_params(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("H5Fopen with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file aren't supported with this connector\n");
+        return 0;
+    }
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Fopen_invalid_name)
+        {
+            TESTING_2("H5Fopen with invalid file name");
+
+            /* Each call below must fail; a non-negative ID means H5Fopen
+             * wrongly accepted the invalid argument */
+            H5E_BEGIN_TRY
+            {
+                file_id = H5Fopen(NULL, H5F_ACC_RDWR, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (file_id >= 0) {
+                H5_FAILED();
+                HDprintf("    file was opened with a NULL name!\n");
+                H5Fclose(file_id);
+                PART_ERROR(H5Fopen_invalid_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                file_id = H5Fopen("", H5F_ACC_RDWR, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (file_id >= 0) {
+                H5_FAILED();
+                HDprintf("    file was opened with an invalid name of ''!\n");
+                H5Fclose(file_id);
+                PART_ERROR(H5Fopen_invalid_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Fopen_invalid_name);
+
+        PART_BEGIN(H5Fopen_invalid_flags)
+        {
+            TESTING_2("H5Fopen with invalid flags");
+
+            /* TRUNC and EXCL are creation flags, not valid open flags */
+            H5E_BEGIN_TRY
+            {
+                file_id = H5Fopen(H5_api_test_filename, H5F_ACC_TRUNC, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (file_id >= 0) {
+                H5_FAILED();
+                HDprintf("    file was opened with invalid flag H5F_ACC_TRUNC!\n");
+                H5Fclose(file_id);
+                PART_ERROR(H5Fopen_invalid_flags);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                file_id = H5Fopen(H5_api_test_filename, H5F_ACC_EXCL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (file_id >= 0) {
+                H5_FAILED();
+                HDprintf("    file was opened with invalid flag H5F_ACC_EXCL!\n");
+                H5Fclose(file_id);
+                PART_ERROR(H5Fopen_invalid_flags);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Fopen_invalid_flags);
+    }
+    END_MULTIPART;
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we're already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that opening a file which doesn't exist will fail.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_open_nonexistent_file(void)
+{
+    hid_t file_id           = H5I_INVALID_HID;
+    char *prefixed_filename = NULL;
+
+    TESTING("for invalid opening of a non-existent file");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file aren't supported with this connector\n");
+        return 0;
+    }
+
+    if (prefix_filename(test_path_prefix, NONEXISTENT_FILENAME, &prefixed_filename) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't prefix filename\n");
+        goto error;
+    }
+
+    /* Delete the file first (best-effort) so we know for sure it doesn't
+     * exist before attempting to open it; errors (e.g. the file never
+     * existed, or the connector doesn't support deletion) are ignored.
+     */
+    H5E_BEGIN_TRY
+    {
+        H5Fdelete(prefixed_filename, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+
+    /* The open must fail; a non-negative ID means a non-existent file was
+     * "opened" */
+    H5E_BEGIN_TRY
+    {
+        file_id = H5Fopen(prefixed_filename, H5F_ACC_RDWR, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+
+    if (file_id >= 0) {
+        H5_FAILED();
+        HDprintf("    non-existent file was opened!\n");
+        goto error;
+    }
+
+    HDfree(prefixed_filename);
+    prefixed_filename = NULL;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we're already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    HDfree(prefixed_filename);
+
+    return 1;
+}
+
+/*
+ * Tests that a file can be opened read-only or read-write
+ * and things are handled appropriately.
+ */
+static int
+test_file_permission(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dspace_id = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t dtype_id = H5I_INVALID_HID;
+ char *prefixed_filename = NULL;
+ herr_t h5_ret = FAIL;
+
+ TESTING_MULTIPART("file permissions (invalid creation of objects in read-only file)");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, dataset, attribute, stored datatype aren't "
+ "supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if (prefix_filename(test_path_prefix, FILE_PERMISSION_TEST_FILENAME, &prefixed_filename) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't prefix filename\n");
+ goto error;
+ }
+
+ if ((file_id = H5Fcreate(prefixed_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s'\n", prefixed_filename);
+ goto error;
+ }
+
+ if ((dspace_id = H5Screate(H5S_SCALAR)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create data space\n");
+ goto error;
+ }
+
+ if ((dset_id = H5Dcreate2(file_id, FILE_PERMISSION_TEST_DSET_NAME, H5T_STD_U32LE, dspace_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create data set: %s\n", FILE_PERMISSION_TEST_DSET_NAME);
+ goto error;
+ }
+
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ /* Open the file (with read-only permission) */
+ if ((file_id = H5Fopen(prefixed_filename, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file\n");
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Gcreate_rdonly_file)
+ {
+ TESTING_2("invalid creation of group in read-only file");
+
+ /* Create a group with the read-only file handle (should fail) */
+ H5E_BEGIN_TRY
+ {
+ group_id =
+ H5Gcreate2(file_id, FILE_PERMISSION_TEST_GRP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" a group was created in a read-only file!\n");
+ PART_ERROR(H5Gcreate_rdonly_file);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ group_id = H5Gcreate_anon(file_id, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" a group was created in a read-only file!\n");
+ PART_ERROR(H5Gcreate_rdonly_file);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gcreate_rdonly_file);
+
+ PART_BEGIN(H5Dcreate_rdonly_file)
+ {
+ TESTING_2("invalid creation of dataset in read-only file");
+
+ /* Create a dataset with the read-only file handle (should fail) */
+ H5E_BEGIN_TRY
+ {
+ dset_id = H5Dcreate2(file_id, FILE_PERMISSION_TEST_DSET2_NAME, H5T_STD_U32LE, dspace_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (dset_id >= 0) {
+ H5_FAILED();
+ HDprintf(" a dataset was created in a read-only file!\n");
+ PART_ERROR(H5Dcreate_rdonly_file);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ dset_id = H5Dcreate_anon(file_id, H5T_STD_U32LE, dspace_id, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (dset_id >= 0) {
+ H5_FAILED();
+ HDprintf(" a dataset was created in a read-only file!\n");
+ PART_ERROR(H5Dcreate_rdonly_file);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dcreate_rdonly_file);
+
+ PART_BEGIN(H5Acreate_rdonly_file)
+ {
+ TESTING_2("invalid creation of attribute in read-only file");
+
+ /* Create an attribute with the read-only file handle (should fail) */
+ H5E_BEGIN_TRY
+ {
+ attr_id = H5Acreate2(file_id, FILE_PERMISSION_TEST_ATTR_NAME, H5T_NATIVE_INT, dspace_id,
+ H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (attr_id >= 0) {
+ H5_FAILED();
+ HDprintf(" an attribute was created in a read-only file!\n");
+ PART_ERROR(H5Acreate_rdonly_file);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Acreate_rdonly_file);
+
+ PART_BEGIN(H5Tcommit_rdonly_file)
+ {
+ TESTING_2("invalid creation of committed datatype in read-only file");
+
+ if ((dtype_id = H5Tcopy(H5T_NATIVE_INT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't copy a native datatype\n");
+ PART_ERROR(H5Tcommit_rdonly_file);
+ }
+
+ /* Commit a datatype with the read-only file handle (should fail) */
+ H5E_BEGIN_TRY
+ {
+ h5_ret = H5Tcommit2(file_id, FILE_PERMISSION_TEST_NAMED_DTYPE, dtype_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (h5_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" a named datatype was committed in a read-only file!\n");
+ PART_ERROR(H5Tcommit_rdonly_file);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ h5_ret = H5Tcommit_anon(file_id, dtype_id, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (h5_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" a named datatype was committed in a read-only file!\n");
+ PART_ERROR(H5Tcommit_rdonly_file);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Tcommit_rdonly_file);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Tclose(dtype_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(dspace_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ HDfree(prefixed_filename);
+ prefixed_filename = NULL;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(dspace_id);
+ H5Dclose(dset_id);
+ H5Aclose(attr_id);
+ H5Tclose(dtype_id);
+ H5Gclose(group_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ HDfree(prefixed_filename);
+
+ return 1;
+}
+
+/*
+ * A test to check that a file can be re-opened with H5Freopen.
+ */
+static int
+test_reopen_file(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t file_id2 = H5I_INVALID_HID;
+
+ TESTING("re-open of a file with H5Freopen");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file\n");
+ goto error;
+ }
+
+ if ((file_id2 = H5Freopen(file_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't re-open file\n");
+ goto error;
+ }
+
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id2) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Fclose(file_id);
+ H5Fclose(file_id2);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that H5Fclose doesn't succeed for an
+ * invalid file ID */
+static int
+test_close_file_invalid_id(void)
+{
+ herr_t err_ret = -1;
+
+ TESTING("H5Fclose with an invalid ID");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file aren't supported with this connector\n");
+ return 0;
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Fclose(H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" closed an invalid file ID!\n");
+ goto error;
+ }
+
+ PASSED();
+
+ return 0;
+
+error:
+ return 1;
+}
+
+/*
+ * A test to check that a file can be flushed using H5Fflush.
+ */
+static int
+test_flush_file(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t dspace_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ char *prefixed_filename = NULL;
+ char dset_name[32];
+ unsigned u;
+
+ TESTING_MULTIPART("H5Fflush");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, dataset, or file flush aren't supported with this "
+ "connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if (prefix_filename(test_path_prefix, FILE_FLUSH_TEST_FILENAME, &prefixed_filename) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't prefix filename\n");
+ goto error;
+ }
+
+ if ((file_id = H5Fcreate(prefixed_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s'\n", prefixed_filename);
+ goto error;
+ }
+
+ /* Create multiple small datasets in file */
+ if ((dspace_id = H5Screate(H5S_SCALAR)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create data space\n");
+ goto error;
+ }
+
+ for (u = 0; u < 10; u++) {
+ HDsprintf(dset_name, "Dataset %u", u);
+
+ if ((dset_id = H5Dcreate2(file_id, dset_name, H5T_STD_U32LE, dspace_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create data set: %s\n", dset_name);
+ goto error;
+ }
+
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Fflush_local)
+ {
+ TESTING_2("file flushing at local scope");
+
+ if (H5Fflush(file_id, H5F_SCOPE_LOCAL) < 0) {
+ H5_FAILED();
+ HDprintf(" unable to flush file with scope H5F_SCOPE_LOCAL\n");
+ PART_ERROR(H5Fflush_local);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fflush_local);
+
+ PART_BEGIN(H5Fflush_global)
+ {
+ TESTING_2("file flushing at global scope");
+
+ if (H5Fflush(file_id, H5F_SCOPE_GLOBAL) < 0) {
+ H5_FAILED();
+ HDprintf(" unable to flush file with scope H5F_SCOPE_GLOBAL\n");
+ PART_ERROR(H5Fflush_global);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fflush_global);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(dspace_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ HDfree(prefixed_filename);
+ prefixed_filename = NULL;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(dspace_id);
+ H5Dclose(dset_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ HDfree(prefixed_filename);
+
+ return 1;
+}
+
/*
 * A test for H5Fis_accessible.
 *
 * Part 1 checks that the main API test file reports accessible;
 * part 2 checks that a nonexistent filename does not.
 */
static int
test_file_is_accessible(void)
{
    const char *const fake_filename     = "nonexistent_file.h5"; /* never created on disk */
    char             *prefixed_filename = NULL;                  /* heap-allocated by prefix_filename() */
    htri_t            is_accessible     = FAIL;

    TESTING_MULTIPART("H5Fis_accessible");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
        SKIPPED();
        HDprintf(" API functions for basic file aren't supported with this connector\n");
        return 0;
    }

    if (prefix_filename(test_path_prefix, fake_filename, &prefixed_filename) < 0) {
        H5_FAILED();
        HDprintf(" couldn't prefix filename\n");
        goto error;
    }

    BEGIN_MULTIPART
    {
        PART_BEGIN(H5Fis_accessible_valid_file)
        {
            TESTING_2("H5Fis_accessible on existing file");

            /* A negative return is an error; zero means "not accessible" */
            if ((is_accessible = H5Fis_accessible(H5_api_test_filename, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't determine if file '%s' is accessible with default FAPL\n",
                         H5_api_test_filename);
                PART_ERROR(H5Fis_accessible_valid_file);
            }

            if (!is_accessible) {
                H5_FAILED();
                HDprintf(" file '%s' is not accessible with default FAPL\n", H5_api_test_filename);
                PART_ERROR(H5Fis_accessible_valid_file);
            }

            PASSED();
        }
        PART_END(H5Fis_accessible_valid_file);

        /* Reset so part 2 doesn't see part 1's result */
        is_accessible = -1;

        PART_BEGIN(H5Fis_accessible_invalid_file)
        {
            TESTING_2("H5Fis_accessible on non-existing file");

            /* The call may legitimately fail or return FALSE here, so
             * suppress the expected error stack */
            H5E_BEGIN_TRY
            {
                is_accessible = H5Fis_accessible(prefixed_filename, H5P_DEFAULT);
            }
            H5E_END_TRY;

            /* Only a positive (TRUE) result is a test failure */
            if (is_accessible > 0) {
                H5_FAILED();
                HDprintf(" non-existent file '%s' was accessible with default FAPL: is_accessible=%d!\n",
                         prefixed_filename, is_accessible);
                PART_ERROR(H5Fis_accessible_invalid_file);
            }

            PASSED();
        }
        PART_END(H5Fis_accessible_invalid_file);
    }
    END_MULTIPART;

    HDfree(prefixed_filename);
    prefixed_filename = NULL;

    return 0;

error:
    HDfree(prefixed_filename);

    return 1;
}
+
/*
 * A test to check that a FCPL used for file creation can
 * be persisted and that a valid copy of that FCPL can be
 * retrieved later with a call to H5Fget_create_plist. Also
 * tests that a valid copy of a FAPL used for file access
 * can be retrieved with a call to H5Fget_access_plist.
 *
 * File 1 is created with a userblock set on its FCPL; file 2 is a
 * control created with default property lists. The parts verify the
 * property round-trips through H5Fget_create_plist both for the
 * still-open files and after closing and re-opening them.
 */
static int
test_file_property_lists(void)
{
    hsize_t prop_val           = 0;
    hid_t   file_id1           = H5I_INVALID_HID;
    hid_t   file_id2           = H5I_INVALID_HID;
    hid_t   fcpl_id1           = H5I_INVALID_HID;
    hid_t   fcpl_id2           = H5I_INVALID_HID;
    hid_t   fapl_id1           = H5I_INVALID_HID;
    hid_t   fapl_id2           = H5I_INVALID_HID;
    char   *prefixed_filename1 = NULL; /* heap-allocated by prefix_filename() */
    char   *prefixed_filename2 = NULL; /* heap-allocated by prefix_filename() */

    TESTING_MULTIPART("file property list operations");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_MORE) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
        SKIPPED();
        HDprintf(" API functions for basic or more file or get property list aren't supported with this "
                 "connector\n");
        return 0;
    }

    TESTING_2("test setup");

    if (prefix_filename(test_path_prefix, FILE_PROPERTY_LIST_TEST_FNAME1, &prefixed_filename1) < 0) {
        H5_FAILED();
        HDprintf(" couldn't prefix filename\n");
        goto error;
    }
    if (prefix_filename(test_path_prefix, FILE_PROPERTY_LIST_TEST_FNAME2, &prefixed_filename2) < 0) {
        H5_FAILED();
        HDprintf(" couldn't prefix filename\n");
        goto error;
    }

    if ((fcpl_id1 = H5Pcreate(H5P_FILE_CREATE)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create FCPL\n");
        goto error;
    }

    /* Userblock size is the "marker" property checked throughout the test */
    if (H5Pset_userblock(fcpl_id1, FILE_PROPERTY_LIST_TEST_FCPL_PROP_VAL) < 0) {
        H5_FAILED();
        HDprintf(" failed to set test property on FCPL\n");
        goto error;
    }

    if ((file_id1 = H5Fcreate(prefixed_filename1, H5F_ACC_TRUNC, fcpl_id1, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create file\n");
        goto error;
    }

    /* Control file: created with a default FCPL */
    if ((file_id2 = H5Fcreate(prefixed_filename2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create file\n");
        goto error;
    }

    /* The original FCPL handle is no longer needed; the file keeps a copy.
     * fcpl_id1 is reused below for the copy returned by H5Fget_create_plist. */
    if (H5Pclose(fcpl_id1) < 0)
        TEST_ERROR;

    PASSED();

    BEGIN_MULTIPART
    {
        PART_BEGIN(H5Fget_create_plist)
        {
            TESTING_2("H5Fget_create_plist");

            /* Try to receive copies of the two property lists, one which has the property set and one which
             * does not */
            if ((fcpl_id1 = H5Fget_create_plist(file_id1)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't get FCPL\n");
                PART_ERROR(H5Fget_create_plist);
            }

            if ((fcpl_id2 = H5Fget_create_plist(file_id2)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't get FCPL\n");
                PART_ERROR(H5Fget_create_plist);
            }

            /* Ensure that property list 1 has the property set and property list 2 does not */
            if (H5Pget_userblock(fcpl_id1, &prop_val) < 0) {
                H5_FAILED();
                HDprintf(" failed to retrieve test property from FCPL\n");
                PART_ERROR(H5Fget_create_plist);
            }

            if (prop_val != FILE_PROPERTY_LIST_TEST_FCPL_PROP_VAL) {
                H5_FAILED();
                HDprintf(" retrieved test property value '%llu' did not match expected value '%llu'\n",
                         (long long unsigned)prop_val,
                         (long long unsigned)FILE_PROPERTY_LIST_TEST_FCPL_PROP_VAL);
                PART_ERROR(H5Fget_create_plist);
            }

            if (H5Pget_userblock(fcpl_id2, &prop_val) < 0) {
                H5_FAILED();
                HDprintf(" failed to retrieve test property from FCPL\n");
                PART_ERROR(H5Fget_create_plist);
            }

            if (prop_val == FILE_PROPERTY_LIST_TEST_FCPL_PROP_VAL) {
                HDprintf(" retrieved test property value '%llu' matched control value '%llu' when it "
                         "shouldn't have\n",
                         (long long unsigned)prop_val,
                         (long long unsigned)FILE_PROPERTY_LIST_TEST_FCPL_PROP_VAL);
                PART_ERROR(H5Fget_create_plist);
            }

            PASSED();
        }
        PART_END(H5Fget_create_plist);

        PART_BEGIN(H5Fget_access_plist)
        {
            TESTING_2("H5Fget_access_plist");

            /* Due to the nature of needing to supply a FAPL with the VOL connector having been set on it to
             * the H5Fcreate() call, we cannot exactly test using H5P_DEFAULT as the FAPL for one of the
             * create calls in this test. However, the use of H5Fget_access_plist() will still be used to
             * check that the FAPL is correct after both creating and opening a file.
             */
            if ((fapl_id1 = H5Fget_access_plist(file_id1)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't get FAPL\n");
                PART_ERROR(H5Fget_access_plist);
            }

            if ((fapl_id2 = H5Fget_access_plist(file_id2)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't get FAPL\n");
                PART_ERROR(H5Fget_access_plist);
            }

            PASSED();
        }
        PART_END(H5Fget_access_plist);

        /* Now see if we can still retrieve copies of the property lists upon opening
         * (instead of creating) a file. If they were reconstructed properly upon file
         * open, the creation property lists should also have the same test values
         * as set before.
         */
        /* Release every handle from the earlier parts (guarded, since a
         * failed part may have left some invalid) before re-opening */
        if (fcpl_id1 >= 0) {
            H5E_BEGIN_TRY
            {
                H5Pclose(fcpl_id1);
            }
            H5E_END_TRY;
            fcpl_id1 = H5I_INVALID_HID;
        }
        if (fcpl_id2 >= 0) {
            H5E_BEGIN_TRY
            {
                H5Pclose(fcpl_id2);
            }
            H5E_END_TRY;
            fcpl_id2 = H5I_INVALID_HID;
        }
        if (fapl_id1 >= 0) {
            H5E_BEGIN_TRY
            {
                H5Pclose(fapl_id1);
            }
            H5E_END_TRY;
            fapl_id1 = H5I_INVALID_HID;
        }
        if (fapl_id2 >= 0) {
            H5E_BEGIN_TRY
            {
                H5Pclose(fapl_id2);
            }
            H5E_END_TRY;
            fapl_id2 = H5I_INVALID_HID;
        }
        if (file_id1 >= 0) {
            H5E_BEGIN_TRY
            {
                H5Fclose(file_id1);
            }
            H5E_END_TRY;
            file_id1 = H5I_INVALID_HID;
        }
        if (file_id2 >= 0) {
            H5E_BEGIN_TRY
            {
                H5Fclose(file_id2);
            }
            H5E_END_TRY;
            file_id2 = H5I_INVALID_HID;
        }

        PART_BEGIN(H5Fget_create_plist_reopened)
        {
            TESTING_2("H5Fget_create_plist after re-opening file");

            if ((file_id1 = H5Fopen(prefixed_filename1, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't open file\n");
                PART_ERROR(H5Fget_create_plist_reopened);
            }

            if ((file_id2 = H5Fopen(prefixed_filename2, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't open file\n");
                PART_ERROR(H5Fget_create_plist_reopened);
            }

            if ((fcpl_id1 = H5Fget_create_plist(file_id1)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't get FCPL\n");
                PART_ERROR(H5Fget_create_plist_reopened);
            }

            if ((fcpl_id2 = H5Fget_create_plist(file_id2)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't get FCPL\n");
                PART_ERROR(H5Fget_create_plist_reopened);
            }

            /* Check the values of the test property */
            if (H5Pget_userblock(fcpl_id1, &prop_val) < 0) {
                H5_FAILED();
                HDprintf(" failed to retrieve test property from FCPL\n");
                PART_ERROR(H5Fget_create_plist_reopened);
            }

            if (prop_val != FILE_PROPERTY_LIST_TEST_FCPL_PROP_VAL) {
                H5_FAILED();
                HDprintf(" retrieved test property value '%llu' did not match expected value '%llu'\n",
                         (long long unsigned)prop_val,
                         (long long unsigned)FILE_PROPERTY_LIST_TEST_FCPL_PROP_VAL);
                PART_ERROR(H5Fget_create_plist_reopened);
            }

            if (H5Pget_userblock(fcpl_id2, &prop_val) < 0) {
                H5_FAILED();
                HDprintf(" failed to retrieve test property from FCPL\n");
                PART_ERROR(H5Fget_create_plist_reopened);
            }

            if (prop_val == FILE_PROPERTY_LIST_TEST_FCPL_PROP_VAL) {
                HDprintf(" retrieved test property value '%llu' matched control value '%llu' when it "
                         "shouldn't have\n",
                         (long long unsigned)prop_val,
                         (long long unsigned)FILE_PROPERTY_LIST_TEST_FCPL_PROP_VAL);
                PART_ERROR(H5Fget_create_plist_reopened);
            }

            PASSED();
        }
        PART_END(H5Fget_create_plist_reopened);
    }
    END_MULTIPART;

    TESTING_2("test cleanup");

    if (H5Pclose(fcpl_id1) < 0)
        TEST_ERROR;
    if (H5Pclose(fcpl_id2) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id1) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id2) < 0)
        TEST_ERROR;

    HDfree(prefixed_filename1);
    prefixed_filename1 = NULL;
    HDfree(prefixed_filename2);
    prefixed_filename2 = NULL;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup; errors are suppressed on this path */
    H5E_BEGIN_TRY
    {
        H5Pclose(fcpl_id1);
        H5Pclose(fcpl_id2);
        H5Pclose(fapl_id1);
        H5Pclose(fapl_id2);
        H5Fclose(file_id1);
        H5Fclose(file_id2);
    }
    H5E_END_TRY;

    HDfree(prefixed_filename1);
    HDfree(prefixed_filename2);

    return 1;
}
+
/*
 * A test to check that the file intent flags can be retrieved.
 *
 * Verifies H5Fget_intent for three cases: a freshly created file
 * (expected H5F_ACC_RDWR), a file opened read-only (H5F_ACC_RDONLY),
 * and a file opened read-write (H5F_ACC_RDWR). The file handle is
 * closed and recycled between parts.
 */
static int
test_get_file_intent(void)
{
    unsigned file_intent;
    hid_t    file_id           = H5I_INVALID_HID;
    char    *prefixed_filename = NULL; /* heap-allocated by prefix_filename() */

    TESTING_MULTIPART("retrieval of file intent with H5Fget_intent");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_MORE)) {
        SKIPPED();
        HDprintf(" API functions for basic or more file aren't supported with this connector\n");
        return 0;
    }

    TESTING_2("test setup");

    if (prefix_filename(test_path_prefix, FILE_INTENT_TEST_FILENAME, &prefixed_filename) < 0) {
        H5_FAILED();
        HDprintf(" couldn't prefix filename\n");
        goto error;
    }

    /* Test that file intent retrieval works correctly for file create */
    if ((file_id = H5Fcreate(prefixed_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create file '%s'\n", prefixed_filename);
        goto error;
    }

    PASSED();

    BEGIN_MULTIPART
    {
        PART_BEGIN(H5Fget_intent_file_creation)
        {
            TESTING_2("H5Fget_intent on newly-created file");

            if (H5Fget_intent(file_id, &file_intent) < 0) {
                H5_FAILED();
                HDprintf(" failed to retrieve file intent\n");
                PART_ERROR(H5Fget_intent_file_creation);
            }

            /* A newly created file is always writable */
            if (H5F_ACC_RDWR != file_intent) {
                H5_FAILED();
                HDprintf(" received incorrect file intent for file creation\n");
                PART_ERROR(H5Fget_intent_file_creation);
            }

            PASSED();
        }
        PART_END(H5Fget_intent_file_creation);

        /* Close the created file (guarded) so the handle can be reused
         * for the open-based parts below */
        if (file_id >= 0) {
            H5E_BEGIN_TRY
            {
                H5Fclose(file_id);
            }
            H5E_END_TRY;
            file_id = H5I_INVALID_HID;
        }

        PART_BEGIN(H5Fget_intent_rdonly_file_open)
        {
            TESTING_2("H5Fget_intent for file opened read-only");

            /* Test that file intent retrieval works correctly for file open */
            if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
                PART_ERROR(H5Fget_intent_rdonly_file_open);
            }

            if (H5Fget_intent(file_id, &file_intent) < 0) {
                H5_FAILED();
                HDprintf(" failed to retrieve file intent\n");
                PART_ERROR(H5Fget_intent_rdonly_file_open);
            }

            if (H5F_ACC_RDONLY != file_intent) {
                H5_FAILED();
                HDprintf(" received incorrect file intent for read-only file open\n");
                PART_ERROR(H5Fget_intent_rdonly_file_open);
            }

            PASSED();
        }
        PART_END(H5Fget_intent_rdonly_file_open);

        /* Recycle the handle again before the read-write open */
        if (file_id >= 0) {
            H5E_BEGIN_TRY
            {
                H5Fclose(file_id);
            }
            H5E_END_TRY;
            file_id = H5I_INVALID_HID;
        }

        PART_BEGIN(H5Fget_intent_rdwrite_file_open)
        {
            TESTING_2("H5Fget_intent for file opened read-write");

            if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
                PART_ERROR(H5Fget_intent_rdwrite_file_open);
            }

            if (H5Fget_intent(file_id, &file_intent) < 0) {
                H5_FAILED();
                HDprintf(" failed to retrieve file intent\n");
                PART_ERROR(H5Fget_intent_rdwrite_file_open);
            }

            if (H5F_ACC_RDWR != file_intent) {
                H5_FAILED();
                HDprintf(" received incorrect file intent\n");
                PART_ERROR(H5Fget_intent_rdwrite_file_open);
            }

            PASSED();
        }
        PART_END(H5Fget_intent_rdwrite_file_open);

        /* Final close of the handle used by the last part */
        if (file_id >= 0) {
            H5E_BEGIN_TRY
            {
                H5Fclose(file_id);
            }
            H5E_END_TRY;
            file_id = H5I_INVALID_HID;
        }
    }
    END_MULTIPART;

    HDfree(prefixed_filename);
    prefixed_filename = NULL;

    return 0;

error:
    /* Best-effort cleanup; errors are suppressed on this path */
    H5E_BEGIN_TRY
    {
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    HDfree(prefixed_filename);

    return 1;
}
+
+/*
+ * A test to check that the number of open objects and IDs of objects in a file
+ * can be retrieved.
+ */
+static int
+test_get_file_obj_count(void)
+{
+ ssize_t obj_count;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t file_id2 = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t object_id = H5I_INVALID_HID;
+ hid_t named_dtype_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t dspace_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ char *prefixed_filename1 = NULL;
+ char *prefixed_filename2 = NULL;
+
+ TESTING_MULTIPART("retrieval of open object number and IDs");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic or more file, basic dataset, group, datatype, or attribute "
+ "aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if (prefix_filename(test_path_prefix, GET_OBJ_COUNT_TEST_FILENAME1, &prefixed_filename1) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't prefix filename\n");
+ goto error;
+ }
+ if (prefix_filename(test_path_prefix, GET_OBJ_COUNT_TEST_FILENAME2, &prefixed_filename2) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't prefix filename\n");
+ goto error;
+ }
+
+ if ((file_id = H5Fcreate(prefixed_filename1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s'\n", prefixed_filename1);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(file_id, GET_OBJ_COUNT_TEST_GRP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", GET_OBJ_COUNT_TEST_GRP_NAME);
+ goto error;
+ }
+
+ /* Create a second file while keeping the first file open */
+ if ((file_id2 = H5Fcreate(prefixed_filename2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s'\n", prefixed_filename2);
+ goto error;
+ }
+
+ /* Create a named datatype */
+ if ((named_dtype_id = H5Tcopy(H5T_NATIVE_INT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't copy a native datatype\n");
+ goto error;
+ }
+
+ if (H5Tcommit2(file_id2, GET_OBJ_COUNT_TEST_NAMED_DTYPE, named_dtype_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't commit a named datatype\n");
+ goto error;
+ }
+
+ /* Create a dataspace for the attribute and dataset */
+ if ((dspace_id = H5Screate(H5S_SCALAR)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create data space for attribute\n");
+ goto error;
+ }
+
+ /* Create an attribute for the second file */
+ if ((attr_id = H5Acreate2(file_id2, GET_OBJ_COUNT_TEST_ATTR_NAME, H5T_NATIVE_INT, dspace_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create the attribute '%s'\n", GET_OBJ_COUNT_TEST_ATTR_NAME);
+ goto error;
+ }
+
+ /* Create a dataset for the second file */
+ if ((dset_id = H5Dcreate2(file_id2, GET_OBJ_COUNT_TEST_DSET_NAME, H5T_NATIVE_INT, dspace_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create the dataset '%s'\n", GET_OBJ_COUNT_TEST_DSET_NAME);
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Fget_obj_count_files)
+ {
+ TESTING_2("H5Fget_obj_count for files");
+
+ /* Get the number of files currently opened */
+ if ((obj_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_FILE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get the number of open files\n");
+ PART_ERROR(H5Fget_obj_count_files);
+ }
+
+ if (obj_count != 2) {
+ H5_FAILED();
+ HDprintf(" number of open files (%ld) did not match expected number (2)\n", obj_count);
+ PART_ERROR(H5Fget_obj_count_files);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fget_obj_count_files);
+
+ PART_BEGIN(H5Fget_obj_count_grps_single_file)
+ {
+ TESTING_2("H5Fget_obj_count for groups in single file");
+
+ /* Get the number of groups */
+ if ((obj_count = H5Fget_obj_count(file_id, H5F_OBJ_GROUP)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve number of open groups\n");
+ PART_ERROR(H5Fget_obj_count_grps_single_file);
+ }
+
+ if (obj_count != 1) {
+ H5_FAILED();
+ HDprintf(" number of open groups (%ld) did not match expected number (1)\n", obj_count);
+ PART_ERROR(H5Fget_obj_count_grps_single_file);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fget_obj_count_grps_single_file);
+
+ PART_BEGIN(H5Fget_obj_count_grps)
+ {
+ TESTING_2("H5Fget_obj_count for groups");
+
+ /* Get the number of groups in two opened files */
+ if ((obj_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_GROUP)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get the number of open groups\n");
+ PART_ERROR(H5Fget_obj_count_grps);
+ }
+
+ if (obj_count != 1) {
+ H5_FAILED();
+ HDprintf(" number of open groups (%ld) did not match expected number (1)\n", obj_count);
+ PART_ERROR(H5Fget_obj_count_grps);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fget_obj_count_grps);
+
+ PART_BEGIN(H5Fget_obj_count_types)
+ {
+ TESTING_2("H5Fget_obj_count for datatypes");
+#ifndef WRONG_DATATYPE_OBJ_COUNT
+ /* Get the number of named datatype in two opened files */
+ if ((obj_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_DATATYPE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get the number of open named datatypes\n");
+ PART_ERROR(H5Fget_obj_count_types);
+ }
+
+ if (obj_count != 1) {
+ H5_FAILED();
+ HDprintf(" number of open named datatypes (%ld) did not match expected number (1)\n",
+ obj_count);
+ PART_ERROR(H5Fget_obj_count_types);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Fget_obj_count_types);
+#endif
+ }
+ PART_END(H5Fget_obj_count_types);
+
+ PART_BEGIN(H5Fget_obj_count_attrs)
+ {
+ TESTING_2("H5Fget_obj_count for attributes");
+
+ /* Get the number of attribute in two opened files */
+ if ((obj_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_ATTR)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get the number of open attributes\n");
+ PART_ERROR(H5Fget_obj_count_attrs);
+ }
+
+ if (obj_count != 1) {
+ H5_FAILED();
+ HDprintf(" number of open attributes (%ld) did not match expected number (1)\n",
+ obj_count);
+ PART_ERROR(H5Fget_obj_count_attrs);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fget_obj_count_attrs);
+
+ PART_BEGIN(H5Fget_obj_count_dsets)
+ {
+ TESTING_2("H5Fget_obj_count for datasets");
+
+ /* Get the number of dataset in two opened files */
+ if ((obj_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_DATASET)) < 0 || obj_count != 1) {
+ H5_FAILED();
+ HDprintf(" couldn't get the number of open datasets\n");
+ PART_ERROR(H5Fget_obj_count_dsets);
+ }
+
+ if (obj_count != 1) {
+ H5_FAILED();
+ HDprintf(" number of open datasets (%ld) did not match expected number (1)\n", obj_count);
+ PART_ERROR(H5Fget_obj_count_dsets);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fget_obj_count_dsets);
+
+ PART_BEGIN(H5Fget_obj_count_all_single_file)
+ {
+ TESTING_2("H5Fget_obj_count for all object types in single file");
+
+ /* Get the number of all open objects */
+ if ((obj_count = H5Fget_obj_count(file_id, H5F_OBJ_ALL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve number of open objects\n");
+ PART_ERROR(H5Fget_obj_count_all_single_file);
+ }
+
+ /* One for the file and another for the group */
+ if (obj_count != 2) {
+ H5_FAILED();
+ HDprintf(" number of open objects (%ld) did not match expected number (2)\n", obj_count);
+ PART_ERROR(H5Fget_obj_count_all_single_file);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fget_obj_count_all_single_file);
+
+ PART_BEGIN(H5Fget_obj_count_all)
+ {
+ TESTING_2("H5Fget_obj_count for all object types");
+#ifndef WRONG_DATATYPE_OBJ_COUNT
+ /* Get the number of all open objects */
+ if ((obj_count = H5Fget_obj_count(H5F_OBJ_ALL, H5F_OBJ_ALL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve number of open objects\n");
+ PART_ERROR(H5Fget_obj_count_all);
+ }
+
+ if (obj_count != 6) {
+ H5_FAILED();
+ HDprintf(" number of open objects (%ld) did not match expected number (6)\n", obj_count);
+ PART_ERROR(H5Fget_obj_count_all);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Fget_obj_count_all);
+#endif
+ }
+ PART_END(H5Fget_obj_count_all);
+
+ PART_BEGIN(H5Fget_obj_ids_singular_grp)
+ {
+ TESTING_2("H5Fget_obj_ids for a singular group");
+
+ if (H5Fget_obj_ids(file_id, H5F_OBJ_GROUP, (size_t)obj_count, &object_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get opened group IDs\n");
+ PART_ERROR(H5Fget_obj_ids_singular_grp);
+ }
+
+ if (object_id != group_id) {
+ H5_FAILED();
+ HDprintf(" opened object ID (%ld) did not match only currently open group ID (%ld)\n",
+ object_id, group_id);
+ PART_ERROR(H5Fget_obj_ids_singular_grp);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fget_obj_ids_singular_grp);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(dspace_id) < 0)
+ TEST_ERROR;
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(named_dtype_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id2) < 0)
+ TEST_ERROR;
+
+ HDfree(prefixed_filename1);
+ prefixed_filename1 = NULL;
+ HDfree(prefixed_filename2);
+ prefixed_filename2 = NULL;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(group_id);
+ H5Tclose(named_dtype_id);
+ H5Sclose(dspace_id);
+ H5Aclose(attr_id);
+ H5Dclose(dset_id);
+ H5Fclose(file_id);
+ H5Fclose(file_id2);
+ }
+ H5E_END_TRY;
+
+ HDfree(prefixed_filename1);
+ HDfree(prefixed_filename2);
+
+ return 1;
+}
+
+/*
+ * A test to check that opening files in an overlapping way
+ * works correctly.  The same file is held open through two
+ * file IDs at once (one from H5Fcreate, one from H5Fopen) and
+ * objects are created through both; H5Fget_obj_count is then
+ * used to verify per-file-ID open-object bookkeeping.
+ * Compiled out (and reported as SKIPPED) when the connector
+ * can't support double object opens (NO_DOUBLE_OBJECT_OPENS).
+ */
+static int
+test_file_open_overlap(void)
+{
+#ifndef NO_DOUBLE_OBJECT_OPENS
+ ssize_t obj_count;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t file_id2 = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t dspace_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ char *prefixed_filename = NULL;
+#endif
+
+ TESTING("overlapping file opens");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic or more file, dataset, or group aren't supported with this "
+ "connector\n");
+ return 0;
+ }
+
+#ifndef NO_DOUBLE_OBJECT_OPENS
+ if (prefix_filename(test_path_prefix, OVERLAPPING_FILENAME, &prefixed_filename) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't prefix filename\n");
+ goto error;
+ }
+
+ if ((file_id = H5Fcreate(prefixed_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s'\n", prefixed_filename);
+ goto error;
+ }
+
+ /* Open the same file a second time while the first ID is still open */
+ if ((file_id2 = H5Fopen(prefixed_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", prefixed_filename);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(file_id, OVERLAPPING_OPEN_TEST_GRP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", OVERLAPPING_OPEN_TEST_GRP_NAME);
+ goto error;
+ }
+
+ /* Create a dataspace for the dataset (reused for both datasets below) */
+ if ((dspace_id = H5Screate(H5S_SCALAR)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create data space for dataset\n");
+ goto error;
+ }
+
+ /* Create a dataset in the group of the first file */
+ if ((dset_id = H5Dcreate2(group_id, OVERLAPPING_OPEN_TEST_DSET_NAME, H5T_NATIVE_INT, dspace_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create the dataset '%s'\n", OVERLAPPING_OPEN_TEST_DSET_NAME);
+ goto error;
+ }
+
+ /* Get the number of objects opened in the first file: 3 == file + dataset + group.
+ * H5F_OBJ_LOCAL restricts the count to objects opened via this particular file ID. */
+ if ((obj_count = H5Fget_obj_count(file_id, H5F_OBJ_LOCAL | H5F_OBJ_ALL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve the number of objects opened in the file\n");
+ goto error;
+ }
+
+ if (obj_count != 3) {
+ H5_FAILED();
+ HDprintf(" number of objects opened in file (%ld) did not match expected number (3)\n", obj_count);
+ goto error;
+ }
+
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ /* Create a dataset in the second file */
+ if ((dset_id = H5Dcreate2(file_id2, OVERLAPPING_OPEN_TEST_DSET_NAME, H5T_NATIVE_INT, dspace_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create the dataset '%s'\n", OVERLAPPING_OPEN_TEST_DSET_NAME);
+ goto error;
+ }
+
+ /* Get the number of objects opened in the second file: 2 == file + dataset */
+ if ((obj_count = H5Fget_obj_count(file_id2, H5F_OBJ_ALL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve the number of objects opened in the file\n");
+ goto error;
+ }
+
+ if (obj_count != 2) {
+ H5_FAILED();
+ HDprintf(" number of objects opened in the file (%ld) did not match expected number (2)\n",
+ obj_count);
+ goto error;
+ }
+
+ if (H5Sclose(dspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id2) < 0)
+ TEST_ERROR;
+
+ HDfree(prefixed_filename);
+ prefixed_filename = NULL;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(group_id);
+ H5Sclose(dspace_id);
+ H5Dclose(dset_id);
+ H5Fclose(file_id);
+ H5Fclose(file_id2);
+ }
+ H5E_END_TRY;
+
+ HDfree(prefixed_filename);
+
+ return 1;
+#else
+ SKIPPED();
+ return 0;
+#endif
+}
+
+/*
+ * A test to check that file mounting and unmounting works
+ * correctly.  Creates a fresh file containing one group, opens
+ * the main API test file, mounts the latter on the group with
+ * H5Fmount, then unmounts it with H5Funmount.
+ * Returns 0 on success/skip, 1 on failure.  Compiled out (and
+ * reported as SKIPPED) when NO_FILE_MOUNTS is defined.
+ */
+static int
+test_file_mounts(void)
+{
+#ifndef NO_FILE_MOUNTS
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t child_fid = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ char *prefixed_filename = NULL;
+#endif
+
+ TESTING("file mounting/unmounting");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_MOUNT) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, file mount, or basic group aren't supported with this "
+ "connector\n");
+ return 0;
+ }
+
+#ifndef NO_FILE_MOUNTS
+ if (prefix_filename(test_path_prefix, FILE_MOUNT_TEST_FILENAME, &prefixed_filename) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't prefix filename\n");
+ goto error;
+ }
+
+ if ((file_id = H5Fcreate(prefixed_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s'\n", prefixed_filename);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(file_id, FILE_MOUNT_TEST_GRP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", FILE_MOUNT_TEST_GRP_NAME);
+ goto error;
+ }
+
+ if ((child_fid = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ /* Mount one file (child_fid) to the group of another file (file_id) */
+ if (H5Fmount(file_id, FILE_MOUNT_TEST_GRP_NAME, child_fid, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't mount file\n");
+ goto error;
+ }
+
+ if (H5Funmount(file_id, FILE_MOUNT_TEST_GRP_NAME) < 0) {
+ H5_FAILED();
+ /* Fixed copy-pasted message: this is the unmount failure path */
+ HDprintf(" couldn't unmount file\n");
+ goto error;
+ }
+
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(child_fid) < 0)
+ TEST_ERROR;
+
+ HDfree(prefixed_filename);
+ prefixed_filename = NULL;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(group_id);
+ H5Fclose(file_id);
+ H5Fclose(child_fid);
+ }
+ H5E_END_TRY;
+
+ HDfree(prefixed_filename);
+
+ return 1;
+#else
+ SKIPPED();
+ return 0;
+#endif
+}
+
+/*
+ * A test to ensure that a file's name can be retrieved with
+ * H5Fget_name through IDs of several object types: the file
+ * itself, a non-root group, a dataset, an attribute and a
+ * committed datatype.  A final part checks that H5Fget_name
+ * fails when handed a dataspace ID, since dataspaces are not
+ * contained in a file.
+ */
+static int
+test_get_file_name(void)
+{
+ ssize_t file_name_buf_len = 0;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dspace_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t named_dtype_id = H5I_INVALID_HID;
+ char *prefixed_filename = NULL;
+ char *file_name_buf = NULL;
+
+ TESTING_MULTIPART("retrieval of file name");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic or more file, basic dataset, group, datatype, or attribute "
+ "aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if (prefix_filename(test_path_prefix, GET_FILE_NAME_TEST_FNAME, &prefixed_filename) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't prefix filename\n");
+ goto error;
+ }
+
+ if ((file_id = H5Fcreate(prefixed_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s'\n", prefixed_filename);
+ goto error;
+ }
+
+ /* Retrieve the size of the file name (a NULL buffer makes H5Fget_name
+ * return the length only; it does not include the NUL terminator) */
+ if ((file_name_buf_len = H5Fget_name(file_id, NULL, 0)) < 0)
+ TEST_ERROR;
+
+ /* Allocate buffer for file name (+1 for the NUL terminator) */
+ if (NULL == (file_name_buf = (char *)HDmalloc((size_t)file_name_buf_len + 1)))
+ TEST_ERROR;
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Fget_name_file_id)
+ {
+ TESTING_2("H5Fget_name using file ID");
+
+ memset(file_name_buf, 0, (size_t)file_name_buf_len);
+
+ /* Retrieve the actual file name */
+ if (H5Fget_name(file_id, file_name_buf, (size_t)file_name_buf_len + 1) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get file name %s\n", prefixed_filename);
+ PART_ERROR(H5Fget_name_file_id);
+ }
+
+ if (HDstrncmp(file_name_buf, prefixed_filename, (size_t)file_name_buf_len)) {
+ H5_FAILED();
+ HDprintf(" file name '%s' didn't match expected name '%s'\n", file_name_buf,
+ prefixed_filename);
+ PART_ERROR(H5Fget_name_file_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fget_name_file_id);
+
+ PART_BEGIN(H5Fget_name_grp_id)
+ {
+ TESTING_2("H5Fget_name using non-root group ID");
+
+ /* Attempt to retrieve the name of the file from an object that isn't the root group */
+ memset(file_name_buf, 0, (size_t)file_name_buf_len);
+
+ if ((group_id = H5Gcreate2(file_id, GET_FILE_NAME_TEST_GRP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create group '%s'\n", GET_FILE_NAME_TEST_GRP_NAME);
+ PART_ERROR(H5Fget_name_grp_id);
+ }
+
+ if (H5Fget_name(group_id, file_name_buf, (size_t)file_name_buf_len + 1) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get file name %s\n", prefixed_filename);
+ PART_ERROR(H5Fget_name_grp_id);
+ }
+
+ if (HDstrncmp(file_name_buf, prefixed_filename, (size_t)file_name_buf_len)) {
+ H5_FAILED();
+ HDprintf(" file name '%s' didn't match expected name '%s'\n", file_name_buf,
+ prefixed_filename);
+ PART_ERROR(H5Fget_name_grp_id);
+ }
+
+ /* Close the group here so a later PART failure can't leak it;
+ * errors on close are deliberately suppressed */
+ if (group_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(group_id);
+ }
+ H5E_END_TRY;
+ group_id = H5I_INVALID_HID;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fget_name_grp_id);
+
+ PART_BEGIN(H5Fget_name_dset_id)
+ {
+ TESTING_2("H5Fget_name using dataset ID");
+
+ if ((dspace_id = H5Screate(H5S_SCALAR)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataspace\n");
+ PART_ERROR(H5Fget_name_dset_id);
+ }
+
+ /* Create a dataset in the file */
+ if ((dset_id = H5Dcreate2(file_id, GET_FILE_NAME_TEST_DSET_NAME, H5T_NATIVE_INT, dspace_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create the dataset '%s'\n", GET_FILE_NAME_TEST_DSET_NAME);
+ PART_ERROR(H5Fget_name_dset_id);
+ }
+
+ /* Get and verify file name from the dataset */
+ if (H5Fget_name(dset_id, file_name_buf, (size_t)file_name_buf_len + 1) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get file name %s\n", prefixed_filename);
+ PART_ERROR(H5Fget_name_dset_id);
+ }
+
+ if (HDstrncmp(file_name_buf, prefixed_filename, (size_t)file_name_buf_len)) {
+ H5_FAILED();
+ HDprintf(" file name '%s' didn't match expected name '%s'\n", file_name_buf,
+ prefixed_filename);
+ PART_ERROR(H5Fget_name_dset_id);
+ }
+
+ if (dspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(dspace_id);
+ }
+ H5E_END_TRY;
+ dspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fget_name_dset_id);
+
+ PART_BEGIN(H5Fget_name_attr_id)
+ {
+ TESTING_2("H5Fget_name using attribute ID");
+
+ if ((dspace_id = H5Screate(H5S_SCALAR)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataspace\n");
+ PART_ERROR(H5Fget_name_attr_id);
+ }
+
+ /* Create an attribute for the dataset */
+ if ((attr_id = H5Acreate2(file_id, GET_FILE_NAME_TEST_ATTR_NAME, H5T_NATIVE_INT, dspace_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create the attribute '%s'\n", GET_FILE_NAME_TEST_ATTR_NAME);
+ PART_ERROR(H5Fget_name_attr_id);
+ }
+
+ /* Get and verify file name from the attribute */
+ if (H5Fget_name(attr_id, file_name_buf, (size_t)file_name_buf_len + 1) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get file name %s\n", prefixed_filename);
+ PART_ERROR(H5Fget_name_attr_id);
+ }
+
+ if (HDstrncmp(file_name_buf, prefixed_filename, (size_t)file_name_buf_len)) {
+ H5_FAILED();
+ HDprintf(" file name '%s' didn't match expected name '%s'\n", file_name_buf,
+ prefixed_filename);
+ PART_ERROR(H5Fget_name_attr_id);
+ }
+
+ if (dspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(dspace_id);
+ }
+ H5E_END_TRY;
+ dspace_id = H5I_INVALID_HID;
+ }
+ if (attr_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Aclose(attr_id);
+ }
+ H5E_END_TRY;
+ attr_id = H5I_INVALID_HID;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fget_name_attr_id);
+
+ PART_BEGIN(H5Fget_name_dtype_id)
+ {
+ TESTING_2("H5Fget_name using committed datatype ID");
+
+ /* Create a named datatype */
+ if ((named_dtype_id = H5Tcopy(H5T_NATIVE_INT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't copy a native datatype\n");
+ PART_ERROR(H5Fget_name_dtype_id);
+ }
+
+ if (H5Tcommit2(file_id, GET_FILE_NAME_TEST_NAMED_DTYPE, named_dtype_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't commit a named datatype\n");
+ PART_ERROR(H5Fget_name_dtype_id);
+ }
+
+ /* Get and verify file name from the committed datatype */
+ if (H5Fget_name(named_dtype_id, file_name_buf, (size_t)file_name_buf_len + 1) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get file name %s\n", prefixed_filename);
+ PART_ERROR(H5Fget_name_dtype_id);
+ }
+
+ if (HDstrncmp(file_name_buf, prefixed_filename, (size_t)file_name_buf_len)) {
+ H5_FAILED();
+ HDprintf(" file name '%s' didn't match expected name '%s'\n", file_name_buf,
+ prefixed_filename);
+ PART_ERROR(H5Fget_name_dtype_id);
+ }
+
+ if (named_dtype_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Tclose(named_dtype_id);
+ }
+ H5E_END_TRY;
+ named_dtype_id = H5I_INVALID_HID;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fget_name_dtype_id);
+
+ PART_BEGIN(H5Fget_name_dspace_id)
+ {
+ ssize_t name_len = 0;
+
+ TESTING_2("invalid H5Fget_name using dataspace ID");
+
+ if ((dspace_id = H5Screate(H5S_SCALAR)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataspace\n");
+ PART_ERROR(H5Fget_name_dspace_id);
+ }
+
+ /* Try get file name from data space. Supposed to fail because
+ * it's illegal operation. */
+ H5E_BEGIN_TRY
+ {
+ name_len = H5Fget_name(dspace_id, file_name_buf, (size_t)file_name_buf_len + 1);
+ }
+ H5E_END_TRY;
+
+ if (name_len >= 0) {
+ H5_FAILED();
+ HDprintf(" retrieved file name using H5Fget_name on a dataspace ID!\n");
+ PART_ERROR(H5Fget_name_dspace_id);
+ }
+
+ if (dspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(dspace_id);
+ }
+ H5E_END_TRY;
+ dspace_id = H5I_INVALID_HID;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Fget_name_dspace_id);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (file_name_buf) {
+ HDfree(file_name_buf);
+ file_name_buf = NULL;
+ }
+
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ HDfree(prefixed_filename);
+ prefixed_filename = NULL;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (file_name_buf)
+ HDfree(file_name_buf);
+ H5Tclose(named_dtype_id);
+ H5Sclose(dspace_id);
+ H5Dclose(dset_id);
+ H5Aclose(attr_id);
+ H5Gclose(group_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ HDfree(prefixed_filename);
+
+ return 1;
+}
+
+/*
+ * Cleanup temporary test files.  Each name here matches a file
+ * created by one of the tests above; removal failures are
+ * handled (or ignored) inside remove_test_file.
+ */
+static void
+cleanup_files(void)
+{
+ remove_test_file(test_path_prefix, FILE_CREATE_TEST_FILENAME);
+ remove_test_file(test_path_prefix, FILE_CREATE_EXCL_FILE_NAME);
+
+ /* The below file should not get created */
+ /* remove_test_file(test_path_prefix, FILE_CREATE_INVALID_PARAMS_FILE_NAME); */
+
+#ifndef NO_DOUBLE_OBJECT_OPENS
+ remove_test_file(test_path_prefix, OVERLAPPING_FILENAME);
+#endif
+ remove_test_file(test_path_prefix, FILE_PERMISSION_TEST_FILENAME);
+ remove_test_file(test_path_prefix, FILE_FLUSH_TEST_FILENAME);
+ remove_test_file(test_path_prefix, FILE_PROPERTY_LIST_TEST_FNAME1);
+ remove_test_file(test_path_prefix, FILE_PROPERTY_LIST_TEST_FNAME2);
+ remove_test_file(test_path_prefix, FILE_INTENT_TEST_FILENAME);
+ remove_test_file(test_path_prefix, GET_OBJ_COUNT_TEST_FILENAME1);
+ remove_test_file(test_path_prefix, GET_OBJ_COUNT_TEST_FILENAME2);
+#ifndef NO_FILE_MOUNTS
+ remove_test_file(test_path_prefix, FILE_MOUNT_TEST_FILENAME);
+#endif
+ remove_test_file(test_path_prefix, GET_FILE_NAME_TEST_FNAME);
+}
+
+/*
+ * Entry point for the API file tests: runs every test in
+ * file_tests[] and returns the number of tests that failed
+ * (0 means all passed or were skipped).
+ */
+int
+H5_api_file_test(void)
+{
+ size_t i;
+ int nerrors;
+
+ HDprintf("**********************************************\n");
+ HDprintf("* *\n");
+ HDprintf("* API File Tests *\n");
+ HDprintf("* *\n");
+ HDprintf("**********************************************\n\n");
+
+ /* Each test returns 0 on success/skip and nonzero on failure */
+ for (i = 0, nerrors = 0; i < ARRAY_LENGTH(file_tests); i++) {
+ nerrors += (*file_tests[i])() ? 1 : 0;
+ }
+
+ HDprintf("\n");
+
+ HDprintf("Cleaning up testing files\n");
+ cleanup_files();
+
+ return nerrors;
+}
diff --git a/test/API/H5_api_file_test.h b/test/API/H5_api_file_test.h
new file mode 100644
index 0000000..948cb6a
--- /dev/null
+++ b/test/API/H5_api_file_test.h
@@ -0,0 +1,85 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_FILE_TEST_H
+#define H5_API_FILE_TEST_H
+
+#include "H5_api_test.h"
+
+int H5_api_file_test(void);
+
+/*********************************************
+ * *
+ * API File test defines *
+ * *
+ *********************************************/
+
+#define FILE_CREATE_TEST_FILENAME "test_file.h5"
+
+#define FILE_CREATE_INVALID_PARAMS_FILE_NAME "invalid_params_file.h5"
+
+#define FILE_CREATE_EXCL_FILE_NAME "excl_flag_file.h5"
+
+#define NONEXISTENT_FILENAME "nonexistent_file.h5"
+
+#define OVERLAPPING_FILENAME "overlapping_file.h5"
+#define OVERLAPPING_OPEN_TEST_GRP_NAME "group"
+#define OVERLAPPING_OPEN_TEST_DSET_NAME "dataset"
+
+#define FILE_PERMISSION_TEST_FILENAME "file_permission.h5"
+#define FILE_PERMISSION_TEST_GRP_NAME "group"
+#define FILE_PERMISSION_TEST_DSET_NAME "Dataset"
+#define FILE_PERMISSION_TEST_DSET2_NAME "Dataset2"
+#define FILE_PERMISSION_TEST_ATTR_NAME "attribute"
+#define FILE_PERMISSION_TEST_NAMED_DTYPE "named_dtype"
+
+#define FILE_FLUSH_TEST_FILENAME "flush_file.h5"
+
+#define FILE_PROPERTY_LIST_TEST_FCPL_PROP_VAL 65536
+#define FILE_PROPERTY_LIST_TEST_FNAME1 "property_list_test_file1.h5"
+#define FILE_PROPERTY_LIST_TEST_FNAME2 "property_list_test_file2.h5"
+
+#define FILE_INTENT_TEST_FILENAME "intent_test_file.h5"
+
+#define GET_OBJ_COUNT_TEST_FILENAME1 "file_obj_count1.h5"
+#define GET_OBJ_COUNT_TEST_FILENAME2 "file_obj_count2.h5"
+#define GET_OBJ_COUNT_TEST_GRP_NAME "/group"
+#define GET_OBJ_COUNT_TEST_DSET_NAME "Dataset"
+#define GET_OBJ_COUNT_TEST_ATTR_NAME "Attribute"
+#define GET_OBJ_COUNT_TEST_NAMED_DTYPE "named_dtype"
+
+#define FILE_MOUNT_TEST_FILENAME "file_mount.h5"
+#define FILE_MOUNT_TEST_GRP_NAME "group"
+
+#define GET_FILE_NAME_TEST_FNAME "file_name_retrieval.h5"
+#define GET_FILE_NAME_TEST_GRP_NAME "group"
+#define GET_FILE_NAME_TEST_DSET_NAME "dataset"
+#define GET_FILE_NAME_TEST_ATTR_NAME "attribute"
+#define GET_FILE_NAME_TEST_NAMED_DTYPE "datatype"
+
+#define FILESPACE_INFO_FILENAME "filespace_info.h5"
+#define FSP_SIZE512 (hsize_t)512
+
+#define FILE_GET_ID_TEST_FILENAME "test_file_id.h5"
+
+#define FILE_CLOSE_DEGREE_FILENAME "test_close_degree.h5"
+
+#define GET_FREE_SECTIONS_FILENAME "test_free_sections.h5"
+
+#define FILE_SIZE_FILENAME "file_size.h5"
+#define KB 1024U
+
+#define FILE_INFO_FILENAME "file_info.h5"
+
+#define DOUBLE_GROUP_OPEN_FILENAME "double_group_open.h5"
+
+#endif
diff --git a/test/API/H5_api_group_test.c b/test/API/H5_api_group_test.c
new file mode 100644
index 0000000..f652202
--- /dev/null
+++ b/test/API/H5_api_group_test.c
@@ -0,0 +1,2394 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_group_test.h"
+
+static int test_create_group_under_root(void);
+static int test_create_group_under_existing_group(void);
+static int test_create_many_groups(void);
+static int test_create_deep_groups(void);
+static int test_create_intermediate_group(void);
+static int test_create_group_invalid_params(void);
+static int test_create_anonymous_group(void);
+static int test_create_anonymous_group_invalid_params(void);
+static int test_open_nonexistent_group(void);
+static int test_open_group_invalid_params(void);
+static int test_close_group_invalid_id(void);
+static int test_group_property_lists(void);
+static int test_get_group_info(void);
+static int test_get_group_info_invalid_params(void);
+static int test_flush_group(void);
+static int test_flush_group_invalid_params(void);
+static int test_refresh_group(void);
+static int test_refresh_group_invalid_params(void);
+static int create_group_recursive(hid_t parent_gid, unsigned counter);
+
+/*
+ * The array of group tests to be performed.  Tests are run in
+ * order by H5_api_group_test; each returns 0 on success/skip
+ * and nonzero on failure.
+ */
+static int (*group_tests[])(void) = {
+ test_create_group_under_root,
+ test_create_group_under_existing_group,
+ test_create_many_groups,
+ test_create_deep_groups,
+ test_create_intermediate_group,
+ test_create_group_invalid_params,
+ test_create_anonymous_group,
+ test_create_anonymous_group_invalid_params,
+ test_open_nonexistent_group,
+ test_open_group_invalid_params,
+ test_close_group_invalid_id,
+ test_group_property_lists,
+ test_get_group_info,
+ test_get_group_info_invalid_params,
+ test_flush_group,
+ test_flush_group_invalid_params,
+ test_refresh_group,
+ test_refresh_group_invalid_params,
+};
+
+/*
+ * Verify that H5Gcreate2 can create a new group directly below
+ * the root group of the main API test file.
+ */
+static int
+test_create_group_under_root(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t new_gid = H5I_INVALID_HID;
+
+ TESTING("creation of group under the root group");
+
+ /* Skip unless the connector implements basic file and group operations */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or group aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ /* Create the new group off of the file's root group */
+ if ((new_gid = H5Gcreate2(file_id, GROUP_CREATE_UNDER_ROOT_GNAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", GROUP_CREATE_UNDER_ROOT_GNAME);
+ goto error;
+ }
+
+ if (H5Gclose(new_gid) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup; close errors are suppressed */
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(new_gid);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a group can be created under an existing
+ * group which is not the root group.
+ */
+static int
+test_create_group_under_existing_group(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t parent_group_id = H5I_INVALID_HID, child_group_id = H5I_INVALID_HID,
+ grandchild_group_id = H5I_INVALID_HID;
+
+ TESTING("creation of group under existing group using a relative path");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or group aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file\n");
+ goto error;
+ }
+
+ /* Open the already-existing group (/group_tests) in the file as the parent */
+ if ((parent_group_id = H5Gopen2(file_id, GROUP_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open group\n");
+ goto error;
+ }
+
+ /* Create a new group (/group_tests/child_group) under the already-existing parent Group using a relative
+ * path */
+ if ((child_group_id = H5Gcreate2(parent_group_id, GROUP_CREATE_UNDER_GROUP_REL_GNAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group using relative path: %s\n", GROUP_CREATE_UNDER_GROUP_REL_GNAME);
+ goto error;
+ }
+
+ /* Create a new group (child_group/grandchild_group) under the already-existing parent Group using an
+ * absolute path.
+ * NOTE(review): presumably GROUP_CREATE_UNDER_GROUP_ABS_GNAME expands to an absolute
+ * path string -- confirm against H5_api_group_test.h */
+ if ((grandchild_group_id = H5Gcreate2(parent_group_id, GROUP_CREATE_UNDER_GROUP_ABS_GNAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group using absolute path: %s\n", GROUP_CREATE_UNDER_GROUP_ABS_GNAME);
+ goto error;
+ }
+
+ if (H5Gclose(grandchild_group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(child_group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(parent_group_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(grandchild_group_id);
+ H5Gclose(child_group_id);
+ H5Gclose(parent_group_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to create many (one million) groups.  Creates
+ * GROUP_NUMB_MANY sibling groups under one parent group,
+ * printing a running progress counter.  Returns 0 on
+ * success/skip, 1 on failure.
+ */
+static int
+test_create_many_groups(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t parent_group_id = H5I_INVALID_HID, child_group_id = H5I_INVALID_HID;
+ char group_name[NAME_BUF_SIZE];
+ unsigned i;
+
+ TESTING("H5Gcreate many groups");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or group aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, GROUP_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group\n");
+ goto error;
+ }
+
+ if ((parent_group_id = H5Gcreate2(container_group, MANY_GROUP_CREATIONS_GNAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", MANY_GROUP_CREATIONS_GNAME);
+ goto error;
+ }
+
+ /* Create multiple groups under the parent group */
+ HDprintf("\n");
+ for (i = 0; i < GROUP_NUMB_MANY; i++) {
+ HDprintf("\r %u/%u", i + 1, GROUP_NUMB_MANY);
+ /* snprintf bounds the write to the name buffer (sprintf was unbounded) */
+ snprintf(group_name, sizeof(group_name), "group %02u", i);
+ if ((child_group_id =
+ H5Gcreate2(parent_group_id, group_name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", group_name);
+ goto error;
+ }
+
+ if (H5Gclose(child_group_id) < 0)
+ TEST_ERROR;
+ }
+
+ if (H5Gclose(parent_group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(child_group_id);
+ H5Gclose(parent_group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to create groups of the depth GROUP_DEPTH.  The actual
+ * nesting is done by the recursive helper create_group_recursive.
+ */
+static int
+test_create_deep_groups(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+
+ TESTING("H5Gcreate groups of great depths");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or group aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, GROUP_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group\n");
+ goto error;
+ }
+
+ /* Create the group under the root group of the file */
+ if ((group_id = H5Gcreate2(container_group, DEEP_GROUP_CREATIONS_GNAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", DEEP_GROUP_CREATIONS_GNAME);
+ goto error;
+ }
+
+ /* Recursively create the nested chain, starting at depth 1 */
+ HDprintf("\n");
+ if (create_group_recursive(group_id, 1) < 0)
+ TEST_ERROR;
+
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * Recursive helper for test_create_deep_groups: creates a chain
+ * of nested groups until the depth GROUP_DEPTH is reached.
+ * `counter` is the 1-based depth of the group being created.
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+create_group_recursive(hid_t parent_gid, unsigned counter)
+{
+ hid_t child_gid = H5I_INVALID_HID;
+ char gname[NAME_BUF_SIZE];
+
+ HDprintf("\r %u/%u", counter, GROUP_DEPTH);
+ if (counter == 1)
+ snprintf(gname, sizeof(gname), "2nd_child_group");
+ else if (counter == 2)
+ snprintf(gname, sizeof(gname), "3rd_child_group");
+ else
+ /* %u: `counter` is unsigned, so the former %d specifier mismatched
+ * the argument type; snprintf also bounds the write */
+ snprintf(gname, sizeof(gname), "%uth_child_group", counter + 1);
+ if ((child_gid = H5Gcreate2(parent_gid, gname, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", gname);
+ goto error;
+ }
+
+ /* Recurse until the target depth has been created */
+ if (counter < GROUP_DEPTH) {
+ if (create_group_recursive(child_gid, counter + 1) < 0)
+ TEST_ERROR;
+ }
+
+ if (H5Gclose(child_gid) < 0)
+ TEST_ERROR;
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(child_gid);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to create groups automatically using
+ * H5Pset_create_intermediate_group.  Exercises a relative path,
+ * an absolute path, and an absolute path with two missing
+ * intermediate components, verifying after each create that
+ * every component group can be opened.
+ */
+static int
+test_create_intermediate_group(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t crt_intmd_lcpl_id = H5I_INVALID_HID;
+
+ TESTING("H5Gcreate group with intermediate group creation");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or group aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, GROUP_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group\n");
+ goto error;
+ }
+
+ /* Set up plist for creating intermediate groups */
+ if ((crt_intmd_lcpl_id = H5Pcreate(H5P_LINK_CREATE)) < 0)
+ TEST_ERROR;
+ if (H5Pset_create_intermediate_group(crt_intmd_lcpl_id, TRUE) < 0)
+ TEST_ERROR;
+
+ /* Create an intermediate group using a relative path */
+ if ((group_id = H5Gcreate2(container_group,
+ GROUP_CREATE_INTMD_REL_INTMD_NAME "/" GROUP_CREATE_INTMD_REL_END_NAME,
+ crt_intmd_lcpl_id, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ group_id = H5I_INVALID_HID;
+
+ /* Verify both groups were created */
+ if ((group_id =
+ H5Gopen2(file_id, GROUP_TEST_GROUP_NAME "/" GROUP_CREATE_INTMD_REL_INTMD_NAME, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ group_id = H5I_INVALID_HID;
+ if ((group_id = H5Gopen2(file_id,
+ GROUP_TEST_GROUP_NAME "/" GROUP_CREATE_INTMD_REL_INTMD_NAME
+ "/" GROUP_CREATE_INTMD_REL_END_NAME,
+ H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ group_id = H5I_INVALID_HID;
+
+ /* Create an intermediate group using an absolute path */
+ if ((group_id = H5Gcreate2(container_group,
+ "/" GROUP_TEST_GROUP_NAME "/" GROUP_CREATE_INTMD_ABS_INTMD_NAME
+ "/" GROUP_CREATE_INTMD_ABS_END_NAME,
+ crt_intmd_lcpl_id, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ group_id = H5I_INVALID_HID;
+
+ /* Verify both groups were created */
+ if ((group_id =
+ H5Gopen2(file_id, GROUP_TEST_GROUP_NAME "/" GROUP_CREATE_INTMD_ABS_INTMD_NAME, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ group_id = H5I_INVALID_HID;
+ if ((group_id = H5Gopen2(file_id,
+ GROUP_TEST_GROUP_NAME "/" GROUP_CREATE_INTMD_ABS_INTMD_NAME
+ "/" GROUP_CREATE_INTMD_ABS_END_NAME,
+ H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ group_id = H5I_INVALID_HID;
+
+ /* Create two intermediate groups using an absolute path */
+ if ((group_id = H5Gcreate2(container_group,
+ "/" GROUP_TEST_GROUP_NAME "/" GROUP_CREATE_INTMD_MULT_INTMD1_NAME
+ "/" GROUP_CREATE_INTMD_MULT_INTMD2_NAME "/" GROUP_CREATE_INTMD_MULT_END_NAME,
+ crt_intmd_lcpl_id, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ group_id = H5I_INVALID_HID;
+
+ /* Verify all three groups were created */
+ if ((group_id = H5Gopen2(file_id, GROUP_TEST_GROUP_NAME "/" GROUP_CREATE_INTMD_MULT_INTMD1_NAME,
+ H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ group_id = H5I_INVALID_HID;
+ if ((group_id = H5Gopen2(file_id,
+ GROUP_TEST_GROUP_NAME "/" GROUP_CREATE_INTMD_MULT_INTMD1_NAME
+ "/" GROUP_CREATE_INTMD_MULT_INTMD2_NAME,
+ H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ group_id = H5I_INVALID_HID;
+ if ((group_id = H5Gopen2(file_id,
+ GROUP_TEST_GROUP_NAME "/" GROUP_CREATE_INTMD_MULT_INTMD1_NAME
+ "/" GROUP_CREATE_INTMD_MULT_INTMD2_NAME
+ "/" GROUP_CREATE_INTMD_MULT_END_NAME,
+ H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ group_id = H5I_INVALID_HID;
+
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+ if (H5Pclose(crt_intmd_lcpl_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ H5Pclose(crt_intmd_lcpl_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a group can't be created when H5Gcreate
+ * is passed invalid parameters.
+ */
+static int
+test_create_group_invalid_params(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("H5Gcreate with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or group aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ PASSED();
+
+ /* Each part below passes exactly one invalid argument to H5Gcreate2. The
+ * expected failure's error stack is suppressed with H5E_BEGIN_TRY/H5E_END_TRY,
+ * and a non-negative returned ID means the call wrongly succeeded.
+ */
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Gcreate_invalid_loc_id)
+ {
+ TESTING_2("H5Gcreate with an invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ group_id = H5Gcreate2(H5I_INVALID_HID, GROUP_CREATE_INVALID_PARAMS_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created group with invalid loc_id!\n");
+ H5Gclose(group_id);
+ PART_ERROR(H5Gcreate_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gcreate_invalid_loc_id);
+
+ PART_BEGIN(H5Gcreate_invalid_grp_name)
+ {
+ TESTING_2("H5Gcreate with an invalid group name");
+
+ H5E_BEGIN_TRY
+ {
+ group_id = H5Gcreate2(file_id, NULL, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created group with a NULL name!\n");
+ H5Gclose(group_id);
+ PART_ERROR(H5Gcreate_invalid_grp_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ group_id = H5Gcreate2(file_id, "", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created group with an invalid group name of ''!\n");
+ H5Gclose(group_id);
+ PART_ERROR(H5Gcreate_invalid_grp_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gcreate_invalid_grp_name);
+
+ PART_BEGIN(H5Gcreate_invalid_lcpl)
+ {
+ TESTING_2("H5Gcreate with an invalid LCPL");
+
+ H5E_BEGIN_TRY
+ {
+ group_id = H5Gcreate2(file_id, GROUP_CREATE_INVALID_PARAMS_GROUP_NAME, H5I_INVALID_HID,
+ H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created group with invalid LCPL!\n");
+ H5Gclose(group_id);
+ PART_ERROR(H5Gcreate_invalid_lcpl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gcreate_invalid_lcpl);
+
+ PART_BEGIN(H5Gcreate_invalid_gcpl)
+ {
+ TESTING_2("H5Gcreate with an invalid GCPL");
+
+ H5E_BEGIN_TRY
+ {
+ group_id = H5Gcreate2(file_id, GROUP_CREATE_INVALID_PARAMS_GROUP_NAME, H5P_DEFAULT,
+ H5I_INVALID_HID, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created group with invalid GCPL!\n");
+ H5Gclose(group_id);
+ PART_ERROR(H5Gcreate_invalid_gcpl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gcreate_invalid_gcpl);
+
+ PART_BEGIN(H5Gcreate_invalid_gapl)
+ {
+ TESTING_2("H5Gcreate with an invalid GAPL");
+
+ H5E_BEGIN_TRY
+ {
+ group_id = H5Gcreate2(file_id, GROUP_CREATE_INVALID_PARAMS_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created group with invalid GAPL!\n");
+ H5Gclose(group_id);
+ PART_ERROR(H5Gcreate_invalid_gapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gcreate_invalid_gapl);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort release of any IDs still open after a failed part */
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(group_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that an anonymous group can be created with
+ * H5Gcreate_anon.
+ */
+static int
+test_create_anonymous_group(void)
+{
+ hid_t fid = H5I_INVALID_HID;
+ hid_t parent_gid = H5I_INVALID_HID, anon_gid = H5I_INVALID_HID;
+
+ TESTING("creation of anonymous group");
+
+ /* Skip when the connector lacks basic file or group support */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or group aren't supported with this connector\n");
+ return 0;
+ }
+
+ /* Open the shared API test file read-write */
+ if ((fid = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file\n");
+ goto error;
+ }
+
+ /* Open the container group used by the group tests */
+ if ((parent_gid = H5Gopen2(fid, GROUP_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open group\n");
+ goto error;
+ }
+
+ /* Create a group without linking it into the file's namespace */
+ if ((anon_gid = H5Gcreate_anon(fid, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create anonymous group\n");
+ goto error;
+ }
+
+ /* Tear down in reverse order of acquisition */
+ if (H5Gclose(anon_gid) < 0)
+ TEST_ERROR;
+ if (H5Gclose(parent_gid) < 0)
+ TEST_ERROR;
+ if (H5Fclose(fid) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup of whatever was opened before the failure */
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(anon_gid);
+ H5Gclose(parent_gid);
+ H5Fclose(fid);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that an anonymous group can't be created
+ * when H5Gcreate_anon is passed invalid parameters.
+ */
+static int
+test_create_anonymous_group_invalid_params(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, new_group_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("H5Gcreate_anon with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or group aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file\n");
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, GROUP_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open group\n");
+ goto error;
+ }
+
+ PASSED();
+
+ /* Each part below passes exactly one invalid argument to H5Gcreate_anon. The
+ * expected failure's error stack is suppressed, and a non-negative returned ID
+ * means the call wrongly succeeded.
+ */
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Gcreate_anon_invalid_loc_id)
+ {
+ TESTING_2("H5Gcreate_anon with an invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ new_group_id = H5Gcreate_anon(H5I_INVALID_HID, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (new_group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created anonymous group with invalid loc_id!\n");
+ H5Gclose(new_group_id);
+ PART_ERROR(H5Gcreate_anon_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gcreate_anon_invalid_loc_id);
+
+ PART_BEGIN(H5Gcreate_anon_invalid_gcpl)
+ {
+ TESTING_2("H5Gcreate_anon with an invalid GCPL");
+
+ H5E_BEGIN_TRY
+ {
+ new_group_id = H5Gcreate_anon(container_group, H5I_INVALID_HID, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (new_group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created anonymous group with invalid GCPL!\n");
+ H5Gclose(new_group_id);
+ PART_ERROR(H5Gcreate_anon_invalid_gcpl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gcreate_anon_invalid_gcpl);
+
+ PART_BEGIN(H5Gcreate_anon_invalid_gapl)
+ {
+ TESTING_2("H5Gcreate_anon with an invalid GAPL");
+
+ H5E_BEGIN_TRY
+ {
+ new_group_id = H5Gcreate_anon(container_group, H5P_DEFAULT, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (new_group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" created anonymous group with invalid GAPL!\n");
+ H5Gclose(new_group_id);
+ PART_ERROR(H5Gcreate_anon_invalid_gapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gcreate_anon_invalid_gapl);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort release of any IDs still open after a failed part */
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(new_group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a group which doesn't exist cannot
+ * be opened.
+ */
+static int
+test_open_nonexistent_group(void)
+{
+ hid_t fid = H5I_INVALID_HID;
+ hid_t missing_gid = H5I_INVALID_HID;
+
+ TESTING("for invalid opening of a nonexistent group");
+
+ /* Skip when the connector lacks basic file or group support */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or group aren't supported with this connector\n");
+ return 0;
+ }
+
+ /* Open the shared API test file read-write */
+ if ((fid = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file\n");
+ goto error;
+ }
+
+ /* Try to open a group that was never created; suppress the expected error stack */
+ H5E_BEGIN_TRY
+ {
+ missing_gid = H5Gopen2(fid, OPEN_NONEXISTENT_GROUP_TEST_GNAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ /* A non-negative ID means H5Gopen2 wrongly succeeded */
+ if (missing_gid >= 0) {
+ H5_FAILED();
+ HDprintf(" opened non-existent group!\n");
+ goto error;
+ }
+
+ if (H5Fclose(fid) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup of whatever was opened before the failure */
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(missing_gid);
+ H5Fclose(fid);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a group can't be opened when H5Gopen
+ * is passed invalid parameters.
+ */
+static int
+test_open_group_invalid_params(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("H5Gopen with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or group aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ PASSED();
+
+ /* Each part below passes exactly one invalid argument to H5Gopen2. The
+ * expected failure's error stack is suppressed, and a non-negative returned ID
+ * means the call wrongly succeeded.
+ */
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Gopen_invalid_loc_id)
+ {
+ TESTING_2("H5Gopen with an invalid loc_id");
+
+ H5E_BEGIN_TRY
+ {
+ group_id = H5Gopen2(H5I_INVALID_HID, GROUP_TEST_GROUP_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened group using an invalid loc_id!\n");
+ H5Gclose(group_id);
+ PART_ERROR(H5Gopen_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gopen_invalid_loc_id);
+
+ PART_BEGIN(H5Gopen_invalid_grp_name)
+ {
+ TESTING_2("H5Gopen with an invalid group name");
+
+ H5E_BEGIN_TRY
+ {
+ group_id = H5Gopen2(file_id, NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened group using a NULL name!\n");
+ H5Gclose(group_id);
+ PART_ERROR(H5Gopen_invalid_grp_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ group_id = H5Gopen2(file_id, "", H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened group using an invalid name of ''!\n");
+ H5Gclose(group_id);
+ PART_ERROR(H5Gopen_invalid_grp_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gopen_invalid_grp_name);
+
+ PART_BEGIN(H5Gopen_invalid_gapl)
+ {
+ TESTING_2("H5Gopen with an invalid GAPL");
+
+ H5E_BEGIN_TRY
+ {
+ group_id = H5Gopen2(file_id, GROUP_TEST_GROUP_NAME, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" opened group using an invalid GAPL!\n");
+ H5Gclose(group_id);
+ PART_ERROR(H5Gopen_invalid_gapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gopen_invalid_gapl);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort release of any IDs still open after a failed part */
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(group_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that H5Gclose doesn't succeed for an
+ * invalid group ID.
+ */
+static int
+test_close_group_invalid_id(void)
+{
+ herr_t status = -1;
+
+ TESTING("H5Gclose with an invalid group ID");
+
+ /* Skip when the connector lacks basic group support */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic group aren't supported with this connector\n");
+ return 0;
+ }
+
+ /* Closing an invalid ID must fail; suppress the expected error stack */
+ H5E_BEGIN_TRY
+ {
+ status = H5Gclose(H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ /* A non-negative return value means H5Gclose wrongly succeeded */
+ if (status >= 0) {
+ H5_FAILED();
+ HDprintf(" close a group with an invalid ID!\n");
+ goto error;
+ }
+
+ PASSED();
+
+ return 0;
+
+error:
+ return 1;
+}
+
+/*
+ * A test to check that a GCPL used for group creation can
+ * be persisted and that a valid copy of that GCPL can be
+ * retrieved later with a call to H5Gget_create_plist.
+ */
+static int
+test_group_property_lists(void)
+{
+ unsigned dummy_prop_val = GROUP_PROPERTY_LIST_TEST_DUMMY_VAL;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id1 = H5I_INVALID_HID, group_id2 = H5I_INVALID_HID;
+ hid_t gcpl_id1 = H5I_INVALID_HID, gcpl_id2 = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("group property list operations");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, property list, creation order aren't supported "
+ "with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file\n");
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, GROUP_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group\n");
+ goto error;
+ }
+
+ if ((gcpl_id1 = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create GCPL\n");
+ goto error;
+ }
+
+ /* Use the link creation order flags as a marker property: its presence
+ * later distinguishes group 1's GCPL from group 2's default GCPL.
+ */
+ if (H5Pset_link_creation_order(gcpl_id1, dummy_prop_val) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set property on GCPL\n");
+ goto error;
+ }
+
+ /* Create the group in the file */
+ if ((group_id1 = H5Gcreate2(container_group, GROUP_PROPERTY_LIST_TEST_GROUP_NAME1, H5P_DEFAULT, gcpl_id1,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group\n");
+ goto error;
+ }
+
+ /* Create the second group using H5P_DEFAULT for the GCPL */
+ if ((group_id2 = H5Gcreate2(container_group, GROUP_PROPERTY_LIST_TEST_GROUP_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group\n");
+ goto error;
+ }
+
+ /* The original GCPL is no longer needed; gcpl_id1 is reused below for the
+ * copies retrieved with H5Gget_create_plist.
+ */
+ if (H5Pclose(gcpl_id1) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Gget_create_plist)
+ {
+ TESTING_2("H5Gget_create_plist");
+
+ /* Try to retrieve copies of the two property lists, one which has the property set and one which
+ * does not */
+ if ((gcpl_id1 = H5Gget_create_plist(group_id1)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get GCPL\n");
+ PART_ERROR(H5Gget_create_plist);
+ }
+
+ if ((gcpl_id2 = H5Gget_create_plist(group_id2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get GCPL\n");
+ PART_ERROR(H5Gget_create_plist);
+ }
+
+ /* Ensure that property list 1 has the property set and property list 2 does not */
+ dummy_prop_val = 0;
+
+ if (H5Pget_link_creation_order(gcpl_id1, &dummy_prop_val) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve GCPL property value\n");
+ PART_ERROR(H5Gget_create_plist);
+ }
+
+ if (dummy_prop_val != GROUP_PROPERTY_LIST_TEST_DUMMY_VAL) {
+ H5_FAILED();
+ HDprintf(" retrieved GCPL property value '%llu' did not match expected value '%llu'\n",
+ (unsigned long long)dummy_prop_val,
+ (unsigned long long)GROUP_PROPERTY_LIST_TEST_DUMMY_VAL);
+ PART_ERROR(H5Gget_create_plist);
+ }
+
+ dummy_prop_val = 0;
+
+ if (H5Pget_link_creation_order(gcpl_id2, &dummy_prop_val) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve GCPL property value\n");
+ PART_ERROR(H5Gget_create_plist);
+ }
+
+ if (dummy_prop_val == GROUP_PROPERTY_LIST_TEST_DUMMY_VAL) {
+ H5_FAILED();
+ HDprintf(" retrieved GCPL property value '%llu' matched control value '%llu' when it "
+ "shouldn't have\n",
+ (unsigned long long)dummy_prop_val,
+ (unsigned long long)GROUP_PROPERTY_LIST_TEST_DUMMY_VAL);
+ PART_ERROR(H5Gget_create_plist);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gget_create_plist);
+
+ /* Now see if we can still retrieve copies of the property lists upon opening
+ * (instead of creating) a group. If they were reconstructed properly upon file
+ * open, the creation property lists should also have the same test values
+ * as set before.
+ */
+ if (gcpl_id1 >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(gcpl_id1);
+ }
+ H5E_END_TRY;
+ gcpl_id1 = H5I_INVALID_HID;
+ }
+ if (gcpl_id2 >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(gcpl_id2);
+ }
+ H5E_END_TRY;
+ gcpl_id2 = H5I_INVALID_HID;
+ }
+ if (group_id1 >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(group_id1);
+ }
+ H5E_END_TRY;
+ group_id1 = H5I_INVALID_HID;
+ }
+ if (group_id2 >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(group_id2);
+ }
+ H5E_END_TRY;
+ group_id2 = H5I_INVALID_HID;
+ }
+
+ PART_BEGIN(H5Gget_create_plist_reopened)
+ {
+ TESTING_2("H5Gget_create_plist after re-opening a group");
+
+ if ((group_id1 = H5Gopen2(container_group, GROUP_PROPERTY_LIST_TEST_GROUP_NAME1, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open group\n");
+ PART_ERROR(H5Gget_create_plist_reopened);
+ }
+
+ if ((group_id2 = H5Gopen2(container_group, GROUP_PROPERTY_LIST_TEST_GROUP_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open group\n");
+ PART_ERROR(H5Gget_create_plist_reopened);
+ }
+
+ if ((gcpl_id1 = H5Gget_create_plist(group_id1)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get property list\n");
+ PART_ERROR(H5Gget_create_plist_reopened);
+ }
+
+ if ((gcpl_id2 = H5Gget_create_plist(group_id2)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get property list\n");
+ PART_ERROR(H5Gget_create_plist_reopened);
+ }
+
+ /* Re-check the property values */
+ dummy_prop_val = 0;
+
+ if (H5Pget_link_creation_order(gcpl_id1, &dummy_prop_val) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve GCPL property value\n");
+ PART_ERROR(H5Gget_create_plist_reopened);
+ }
+
+ if (dummy_prop_val != GROUP_PROPERTY_LIST_TEST_DUMMY_VAL) {
+ H5_FAILED();
+ HDprintf(" retrieved GCPL property value '%llu' did not match expected value '%llu'\n",
+ (unsigned long long)dummy_prop_val,
+ (unsigned long long)GROUP_PROPERTY_LIST_TEST_DUMMY_VAL);
+ PART_ERROR(H5Gget_create_plist_reopened);
+ }
+
+ dummy_prop_val = 0;
+
+ if (H5Pget_link_creation_order(gcpl_id2, &dummy_prop_val) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve GCPL property value\n");
+ PART_ERROR(H5Gget_create_plist_reopened);
+ }
+
+ if (dummy_prop_val == GROUP_PROPERTY_LIST_TEST_DUMMY_VAL) {
+ H5_FAILED();
+ HDprintf(" retrieved GCPL property value '%llu' matched control value '%llu' when it "
+ "shouldn't have\n",
+ (unsigned long long)dummy_prop_val,
+ (unsigned long long)GROUP_PROPERTY_LIST_TEST_DUMMY_VAL);
+ PART_ERROR(H5Gget_create_plist_reopened);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gget_create_plist_reopened);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Pclose(gcpl_id1) < 0)
+ TEST_ERROR;
+ if (H5Pclose(gcpl_id2) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id1) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id2) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort release of any IDs still open after a failed part */
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(gcpl_id1);
+ H5Pclose(gcpl_id2);
+ H5Gclose(group_id1);
+ H5Gclose(group_id2);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test for the functionality of H5Gget_info(_by_idx).
+ */
+static int
+test_get_group_info(void)
+{
+ H5G_info_t group_info;
+ unsigned i;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t parent_group_id = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+ char group_name[NAME_BUF_SIZE];
+
+ TESTING_MULTIPART("retrieval of group info");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or group, creation order aren't supported with this "
+ "connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, GROUP_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group\n");
+ goto error;
+ }
+
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create a GCPL\n");
+ goto error;
+ }
+
+ if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't enable link creation order tracking and indexing on GCPL\n");
+ goto error;
+ }
+
+ if ((parent_group_id = H5Gcreate2(container_group, GROUP_GET_INFO_TEST_GROUP_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", GROUP_GET_INFO_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ /* Create multiple groups under the parent group */
+ for (i = 0; i < GROUP_GET_INFO_TEST_GROUP_NUMB; i++) {
+ /* Create the groups with a reverse-ordering naming scheme to test creation order */
+ HDsnprintf(group_name, NAME_BUF_SIZE, "group %02u",
+ (unsigned)(GROUP_GET_INFO_TEST_GROUP_NUMB - i - 1));
+
+ if ((group_id = H5Gcreate2(parent_group_id, group_name, H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", group_name);
+ goto error;
+ }
+
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Gget_info)
+ {
+ TESTING_2("retrieval of group info with H5Gget_info");
+
+ memset(&group_info, 0, sizeof(group_info));
+
+ /* Retrieve information about the parent group */
+ if (H5Gget_info(parent_group_id, &group_info) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get group info\n");
+ PART_ERROR(H5Gget_info);
+ }
+
+ if (group_info.nlinks != GROUP_GET_INFO_TEST_GROUP_NUMB) {
+ H5_FAILED();
+ HDprintf(" group's number of links '%lu' doesn't match expected value '%u'\n",
+ group_info.nlinks, (unsigned int)GROUP_GET_INFO_TEST_GROUP_NUMB);
+ PART_ERROR(H5Gget_info);
+ }
+
+ /*
+ * For the purpose of this test, the max creation order should match
+ * the number of links in the group.
+ */
+ if (group_info.max_corder != GROUP_GET_INFO_TEST_GROUP_NUMB) {
+ H5_FAILED();
+ HDprintf(" group's max creation order '%lld' doesn't match expected value '%lld'\n",
+ (long long)group_info.max_corder, (long long)GROUP_GET_INFO_TEST_GROUP_NUMB);
+ PART_ERROR(H5Gget_info);
+ }
+
+ /* Ensure that the storage_type field is at least set to a meaningful value */
+ if (group_info.storage_type != H5G_STORAGE_TYPE_SYMBOL_TABLE &&
+ group_info.storage_type != H5G_STORAGE_TYPE_COMPACT &&
+ group_info.storage_type != H5G_STORAGE_TYPE_DENSE &&
+ group_info.storage_type != H5G_STORAGE_TYPE_UNKNOWN) {
+ H5_FAILED();
+ HDprintf(" group info's 'storage_type' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Gget_info);
+ }
+
+ /* Assume that mounted should be FALSE in this case */
+ if (group_info.mounted != FALSE) {
+ H5_FAILED();
+ HDprintf(" group info's 'mounted' field was TRUE when it should have been FALSE\n");
+ PART_ERROR(H5Gget_info);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gget_info);
+
+ PART_BEGIN(H5Gget_info_by_name)
+ {
+ TESTING_2("retrieval of group info with H5Gget_info_by_name");
+
+ memset(&group_info, 0, sizeof(group_info));
+
+ /* Retrieve information about the parent group */
+ if (H5Gget_info_by_name(container_group, GROUP_GET_INFO_TEST_GROUP_NAME, &group_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get group info by name\n");
+ PART_ERROR(H5Gget_info_by_name);
+ }
+
+ if (group_info.nlinks != GROUP_GET_INFO_TEST_GROUP_NUMB) {
+ H5_FAILED();
+ HDprintf(" group's number of links '%lu' doesn't match expected value '%u'\n",
+ group_info.nlinks, (unsigned int)GROUP_GET_INFO_TEST_GROUP_NUMB);
+ PART_ERROR(H5Gget_info_by_name);
+ }
+
+ /*
+ * For the purpose of this test, the max creation order should match
+ * the number of links in the group.
+ */
+ if (group_info.max_corder != GROUP_GET_INFO_TEST_GROUP_NUMB) {
+ H5_FAILED();
+ HDprintf(" group's max creation order '%lld' doesn't match expected value '%lld'\n",
+ (long long)group_info.max_corder, (long long)GROUP_GET_INFO_TEST_GROUP_NUMB);
+ PART_ERROR(H5Gget_info_by_name);
+ }
+
+ /* Ensure that the storage_type field is at least set to a meaningful value */
+ if (group_info.storage_type != H5G_STORAGE_TYPE_SYMBOL_TABLE &&
+ group_info.storage_type != H5G_STORAGE_TYPE_COMPACT &&
+ group_info.storage_type != H5G_STORAGE_TYPE_DENSE &&
+ group_info.storage_type != H5G_STORAGE_TYPE_UNKNOWN) {
+ H5_FAILED();
+ HDprintf(" group info's 'storage_type' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Gget_info_by_name);
+ }
+
+ /* Assume that mounted should be FALSE in this case */
+ if (group_info.mounted != FALSE) {
+ H5_FAILED();
+ HDprintf(" group info's 'mounted' field was TRUE when it should have been FALSE\n");
+ PART_ERROR(H5Gget_info_by_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gget_info_by_name);
+
+ PART_BEGIN(H5Gget_info_by_idx_crt_order_increasing)
+ {
+ TESTING_2("H5Gget_info_by_idx by creation order in increasing order");
+
+ for (i = 0; i < GROUP_GET_INFO_TEST_GROUP_NUMB; i++) {
+ memset(&group_info, 0, sizeof(group_info));
+
+ /* Retrieve information about each group under the parent group */
+ if (H5Gget_info_by_idx(container_group, GROUP_GET_INFO_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+ H5_ITER_INC, (hsize_t)i, &group_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get group info for group at index %u\n", i);
+ PART_ERROR(H5Gget_info_by_idx_crt_order_increasing);
+ }
+
+ if (group_info.nlinks != 0) {
+ H5_FAILED();
+ HDprintf(" group's number of links '%lu' doesn't match expected value '%d'\n",
+ group_info.nlinks, 0);
+ PART_ERROR(H5Gget_info_by_idx_crt_order_increasing);
+ }
+
+ if (group_info.max_corder != 0) {
+ H5_FAILED();
+ HDprintf(" group's max creation order '%lld' doesn't match expected value '%d'\n",
+ (long long)group_info.max_corder, 0);
+ PART_ERROR(H5Gget_info_by_idx_crt_order_increasing);
+ }
+
+ /* Ensure that the storage_type field is at least set to a meaningful value */
+ if (group_info.storage_type != H5G_STORAGE_TYPE_SYMBOL_TABLE &&
+ group_info.storage_type != H5G_STORAGE_TYPE_COMPACT &&
+ group_info.storage_type != H5G_STORAGE_TYPE_DENSE &&
+ group_info.storage_type != H5G_STORAGE_TYPE_UNKNOWN) {
+ H5_FAILED();
+ HDprintf(" group info's 'storage_type' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Gget_info_by_idx_crt_order_increasing);
+ }
+
+ /* Assume that mounted should be FALSE in this case */
+ if (group_info.mounted != FALSE) {
+ H5_FAILED();
+ HDprintf(" group info's 'mounted' field was TRUE when it should have been FALSE\n");
+ PART_ERROR(H5Gget_info_by_idx_crt_order_increasing);
+ }
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gget_info_by_idx_crt_order_increasing);
+
+ PART_BEGIN(H5Gget_info_by_idx_crt_order_decreasing)
+ {
+ TESTING_2("H5Gget_info_by_idx by creation order in decreasing order");
+
+ for (i = 0; i < GROUP_GET_INFO_TEST_GROUP_NUMB; i++) {
+ memset(&group_info, 0, sizeof(group_info));
+
+ /* Retrieve information about each group under the parent group */
+ if (H5Gget_info_by_idx(container_group, GROUP_GET_INFO_TEST_GROUP_NAME, H5_INDEX_CRT_ORDER,
+ H5_ITER_DEC, (hsize_t)i, &group_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get group info for group at index %u\n", i);
+ PART_ERROR(H5Gget_info_by_idx_crt_order_decreasing);
+ }
+
+ if (group_info.nlinks != 0) {
+ H5_FAILED();
+ HDprintf(" group's number of links '%lu' doesn't match expected value '%d'\n",
+ group_info.nlinks, 0);
+ PART_ERROR(H5Gget_info_by_idx_crt_order_decreasing);
+ }
+
+ if (group_info.max_corder != 0) {
+ H5_FAILED();
+ HDprintf(" group's max creation order '%lld' doesn't match expected value '%d'\n",
+ (long long)group_info.max_corder, 0);
+ PART_ERROR(H5Gget_info_by_idx_crt_order_decreasing);
+ }
+
+ /* Ensure that the storage_type field is at least set to a meaningful value */
+ if (group_info.storage_type != H5G_STORAGE_TYPE_SYMBOL_TABLE &&
+ group_info.storage_type != H5G_STORAGE_TYPE_COMPACT &&
+ group_info.storage_type != H5G_STORAGE_TYPE_DENSE &&
+ group_info.storage_type != H5G_STORAGE_TYPE_UNKNOWN) {
+ H5_FAILED();
+ HDprintf(" group info's 'storage_type' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Gget_info_by_idx_crt_order_decreasing);
+ }
+
+ /* Assume that mounted should be FALSE in this case */
+ if (group_info.mounted != FALSE) {
+ H5_FAILED();
+ HDprintf(" group info's 'mounted' field was TRUE when it should have been FALSE\n");
+ PART_ERROR(H5Gget_info_by_idx_crt_order_decreasing);
+ }
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gget_info_by_idx_crt_order_decreasing);
+
+ PART_BEGIN(H5Gget_info_by_idx_name_order_increasing)
+ {
+ TESTING_2("H5Gget_info_by_idx by alphabetical order in increasing order");
+
+ for (i = 0; i < GROUP_GET_INFO_TEST_GROUP_NUMB; i++) {
+ memset(&group_info, 0, sizeof(group_info));
+
+ /* Retrieve information about each group under the parent group */
+ if (H5Gget_info_by_idx(container_group, GROUP_GET_INFO_TEST_GROUP_NAME, H5_INDEX_NAME,
+ H5_ITER_INC, (hsize_t)i, &group_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get group info for group at index %u\n", i);
+ PART_ERROR(H5Gget_info_by_idx_name_order_increasing);
+ }
+
+ if (group_info.nlinks != 0) {
+ H5_FAILED();
+ HDprintf(" group's number of links '%lu' doesn't match expected value '%d'\n",
+ group_info.nlinks, 0);
+ PART_ERROR(H5Gget_info_by_idx_name_order_increasing);
+ }
+
+ if (group_info.max_corder != 0) {
+ H5_FAILED();
+ HDprintf(" group's max creation order '%lld' doesn't match expected value '%d'\n",
+ (long long)group_info.max_corder, 0);
+ PART_ERROR(H5Gget_info_by_idx_name_order_increasing);
+ }
+
+ /* Ensure that the storage_type field is at least set to a meaningful value */
+ if (group_info.storage_type != H5G_STORAGE_TYPE_SYMBOL_TABLE &&
+ group_info.storage_type != H5G_STORAGE_TYPE_COMPACT &&
+ group_info.storage_type != H5G_STORAGE_TYPE_DENSE &&
+ group_info.storage_type != H5G_STORAGE_TYPE_UNKNOWN) {
+ H5_FAILED();
+ HDprintf(" group info's 'storage_type' field wasn't set to a meaningful value\n");
+ PART_ERROR(H5Gget_info_by_idx_name_order_increasing);
+ }
+
+ /* Assume that mounted should be FALSE in this case */
+ if (group_info.mounted != FALSE) {
+ H5_FAILED();
+ HDprintf(" group info's 'mounted' field was TRUE when it should have been FALSE\n");
+ PART_ERROR(H5Gget_info_by_idx_name_order_increasing);
+ }
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gget_info_by_idx_name_order_increasing);
+
+        PART_BEGIN(H5Gget_info_by_idx_name_order_decreasing)
+        {
+            TESTING_2("H5Gget_info_by_idx by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            for (i = 0; i < GROUP_GET_INFO_TEST_GROUP_NUMB; i++) {
+                memset(&group_info, 0, sizeof(group_info));
+
+                /* Retrieve information about each group under the parent group */
+                if (H5Gget_info_by_idx(container_group, GROUP_GET_INFO_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                       H5_ITER_DEC, (hsize_t)i, &group_info, H5P_DEFAULT) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't get group info for group at index %u\n", i);
+                    PART_ERROR(H5Gget_info_by_idx_name_order_decreasing);
+                }
+
+                /* Both arguments must be cast to (long long) -- the original
+                 * code passed an un-cast hsize_t and a plain int literal for
+                 * two '%lld' conversions, which is undefined behavior for
+                 * variadic printf-style calls. */
+                if (group_info.nlinks != 0) {
+                    H5_FAILED();
+                    HDprintf("    group's number of links '%lld' doesn't match expected value '%lld'\n",
+                             (long long)group_info.nlinks, (long long)0);
+                    PART_ERROR(H5Gget_info_by_idx_name_order_decreasing);
+                }
+
+                if (group_info.max_corder != 0) {
+                    H5_FAILED();
+                    HDprintf("    group's max creation order '%lld' doesn't match expected value '%lld'\n",
+                             (long long)group_info.max_corder, (long long)0);
+                    PART_ERROR(H5Gget_info_by_idx_name_order_decreasing);
+                }
+
+                /* Ensure that the storage_type field is at least set to a meaningful value */
+                if (group_info.storage_type != H5G_STORAGE_TYPE_SYMBOL_TABLE &&
+                    group_info.storage_type != H5G_STORAGE_TYPE_COMPACT &&
+                    group_info.storage_type != H5G_STORAGE_TYPE_DENSE &&
+                    group_info.storage_type != H5G_STORAGE_TYPE_UNKNOWN) {
+                    H5_FAILED();
+                    HDprintf("    group info's 'storage_type' field wasn't set to a meaningful value\n");
+                    PART_ERROR(H5Gget_info_by_idx_name_order_decreasing);
+                }
+
+                /* Assume that mounted should be FALSE in this case */
+                if (group_info.mounted != FALSE) {
+                    H5_FAILED();
+                    HDprintf("    group info's 'mounted' field was TRUE when it should have been FALSE\n");
+                    PART_ERROR(H5Gget_info_by_idx_name_order_decreasing);
+                }
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Gget_info_by_idx_name_order_decreasing);
+#endif
+        }
+        PART_END(H5Gget_info_by_idx_name_order_decreasing);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(parent_group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(gcpl_id);
+ H5Gclose(parent_group_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a group's info can't be retrieved when
+ * H5Gget_info(_by_name/_by_idx) is passed invalid parameters.
+ *
+ * Each PART below passes exactly one invalid argument (an invalid hid_t,
+ * a NULL or empty name, an out-of-range index type or iteration order,
+ * a NULL info pointer, or an invalid LAPL) and expects the call to fail.
+ * Returns 0 on success (or skip), 1 if any PART failed.
+ */
+static int
+test_get_group_info_invalid_params(void)
+{
+    H5G_info_t group_info;
+    herr_t     err_ret = -1;
+    hid_t      file_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("retrieval of group info with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, more group, creation order aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /* --- H5Gget_info variants --- */
+        PART_BEGIN(H5Gget_info_invalid_loc_id)
+        {
+            TESTING_2("H5Gget_info with an invalid loc_id");
+
+            /* Errors are expected; suppress the error stack for the call */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Gget_info(H5I_INVALID_HID, &group_info);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info with an invalid loc_id!\n");
+                PART_ERROR(H5Gget_info_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Gget_info_invalid_loc_id);
+
+        PART_BEGIN(H5Gget_info_invalid_grp_info_pointer)
+        {
+            TESTING_2("H5Gget_info with an invalid group info pointer");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Gget_info(file_id, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info with invalid group info pointer!\n");
+                PART_ERROR(H5Gget_info_invalid_grp_info_pointer);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Gget_info_invalid_grp_info_pointer);
+
+        /* --- H5Gget_info_by_name variants --- */
+        PART_BEGIN(H5Gget_info_by_name_invalid_loc_id)
+        {
+            TESTING_2("H5Gget_info_by_name with an invalid loc_id");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Gget_info_by_name(H5I_INVALID_HID, ".", &group_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info_by_name with an invalid loc_id!\n");
+                PART_ERROR(H5Gget_info_by_name_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Gget_info_by_name_invalid_loc_id);
+
+        PART_BEGIN(H5Gget_info_by_name_invalid_grp_name)
+        {
+            TESTING_2("H5Gget_info_by_name with an invalid group name");
+
+            /* Both a NULL name and an empty name must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Gget_info_by_name(file_id, NULL, &group_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info_by_name with a NULL name!\n");
+                PART_ERROR(H5Gget_info_by_name_invalid_grp_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Gget_info_by_name(file_id, "", &group_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(
+                    "    retrieved info of group using H5Gget_info_by_name with an invalid name of ''!\n");
+                PART_ERROR(H5Gget_info_by_name_invalid_grp_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Gget_info_by_name_invalid_grp_name);
+
+        PART_BEGIN(H5Gget_info_by_name_invalid_grp_info_pointer)
+        {
+            TESTING_2("H5Gget_info_by_name with an invalid group info pointer");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Gget_info_by_name(file_id, ".", NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info_by_name with an invalid group info "
+                         "pointer!\n");
+                PART_ERROR(H5Gget_info_by_name_invalid_grp_info_pointer);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Gget_info_by_name_invalid_grp_info_pointer);
+
+        PART_BEGIN(H5Gget_info_by_name_invalid_lapl)
+        {
+            TESTING_2("H5Gget_info_by_name with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Gget_info_by_name(file_id, ".", &group_info, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info_by_name with an invalid LAPL!\n");
+                PART_ERROR(H5Gget_info_by_name_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Gget_info_by_name_invalid_lapl);
+
+        /* --- H5Gget_info_by_idx variants --- */
+        PART_BEGIN(H5Gget_info_by_idx_invalid_loc_id)
+        {
+            TESTING_2("H5Gget_info_by_idx with an invalid loc_id");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Gget_info_by_idx(H5I_INVALID_HID, ".", H5_INDEX_NAME, H5_ITER_INC, 0, &group_info,
+                                             H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info_by_idx with an invalid loc_id!\n");
+                PART_ERROR(H5Gget_info_by_idx_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Gget_info_by_idx_invalid_loc_id);
+
+        PART_BEGIN(H5Gget_info_by_idx_invalid_grp_name)
+        {
+            TESTING_2("H5Gget_info_by_idx with an invalid group name");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Gget_info_by_idx(file_id, NULL, H5_INDEX_NAME, H5_ITER_INC, 0, &group_info,
+                                             H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info_by_idx with a NULL group name!\n");
+                PART_ERROR(H5Gget_info_by_idx_invalid_grp_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Gget_info_by_idx(file_id, "", H5_INDEX_NAME, H5_ITER_INC, 0, &group_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info_by_idx with an invalid group name of "
+                         "''!\n");
+                PART_ERROR(H5Gget_info_by_idx_invalid_grp_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Gget_info_by_idx_invalid_grp_name);
+
+        PART_BEGIN(H5Gget_info_by_idx_invalid_index_type)
+        {
+            TESTING_2("H5Gget_info_by_idx with an invalid index type");
+
+            /* Both out-of-range sentinels of the H5_index_t enum are tried */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Gget_info_by_idx(file_id, ".", H5_INDEX_UNKNOWN, H5_ITER_INC, 0, &group_info,
+                                             H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info_by_idx with invalid index type "
+                         "H5_INDEX_UNKNOWN!\n");
+                PART_ERROR(H5Gget_info_by_idx_invalid_index_type);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Gget_info_by_idx(file_id, ".", H5_INDEX_N, H5_ITER_INC, 0, &group_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info_by_idx with invalid index type "
+                         "H5_INDEX_N!\n");
+                PART_ERROR(H5Gget_info_by_idx_invalid_index_type);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Gget_info_by_idx_invalid_index_type);
+
+        PART_BEGIN(H5Gget_info_by_idx_invalid_iter_order)
+        {
+            TESTING_2("H5Gget_info_by_idx with an invalid iteration order");
+
+            /* Both out-of-range sentinels of the H5_iter_order_t enum are tried */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Gget_info_by_idx(file_id, ".", H5_INDEX_NAME, H5_ITER_UNKNOWN, 0, &group_info,
+                                             H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info_by_idx with invalid iteration order "
+                         "H5_ITER_UNKNOWN!\n");
+                PART_ERROR(H5Gget_info_by_idx_invalid_iter_order);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Gget_info_by_idx(file_id, ".", H5_INDEX_NAME, H5_ITER_N, 0, &group_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info_by_idx with invalid iteration order "
+                         "H5_ITER_N!\n");
+                PART_ERROR(H5Gget_info_by_idx_invalid_iter_order);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Gget_info_by_idx_invalid_iter_order);
+
+        PART_BEGIN(H5Gget_info_by_idx_invalid_grp_info_pointer)
+        {
+            TESTING_2("H5Gget_info_by_idx with an invalid group info pointer");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Gget_info_by_idx(file_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info_by_idx with an invalid group info "
+                         "pointer!\n");
+                PART_ERROR(H5Gget_info_by_idx_invalid_grp_info_pointer);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Gget_info_by_idx_invalid_grp_info_pointer);
+
+        PART_BEGIN(H5Gget_info_by_idx_invalid_lapl)
+        {
+            TESTING_2("H5Gget_info_by_idx with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Gget_info_by_idx(file_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, &group_info,
+                                             H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    retrieved info of group using H5Gget_info_by_idx with an invalid LAPL!\n");
+                PART_ERROR(H5Gget_info_by_idx_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Gget_info_by_idx_invalid_lapl);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; the file may already be closed or never opened */
+    H5E_BEGIN_TRY
+    {
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test for H5Gflush.
+ *
+ * Creates a group under the container group and verifies that flushing
+ * it succeeds.  Returns 0 on success (or skip), 1 on failure.
+ */
+static int
+test_flush_group(void)
+{
+    hid_t file_id         = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID;
+    hid_t group_id        = H5I_INVALID_HID;
+
+    TESTING("H5Gflush");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+        SKIPPED();
+        /* Message fixed to describe the capability flags actually checked
+         * (the original was a copy-paste from the creation-order test) */
+        HDprintf(
+            "    API functions for basic file, group, or flush aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, GROUP_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, GROUP_FLUSH_GNAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", GROUP_FLUSH_GNAME);
+        goto error;
+    }
+
+    /* Flush the group */
+    if (H5Gflush(group_id) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't flush the group '%s'\n", GROUP_FLUSH_GNAME);
+        goto error;
+    }
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of whatever was opened before the failure */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that H5Gflush fails when it
+ * is passed invalid parameters.
+ */
+static int
+test_flush_group_invalid_params(void)
+{
+    herr_t flush_ret;
+
+    TESTING("H5Gflush with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+        SKIPPED();
+        HDprintf("    API functions for group flush aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* Flushing an invalid group ID must fail; suppress the expected error stack */
+    H5E_BEGIN_TRY
+    {
+        flush_ret = H5Gflush(H5I_INVALID_HID);
+    }
+    H5E_END_TRY;
+
+    if (flush_ret < 0) {
+        PASSED();
+        return 0;
+    }
+
+    H5_FAILED();
+    HDprintf("    flushed group with invalid ID!\n");
+
+    return 1;
+}
+
+/*
+ * A test for H5Grefresh.
+ *
+ * Creates a group under the container group and verifies that refreshing
+ * it succeeds.  Returns 0 on success (or skip), 1 on failure.
+ */
+static int
+test_refresh_group(void)
+{
+    hid_t file_id         = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID;
+    hid_t group_id        = H5I_INVALID_HID;
+
+    TESTING("H5Grefresh");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or refresh aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, GROUP_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, GROUP_REFRESH_GNAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", GROUP_REFRESH_GNAME);
+        goto error;
+    }
+
+    /* Refresh the group */
+    if (H5Grefresh(group_id) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't refresh the group '%s'\n", GROUP_REFRESH_GNAME);
+        goto error;
+    }
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of whatever was opened before the failure */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that H5Grefresh fails when it
+ * is passed invalid parameters.
+ */
+static int
+test_refresh_group_invalid_params(void)
+{
+    herr_t refresh_ret = -1;
+
+    TESTING("H5Grefresh with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+        SKIPPED();
+        HDprintf("    API functions for group refresh aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* Refreshing an invalid group ID must fail; suppress the expected error stack */
+    H5E_BEGIN_TRY
+    {
+        refresh_ret = H5Grefresh(H5I_INVALID_HID);
+    }
+    H5E_END_TRY;
+
+    if (refresh_ret >= 0) {
+        H5_FAILED();
+        HDprintf("    refreshed group with invalid ID!\n");
+        return 1;
+    }
+
+    PASSED();
+
+    return 0;
+}
+
+int
+H5_api_group_test(void)
+{
+    size_t test_idx;
+    int    n_errors = 0;
+
+    HDprintf("**********************************************\n");
+    HDprintf("*                                            *\n");
+    HDprintf("*              API Group Tests               *\n");
+    HDprintf("*                                            *\n");
+    HDprintf("**********************************************\n\n");
+
+    /* Run every registered group test; count one error per failing test */
+    for (test_idx = 0; test_idx < ARRAY_LENGTH(group_tests); test_idx++)
+        if ((*group_tests[test_idx])())
+            n_errors++;
+
+    HDprintf("\n");
+
+    return n_errors;
+}
diff --git a/test/API/H5_api_group_test.h b/test/API/H5_api_group_test.h
new file mode 100644
index 0000000..baf14c8
--- /dev/null
+++ b/test/API/H5_api_group_test.h
@@ -0,0 +1,65 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF5. The full HDF5 copyright notice, including     *
+ * terms governing use, modification, and redistribution, is contained in   *
+ * the COPYING file, which can be found at the root of the source code      *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from    *
+ * help@hdfgroup.org.                                                        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_GROUP_TEST_H
+#define H5_API_GROUP_TEST_H
+
+#include "H5_api_test.h"
+
+/* Entry point that runs all group API tests; returns the number of failures */
+int H5_api_group_test(void);
+
+/**********************************************
+ *                                            *
+ *           API Group test defines           *
+ *                                            *
+ **********************************************/
+
+/* Group created directly under the root group */
+#define GROUP_CREATE_UNDER_ROOT_GNAME "/group_under_root"
+
+/* Relative and absolute paths for creating groups beneath another group */
+#define GROUP_CREATE_UNDER_GROUP_REL_GNAME "child_group"
+#define GROUP_CREATE_UNDER_GROUP_ABS_GNAME "child_group/grandchild_group"
+
+#define GROUP_CREATE_INVALID_PARAMS_GROUP_NAME "/invalid_params_group"
+
+#define GROUP_CREATE_ANONYMOUS_GROUP_NAME "anon_group"
+
+/* Names used by the intermediate-group creation tests */
+#define GROUP_CREATE_INTMD_REL_INTMD_NAME   "rel_intmd"
+#define GROUP_CREATE_INTMD_REL_END_NAME     "rel_end"
+#define GROUP_CREATE_INTMD_ABS_INTMD_NAME   "abs_intmd"
+#define GROUP_CREATE_INTMD_ABS_END_NAME     "abs_end"
+#define GROUP_CREATE_INTMD_MULT_INTMD1_NAME "mult_intmd1"
+#define GROUP_CREATE_INTMD_MULT_INTMD2_NAME "mult_intmd2"
+#define GROUP_CREATE_INTMD_MULT_END_NAME    "mult_end"
+
+#define OPEN_NONEXISTENT_GROUP_TEST_GNAME "/nonexistent_group"
+
+#define GROUP_PROPERTY_LIST_TEST_GROUP_NAME1 "property_list_test_group1"
+#define GROUP_PROPERTY_LIST_TEST_GROUP_NAME2 "property_list_test_group2"
+#define GROUP_PROPERTY_LIST_TEST_DUMMY_VAL   H5P_CRT_ORDER_TRACKED
+
+/* Parent group name and sub-group count for the group-info tests */
+#define GROUP_GET_INFO_TEST_GROUP_NAME "group_info_test"
+#define GROUP_GET_INFO_TEST_GROUP_NUMB 16
+
+#define GROUP_FLUSH_GNAME "group_flush_test"
+
+#define GROUP_REFRESH_GNAME "group_refresh_test"
+
+#define NAME_BUF_SIZE 64
+#define GROUP_NUMB    16
+
+/* Counts for the many-group and deep-group creation stress tests */
+#define MANY_GROUP_CREATIONS_GNAME "home_for_many_groups"
+#define GROUP_NUMB_MANY            100u
+
+#define DEEP_GROUP_CREATIONS_GNAME "home_for_deep_groups"
+#define GROUP_DEPTH                100u
+
+#endif
diff --git a/test/API/H5_api_link_test.c b/test/API/H5_api_link_test.c
new file mode 100644
index 0000000..9a8c65a
--- /dev/null
+++ b/test/API/H5_api_link_test.c
@@ -0,0 +1,27072 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_link_test.h"
+
+/*
+ * TODO: add link tests for short-circuit success in operator callback
+ */
+
+static int test_create_hard_link(void);
+static int test_create_hard_link_long_name(void);
+static int test_create_hard_link_many(void);
+static int test_create_hard_link_same_loc(void);
+static int test_create_hard_link_invalid_params(void);
+static int test_create_soft_link_existing_relative(void);
+static int test_create_soft_link_existing_absolute(void);
+static int test_create_soft_link_dangling_relative(void);
+static int test_create_soft_link_dangling_absolute(void);
+static int test_create_soft_link_long_name(void);
+static int test_create_soft_link_many(void);
+static int test_create_soft_link_invalid_params(void);
+static int test_create_external_link(void);
+static int test_create_external_link_dangling(void);
+static int test_create_external_link_multi(void);
+static int test_create_external_link_ping_pong(void);
+static int test_create_external_link_invalid_params(void);
+static int test_create_user_defined_link(void);
+static int test_create_user_defined_link_invalid_params(void);
+static int test_delete_link(void);
+static int test_delete_link_reset_grp_max_crt_order(void);
+static int test_delete_link_invalid_params(void);
+static int test_copy_link(void);
+static int test_copy_links_into_group_with_links(void);
+static int test_copy_link_across_files(void);
+static int test_copy_link_invalid_params(void);
+static int test_move_link(void);
+static int test_move_links_into_group_with_links(void);
+static int test_move_link_across_files(void);
+static int test_move_link_reset_grp_max_crt_order(void);
+static int test_move_link_invalid_params(void);
+static int test_get_link_val(void);
+static int test_get_link_val_invalid_params(void);
+static int test_get_link_info(void);
+static int test_get_link_info_invalid_params(void);
+static int test_get_link_name(void);
+static int test_get_link_name_invalid_params(void);
+static int test_link_iterate_hard_links(void);
+static int test_link_iterate_soft_links(void);
+static int test_link_iterate_external_links(void);
+static int test_link_iterate_ud_links(void);
+static int test_link_iterate_mixed_links(void);
+static int test_link_iterate_invalid_params(void);
+static int test_link_iterate_0_links(void);
+static int test_link_visit_hard_links_no_cycles(void);
+static int test_link_visit_soft_links_no_cycles(void);
+static int test_link_visit_external_links_no_cycles(void);
+static int test_link_visit_ud_links_no_cycles(void);
+static int test_link_visit_mixed_links_no_cycles(void);
+static int test_link_visit_hard_links_cycles(void);
+static int test_link_visit_soft_links_cycles(void);
+static int test_link_visit_external_links_cycles(void);
+static int test_link_visit_ud_links_cycles(void);
+static int test_link_visit_mixed_links_cycles(void);
+static int test_link_visit_invalid_params(void);
+static int test_link_visit_0_links(void);
+
+static herr_t link_iter_hard_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+static herr_t link_iter_soft_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+#ifndef NO_EXTERNAL_LINKS
+static herr_t link_iter_external_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+#endif
+#ifndef NO_USER_DEFINED_LINKS
+static herr_t link_iter_ud_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data);
+#endif
+#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
+static herr_t link_iter_mixed_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+#endif
+static herr_t link_iter_invalid_params_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+static herr_t link_iter_0_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data);
+#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
+static herr_t link_iter_idx_saving_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+#endif
+
+static herr_t link_visit_hard_links_no_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+static herr_t link_visit_soft_links_no_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+#ifndef NO_EXTERNAL_LINKS
+static herr_t link_visit_external_links_no_cycles_cb(hid_t group_id, const char *name,
+ const H5L_info2_t *info, void *op_data);
+#endif
+#ifndef NO_USER_DEFINED_LINKS
+static herr_t link_visit_ud_links_no_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+#endif
+#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
+static herr_t link_visit_mixed_links_no_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+#endif
+static herr_t link_visit_hard_links_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+static herr_t link_visit_soft_links_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+#ifndef NO_EXTERNAL_LINKS
+static herr_t link_visit_external_links_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+#endif
+#ifndef NO_USER_DEFINED_LINKS
+static herr_t link_visit_ud_links_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+#endif
+#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
+static herr_t link_visit_mixed_links_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+#endif
+static herr_t link_visit_invalid_params_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+static herr_t link_visit_0_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data);
+
+/*
+ * The array of link tests to be performed.
+ *
+ * Each entry is a test function returning 0 on success (or skip) and
+ * nonzero on failure; entries are invoked in array order.
+ */
+static int (*link_tests[])(void) = {
+    test_create_hard_link,
+    test_create_hard_link_long_name,
+    test_create_hard_link_many,
+    test_create_hard_link_same_loc,
+    test_create_hard_link_invalid_params,
+    test_create_soft_link_existing_relative,
+    test_create_soft_link_existing_absolute,
+    test_create_soft_link_dangling_relative,
+    test_create_soft_link_dangling_absolute,
+    test_create_soft_link_long_name,
+    test_create_soft_link_many,
+    test_create_soft_link_invalid_params,
+    test_create_external_link,
+    test_create_external_link_dangling,
+    test_create_external_link_multi,
+    test_create_external_link_ping_pong,
+    test_create_external_link_invalid_params,
+    test_create_user_defined_link,
+    test_create_user_defined_link_invalid_params,
+    test_delete_link,
+    test_delete_link_reset_grp_max_crt_order,
+    test_delete_link_invalid_params,
+    test_copy_link,
+    test_copy_links_into_group_with_links,
+    test_copy_link_across_files,
+    test_copy_link_invalid_params,
+    test_move_link,
+    test_move_links_into_group_with_links,
+    test_move_link_across_files,
+    test_move_link_reset_grp_max_crt_order,
+    test_move_link_invalid_params,
+    test_get_link_val,
+    test_get_link_val_invalid_params,
+    test_get_link_info,
+    test_get_link_info_invalid_params,
+    test_get_link_name,
+    test_get_link_name_invalid_params,
+    test_link_iterate_hard_links,
+    test_link_iterate_soft_links,
+    test_link_iterate_external_links,
+    test_link_iterate_ud_links,
+    test_link_iterate_mixed_links,
+    test_link_iterate_invalid_params,
+    test_link_iterate_0_links,
+    test_link_visit_hard_links_no_cycles,
+    test_link_visit_soft_links_no_cycles,
+    test_link_visit_external_links_no_cycles,
+    test_link_visit_ud_links_no_cycles,
+    test_link_visit_mixed_links_no_cycles,
+    test_link_visit_hard_links_cycles,
+    test_link_visit_soft_links_cycles,
+    test_link_visit_external_links_cycles,
+    test_link_visit_ud_links_cycles,
+    test_link_visit_mixed_links_cycles,
+    test_link_visit_invalid_params,
+    test_link_visit_0_links,
+};
+
+/*
+ * A test to check that a hard link can be created
+ * using H5Lcreate_hard.
+ *
+ * Creates a hard link to the root group inside a fresh sub-group and
+ * verifies its existence with H5Lexists.  Returns 0 on success (or skip),
+ * 1 on failure.
+ */
+static int
+test_create_hard_link(void)
+{
+    htri_t link_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+
+    TESTING("hard link creation");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file or group, basic or hard link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, HARD_LINK_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", HARD_LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Create a hard link in the new sub-group pointing at the root group */
+    if (H5Lcreate_hard(file_id, "/", group_id, HARD_LINK_TEST_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create hard link '%s'\n", HARD_LINK_TEST_LINK_NAME);
+        goto error;
+    }
+
+    /* Verify the link has been created */
+    if ((link_exists = H5Lexists(group_id, HARD_LINK_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n", HARD_LINK_TEST_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    link did not exist\n");
+        goto error;
+    }
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of whatever was opened before the failure */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a hard link with a long name can be created
+ * using H5Lcreate_hard.
+ *
+ * Builds a link name of MAX_NAME_LEN 'a' characters (99 for the DAOS
+ * VOL connector, which caps lexical key length), creates the link, and
+ * verifies it with H5Lexists.  Returns 0 on success (or skip), 1 on
+ * failure.
+ */
+static int
+test_create_hard_link_long_name(void)
+{
+    htri_t link_exists;
+    hid_t  file_id = H5I_INVALID_HID;
+    char   vol_name[5];
+    size_t name_len        = MAX_NAME_LEN;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    char  *objname = NULL; /* Name of object [Long] */
+    size_t u;              /* Local index variable */
+
+    TESTING("hard link creation with a long name");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file or group, basic or hard link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, HARD_LINK_TEST_GROUP_LONG_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        /* Report the group that was actually being created (the original
+         * message named HARD_LINK_TEST_GROUP_NAME by copy-paste error) */
+        HDprintf("    couldn't create container sub-group '%s'\n", HARD_LINK_TEST_GROUP_LONG_NAME);
+        goto error;
+    }
+
+    if (H5VLget_connector_name(file_id, vol_name, 5) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get VOL connector name\n");
+        goto error;
+    }
+
+    /** for DAOS VOL, max link name supported is 99 (Lexical key) */
+    if (strcmp(vol_name, "daos") == 0)
+        name_len = 99;
+
+    /* Construct very long link name */
+    if ((objname = (char *)HDmalloc((size_t)(name_len + 1))) == NULL)
+        TEST_ERROR;
+
+    for (u = 0; u < name_len; u++)
+        objname[u] = 'a';
+    objname[name_len] = '\0';
+
+    if (H5Lcreate_hard(file_id, "/", group_id, objname, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create hard link with a long name\n");
+        goto error;
+    }
+
+    /* Verify the link has been created */
+    if ((link_exists = H5Lexists(group_id, objname, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if the link with a long name exists\n");
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    link did not exist\n");
+        goto error;
+    }
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    /* Release memory */
+    if (objname)
+        HDfree(objname);
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    /* The name buffer is also released on the error path */
+    if (objname)
+        HDfree(objname);
+
+    return 1;
+}
+
+/*
+ * A test to check that many hard links can be created
+ * using H5Lcreate_hard. A chain of 21 hard links
+ * ("hard1" .. "hard21") is built, each created through the
+ * previously-created link, and the last link is verified by
+ * reopening the file and checking the linked object's name.
+ */
+static int
+test_create_hard_link_many(void)
+{
+    htri_t link_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID;
+    hid_t  group_id = H5I_INVALID_HID, group_id2 = H5I_INVALID_HID;
+    int    i;                                                 /* Hard link chain index */
+    char   prev_name[HARD_LINK_TEST_GROUP_MANY_NAME_BUF_SIZE]; /* Name of link pointed to */
+    char   cur_name[HARD_LINK_TEST_GROUP_MANY_NAME_BUF_SIZE];  /* Name of link being created */
+#ifndef NO_OBJECT_GET_NAME
+    char objname[HARD_LINK_TEST_GROUP_MANY_NAME_BUF_SIZE]; /* Object name */
+#endif
+
+    TESTING("hard link creation of many links");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file or group, or hard link aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, HARD_LINK_TEST_GROUP_MANY_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", HARD_LINK_TEST_GROUP_MANY_NAME);
+        goto error;
+    }
+
+    if ((group_id2 = H5Gcreate2(group_id, HARD_LINK_TEST_GROUP_MANY_FINAL_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                                H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", HARD_LINK_TEST_GROUP_MANY_FINAL_NAME);
+        goto error;
+    }
+
+    /* Create a chain of hard links: "hard1" points to the final group and
+     * each subsequent link "hardN" is created through "hard(N-1)", so all
+     * 21 links resolve to the same object.
+     */
+    if (H5Lcreate_hard(group_id, HARD_LINK_TEST_GROUP_MANY_FINAL_NAME, group_id, "hard1", H5P_DEFAULT,
+                       H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    for (i = 2; i <= 21; i++) {
+        HDsnprintf(prev_name, sizeof(prev_name), "hard%d", i - 1);
+        HDsnprintf(cur_name, sizeof(cur_name), "hard%d", i);
+
+        if (H5Lcreate_hard(group_id, prev_name, group_id, cur_name, H5P_DEFAULT, H5P_DEFAULT) < 0)
+            TEST_ERROR;
+    }
+
+    /* Verify the last link in the chain has been created */
+    if ((link_exists = H5Lexists(group_id, "hard21", H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link 'hard21' exists\n");
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    link 'hard21' did not exist\n");
+        goto error;
+    }
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    /* Reopen the file and group and verify the hard link */
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gopen2(container_group, HARD_LINK_TEST_GROUP_MANY_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container sub-group '%s'\n", HARD_LINK_TEST_GROUP_MANY_NAME);
+        goto error;
+    }
+
+    /* Open the object through last hard link */
+    if ((group_id2 = H5Gopen2(group_id, "hard21", H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open the group '%s' with the last hard link 'hard21'\n",
+                 HARD_LINK_TEST_GROUP_MANY_FINAL_NAME);
+        goto error;
+    }
+#ifndef NO_OBJECT_GET_NAME
+    /* Check name reported for the object opened through the last link */
+    if (H5Iget_name(group_id2, objname, (size_t)HARD_LINK_TEST_GROUP_MANY_NAME_BUF_SIZE) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get the name of the object '%s'\n", HARD_LINK_TEST_GROUP_MANY_FINAL_NAME);
+        goto error;
+    }
+
+    if (HDstrcmp(objname, "/" LINK_TEST_GROUP_NAME "/" HARD_LINK_TEST_GROUP_MANY_NAME "/hard21")) {
+        H5_FAILED();
+        HDprintf("    wrong name of the object '%s'\n", objname);
+        goto error;
+    }
+#endif
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(group_id2);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that behavior is correct when using
+ * the H5L_SAME_LOC macro for H5Lcreate_hard().
+ *
+ * Exercises H5L_SAME_LOC in both positions: as the current
+ * location id (first parameter) and as the new location id
+ * (third parameter). In each case the other location id
+ * (group_id) supplies the actual location.
+ */
+static int
+test_create_hard_link_same_loc(void)
+{
+    htri_t link_exists;
+    hid_t  file_id = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("hard link creation with H5L_SAME_LOC");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file or group, basic or hard link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, H5L_SAME_LOC_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", H5L_SAME_LOC_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5L_SAME_LOC_first_param)
+        {
+            TESTING_2("usage of H5L_SAME_LOC for first parameter of H5Lcreate_hard");
+
+            /* Pass H5L_SAME_LOC as cur_loc_id: the target location is then
+             * taken from the new_loc_id parameter (group_id), so "." refers
+             * to group_id itself.
+             * NOTE(review): an earlier comment here said library
+             * functionality for this part was broken, yet the call below is
+             * expected to succeed -- confirm current library status.
+             */
+            if (H5Lcreate_hard(H5L_SAME_LOC, ".", group_id, H5L_SAME_LOC_TEST_LINK_NAME1, H5P_DEFAULT,
+                               H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create first link '%s'\n", H5L_SAME_LOC_TEST_LINK_NAME1);
+                PART_ERROR(H5L_SAME_LOC_first_param);
+            }
+
+            /* Verify the link has been created */
+            if ((link_exists = H5Lexists(group_id, H5L_SAME_LOC_TEST_LINK_NAME1, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link exists\n");
+                PART_ERROR(H5L_SAME_LOC_first_param);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link did not exist\n");
+                PART_ERROR(H5L_SAME_LOC_first_param);
+            }
+
+            PASSED();
+        }
+        PART_END(H5L_SAME_LOC_first_param);
+
+        PART_BEGIN(H5L_SAME_LOC_third_param)
+        {
+            TESTING_2("usage of H5L_SAME_LOC for third parameter of H5Lcreate_hard");
+
+            /* Pass H5L_SAME_LOC as new_loc_id: the new link is created at
+             * the location given by cur_loc_id (group_id). */
+            if (H5Lcreate_hard(group_id, ".", H5L_SAME_LOC, H5L_SAME_LOC_TEST_LINK_NAME2, H5P_DEFAULT,
+                               H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create second link '%s'\n", H5L_SAME_LOC_TEST_LINK_NAME2);
+                PART_ERROR(H5L_SAME_LOC_third_param);
+            }
+
+            /* Verify the link has been created */
+            if ((link_exists = H5Lexists(group_id, H5L_SAME_LOC_TEST_LINK_NAME2, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", H5L_SAME_LOC_TEST_LINK_NAME2);
+                PART_ERROR(H5L_SAME_LOC_third_param);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link did not exist\n");
+                PART_ERROR(H5L_SAME_LOC_third_param);
+            }
+
+            PASSED();
+        }
+        PART_END(H5L_SAME_LOC_third_param);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a hard link can't be created when
+ * H5Lcreate_hard is passed invalid parameters.
+ *
+ * Each part passes one invalid argument (invalid location ids,
+ * NULL/empty names, invalid property lists, H5L_SAME_LOC for
+ * both locations, locations in two different files) and expects
+ * the call to fail. The final part confirms that none of the
+ * failed calls accidentally created the link.
+ */
+static int
+test_create_hard_link_invalid_params(void)
+{
+    herr_t err_ret = -1;
+    htri_t link_exists;
+    hid_t  file_id = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+#ifndef NO_PREVENT_HARD_LINKS_ACROSS_FILES
+    char ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+#endif
+    hid_t ext_file_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("hard link creation with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file or group, basic or hard link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, HARD_LINK_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", HARD_LINK_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Lcreate_hard_invalid_cur_loc_id)
+        {
+            TESTING_2("H5Lcreate_hard with an invalid cur_loc_id");
+
+            /* Each negative call is wrapped in H5E_BEGIN_TRY/H5E_END_TRY to
+             * suppress the expected error stack output. */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_hard(H5I_INVALID_HID, "/", group_id,
+                                         HARD_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created hard link with an invalid cur_loc_id!\n");
+                PART_ERROR(H5Lcreate_hard_invalid_cur_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_hard_invalid_cur_loc_id);
+
+        PART_BEGIN(H5Lcreate_hard_invalid_cur_name)
+        {
+            TESTING_2("H5Lcreate_hard with an invalid cur_name");
+
+            /* NULL cur_name must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_hard(file_id, NULL, group_id, HARD_LINK_INVALID_PARAMS_TEST_LINK_NAME,
+                                         H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created hard link with a NULL cur_name!\n");
+                PART_ERROR(H5Lcreate_hard_invalid_cur_name);
+            }
+
+            /* Empty-string cur_name must be rejected as well */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_hard(file_id, "", group_id, HARD_LINK_INVALID_PARAMS_TEST_LINK_NAME,
+                                         H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created hard link with an invalid cur_name of ''!\n");
+                PART_ERROR(H5Lcreate_hard_invalid_cur_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_hard_invalid_cur_name);
+
+        PART_BEGIN(H5Lcreate_hard_invalid_new_loc_id)
+        {
+            TESTING_2("H5Lcreate_hard with an invalid new_loc_id");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_hard(file_id, "/", H5I_INVALID_HID,
+                                         HARD_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created hard link with an invalid new_loc_id!\n");
+                PART_ERROR(H5Lcreate_hard_invalid_new_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_hard_invalid_new_loc_id);
+
+        PART_BEGIN(H5Lcreate_hard_invalid_new_name)
+        {
+            TESTING_2("H5Lcreate_hard with an invalid new_name");
+
+            /* NULL new_name must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_hard(file_id, "/", group_id, NULL, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created hard link with a NULL new_name!\n");
+                PART_ERROR(H5Lcreate_hard_invalid_new_name);
+            }
+
+            /* Empty-string new_name must be rejected as well */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_hard(file_id, "/", group_id, "", H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created hard link with an invalid new_name of ''!\n");
+                PART_ERROR(H5Lcreate_hard_invalid_new_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_hard_invalid_new_name);
+
+        PART_BEGIN(H5Lcreate_hard_invalid_lcpl)
+        {
+            TESTING_2("H5Lcreate_hard with an invalid LCPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_hard(file_id, "/", group_id, HARD_LINK_INVALID_PARAMS_TEST_LINK_NAME,
+                                         H5I_INVALID_HID, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created hard link with an invalid LCPL!\n");
+                PART_ERROR(H5Lcreate_hard_invalid_lcpl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_hard_invalid_lcpl);
+
+        PART_BEGIN(H5Lcreate_hard_invalid_lapl)
+        {
+            TESTING_2("H5Lcreate_hard with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_hard(file_id, "/", group_id, HARD_LINK_INVALID_PARAMS_TEST_LINK_NAME,
+                                         H5P_DEFAULT, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created hard link with an invalid LAPL!\n");
+                PART_ERROR(H5Lcreate_hard_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_hard_invalid_lapl);
+
+        PART_BEGIN(H5Lcreate_hard_invalid_same_loc)
+        {
+            TESTING_2("H5Lcreate_hard with the invalid same location");
+
+            /* H5L_SAME_LOC for both location ids leaves no location at all,
+             * so the call must fail. */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_hard(H5L_SAME_LOC, "/", H5L_SAME_LOC,
+                                         HARD_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created hard link with the invalid same location!\n");
+                PART_ERROR(H5Lcreate_hard_invalid_same_loc);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_hard_invalid_same_loc);
+
+        PART_BEGIN(H5Lcreate_hard_across_files)
+        {
+            TESTING_2("H5Lcreate_hard across files");
+#ifndef NO_PREVENT_HARD_LINKS_ACROSS_FILES
+            /* Hard links may not span files; try both directions between
+             * the test file and a second, freshly-created file. */
+            HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+                       EXTERNAL_LINK_TEST_FILE_NAME);
+
+            if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
+                PART_ERROR(H5Lcreate_hard_across_files);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_hard(file_id, "/", ext_file_id, HARD_LINK_INVALID_PARAMS_TEST_LINK_NAME,
+                                         H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created hard link across files!\n");
+                PART_ERROR(H5Lcreate_hard_across_files);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_hard(ext_file_id, "/", group_id, HARD_LINK_INVALID_PARAMS_TEST_LINK_NAME,
+                                         H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created hard link across files!\n");
+                PART_ERROR(H5Lcreate_hard_across_files);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Lcreate_hard_across_files);
+#endif
+        }
+        PART_END(H5Lcreate_hard_across_files);
+
+        PART_BEGIN(H5Lcreate_hard_invalid_existence)
+        {
+            TESTING_2("invalid link existence after previous invalid H5Lcreate_hard calls");
+
+            /* Verify the link hasn't been created by any of the failed calls above */
+            if ((link_exists = H5Lexists(group_id, HARD_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n",
+                         HARD_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_hard_invalid_existence);
+            }
+
+            if (link_exists) {
+                H5_FAILED();
+                HDprintf("    link existed!\n");
+                PART_ERROR(H5Lcreate_hard_invalid_existence);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_hard_invalid_existence);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+#ifndef NO_PREVENT_HARD_LINKS_ACROSS_FILES
+    if (H5Fclose(ext_file_id) < 0)
+        TEST_ERROR;
+#endif
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(ext_file_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+} /* test_create_hard_link_invalid_params */
+
+/*
+ * A test to check that a soft link, which points to an
+ * existing object with a relative path, can be created.
+ *
+ * The link's value is the bare name of a sibling group, so
+ * resolution happens relative to the link's own group; the
+ * target is then opened through the link to prove it resolves.
+ */
+static int
+test_create_soft_link_existing_relative(void)
+{
+    htri_t link_found;
+    hid_t  file_id      = H5I_INVALID_HID;
+    hid_t  parent_group = H5I_INVALID_HID, subgroup_id = H5I_INVALID_HID;
+    hid_t  target_id    = H5I_INVALID_HID;
+
+    TESTING("soft link creation to existing object by relative path");
+
+    /* Skip unless the connector supports files, groups, and basic/soft links */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file or group, basic or soft link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((parent_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((subgroup_id = H5Gcreate2(parent_group, SOFT_LINK_EXISTING_RELATIVE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                                  H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 SOFT_LINK_EXISTING_RELATIVE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Create the object the soft link will point to, then close it */
+    if ((target_id = H5Gcreate2(subgroup_id, SOFT_LINK_EXISTING_RELATIVE_TEST_OBJECT_NAME, H5P_DEFAULT,
+                                H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to create object '%s' for soft link's target\n",
+                 SOFT_LINK_EXISTING_RELATIVE_TEST_OBJECT_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(target_id) < 0)
+        TEST_ERROR;
+
+    /* Link value is a relative path (just the target's name) */
+    if (H5Lcreate_soft(SOFT_LINK_EXISTING_RELATIVE_TEST_OBJECT_NAME, subgroup_id,
+                       SOFT_LINK_EXISTING_RELATIVE_TEST_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create soft link '%s'\n", SOFT_LINK_EXISTING_RELATIVE_TEST_LINK_NAME);
+        goto error;
+    }
+
+    /* Confirm the link is now present */
+    if ((link_found = H5Lexists(subgroup_id, SOFT_LINK_EXISTING_RELATIVE_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n", SOFT_LINK_EXISTING_RELATIVE_TEST_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_found) {
+        H5_FAILED();
+        HDprintf("    link did not exist\n");
+        goto error;
+    }
+
+    /* Resolve the link: opening through it must yield the target object */
+    if ((target_id = H5Gopen2(subgroup_id, SOFT_LINK_EXISTING_RELATIVE_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open object '%s' through the soft link\n",
+                 SOFT_LINK_EXISTING_RELATIVE_TEST_OBJECT_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(target_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(subgroup_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(parent_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(target_id);
+        H5Gclose(subgroup_id);
+        H5Gclose(parent_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a soft link, which points to an
+ * existing object using an absolute path, can be created.
+ *
+ * The link's value is "/" (the file's root group), so the link
+ * always resolves regardless of where it lives; it is verified
+ * by opening the root group through the link.
+ */
+static int
+test_create_soft_link_existing_absolute(void)
+{
+    htri_t link_found;
+    hid_t  file_id = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID, opened_root_id = H5I_INVALID_HID;
+
+    TESTING("soft link creation to existing object by absolute path");
+
+    /* Skip unless the connector supports files, groups, and basic/soft links */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file or group, basic or soft link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, SOFT_LINK_EXISTING_ABSOLUTE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 SOFT_LINK_EXISTING_ABSOLUTE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* The link's value is the absolute path "/" -- the root group */
+    if (H5Lcreate_soft("/", group_id, SOFT_LINK_EXISTING_ABSOLUTE_TEST_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't create soft link '%s'\n", SOFT_LINK_EXISTING_ABSOLUTE_TEST_LINK_NAME);
+        goto error;
+    }
+
+    /* Confirm the link is now present */
+    if ((link_found = H5Lexists(group_id, SOFT_LINK_EXISTING_ABSOLUTE_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n", SOFT_LINK_EXISTING_ABSOLUTE_TEST_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_found) {
+        H5_FAILED();
+        HDprintf("    link did not exist\n");
+        goto error;
+    }
+
+    /* Resolve the link: this should open the root group */
+    if ((opened_root_id = H5Gopen2(group_id, SOFT_LINK_EXISTING_ABSOLUTE_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open object pointed to by soft link '%s'\n",
+                 SOFT_LINK_EXISTING_ABSOLUTE_TEST_LINK_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(opened_root_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(opened_root_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a soft link, which points to
+ * an object that doesn't exist by using a relative
+ * path, can be created.
+ *
+ * The test also verifies both halves of dangling-link
+ * semantics: opening through the link fails while the target
+ * is missing, and succeeds once the target is created.
+ */
+static int
+test_create_soft_link_dangling_relative(void)
+{
+    htri_t link_exists;
+    hid_t  file_id = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  object_id = H5I_INVALID_HID;
+
+    TESTING("dangling soft link creation to object by relative path");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file or group, basic or soft link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, SOFT_LINK_DANGLING_RELATIVE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 SOFT_LINK_DANGLING_RELATIVE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Create the soft link before its target exists -- creation itself
+     * must succeed since soft links are resolved lazily. */
+    if (H5Lcreate_soft(SOFT_LINK_DANGLING_RELATIVE_TEST_OBJECT_NAME, group_id,
+                       SOFT_LINK_DANGLING_RELATIVE_TEST_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create soft link '%s'\n", SOFT_LINK_DANGLING_RELATIVE_TEST_LINK_NAME);
+        goto error;
+    }
+
+    /* Verify the link has been created */
+    if ((link_exists = H5Lexists(group_id, SOFT_LINK_DANGLING_RELATIVE_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n", SOFT_LINK_DANGLING_RELATIVE_TEST_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    link did not exist\n");
+        goto error;
+    }
+
+    /* While the target is missing, opening through the link must fail */
+    H5E_BEGIN_TRY
+    {
+        object_id = H5Gopen2(group_id, SOFT_LINK_DANGLING_RELATIVE_TEST_LINK_NAME, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+
+    if (object_id >= 0) {
+        H5_FAILED();
+        HDprintf("    opened target of dangling link '%s'!\n", SOFT_LINK_DANGLING_RELATIVE_TEST_LINK_NAME);
+        H5Gclose(object_id);
+        goto error;
+    }
+
+    /* Now create the target object the link points to */
+    if ((object_id = H5Gcreate2(group_id, SOFT_LINK_DANGLING_RELATIVE_TEST_OBJECT_NAME, H5P_DEFAULT,
+                                H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to create object '%s' for soft link's target\n",
+                 SOFT_LINK_DANGLING_RELATIVE_TEST_OBJECT_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(object_id) < 0)
+        TEST_ERROR;
+
+    /* With the target in place, opening through the link must succeed */
+    if ((object_id = H5Gopen2(group_id, SOFT_LINK_DANGLING_RELATIVE_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to open object pointed to by soft link '%s'\n",
+                 SOFT_LINK_DANGLING_RELATIVE_TEST_LINK_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(object_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(object_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a soft link, which points to an
+ * object that doesn't exist by using an absolute path,
+ * can be created.
+ *
+ * As with the relative-path variant, opening through the link
+ * must fail while the target is missing and succeed once the
+ * target is created.
+ */
+static int
+test_create_soft_link_dangling_absolute(void)
+{
+    htri_t link_exists;
+    hid_t  file_id = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  object_id = H5I_INVALID_HID;
+
+    TESTING("dangling soft link creation to object by absolute path");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file or group, basic or soft link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, SOFT_LINK_DANGLING_ABSOLUTE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 SOFT_LINK_DANGLING_ABSOLUTE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Create the soft link before its target exists; the link value is the
+     * full absolute path of the (not-yet-created) target object. */
+    if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" SOFT_LINK_DANGLING_ABSOLUTE_TEST_SUBGROUP_NAME
+                       "/" SOFT_LINK_DANGLING_ABSOLUTE_TEST_OBJECT_NAME,
+                       group_id, SOFT_LINK_DANGLING_ABSOLUTE_TEST_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create soft link '%s'\n", SOFT_LINK_DANGLING_ABSOLUTE_TEST_LINK_NAME);
+        goto error;
+    }
+
+    /* Verify the link has been created */
+    if ((link_exists = H5Lexists(group_id, SOFT_LINK_DANGLING_ABSOLUTE_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n", SOFT_LINK_DANGLING_ABSOLUTE_TEST_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    link did not exist\n");
+        goto error;
+    }
+
+    /* While the target is missing, opening through the link must fail */
+    H5E_BEGIN_TRY
+    {
+        object_id = H5Gopen2(group_id, SOFT_LINK_DANGLING_ABSOLUTE_TEST_LINK_NAME, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+
+    if (object_id >= 0) {
+        H5_FAILED();
+        HDprintf("    opened target of dangling link '%s'!\n", SOFT_LINK_DANGLING_ABSOLUTE_TEST_LINK_NAME);
+        H5Gclose(object_id);
+        goto error;
+    }
+
+    /* Now create the target object the link points to */
+    if ((object_id = H5Gcreate2(group_id, SOFT_LINK_DANGLING_ABSOLUTE_TEST_OBJECT_NAME, H5P_DEFAULT,
+                                H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to create object '%s' for soft link's target\n",
+                 SOFT_LINK_DANGLING_ABSOLUTE_TEST_OBJECT_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(object_id) < 0)
+        TEST_ERROR;
+
+    /* With the target in place, opening through the link must succeed */
+    if ((object_id = H5Gopen2(group_id, SOFT_LINK_DANGLING_ABSOLUTE_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to open object pointed to by soft link '%s'\n",
+                 SOFT_LINK_DANGLING_ABSOLUTE_TEST_LINK_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(object_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(object_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a soft link with a long name can be created
+ * using H5Lcreate_soft.
+ *
+ * The link name is a run of 'b' characters of length MAX_NAME_LEN
+ * (capped at 99 for the DAOS VOL connector, whose lexical keys
+ * limit link names).
+ */
+static int
+test_create_soft_link_long_name(void)
+{
+    htri_t link_found;
+    hid_t  file_id = H5I_INVALID_HID;
+    char   vol_name[5];            /* Just enough to recognize "daos" */
+    size_t name_len = MAX_NAME_LEN;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    char  *long_name = NULL; /* Heap-allocated long link name */
+
+    TESTING("soft link creation with a long name");
+
+    /* Skip unless the connector supports files, groups, and basic/soft links */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file or group, basic or soft link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, SOFT_LINK_TEST_GROUP_LONG_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", SOFT_LINK_TEST_GROUP_LONG_NAME);
+        goto error;
+    }
+
+    if (H5VLget_connector_name(file_id, vol_name, 5) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get VOL connector name\n");
+        goto error;
+    }
+
+    /** for DAOS VOL, max link name supported is 99 (Lexical key) */
+    if (strcmp(vol_name, "daos") == 0)
+        name_len = 99;
+
+    /* Construct a very long link name: name_len 'b' characters */
+    if ((long_name = (char *)HDmalloc(name_len + 1)) == NULL)
+        TEST_ERROR;
+
+    HDmemset(long_name, 'b', name_len);
+    long_name[name_len] = '\0';
+
+    if (H5Lcreate_soft(SOFT_LINK_TEST_LONG_OBJECT_NAME, group_id, long_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create soft link with a long name\n");
+        goto error;
+    }
+
+    /* Confirm the long-named link is now present */
+    if ((link_found = H5Lexists(group_id, long_name, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if the link with a long name exists\n");
+        goto error;
+    }
+
+    if (!link_found) {
+        H5_FAILED();
+        HDprintf("    link did not exist\n");
+        goto error;
+    }
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    /* Release memory */
+    if (long_name)
+        HDfree(long_name);
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    if (long_name)
+        HDfree(long_name);
+
+    return 1;
+}
+
+/*
+ * A test to check that many soft links can be created
+ * using H5Lcreate_soft.
+ */
+static int
+test_create_soft_link_many(void)
+{
+#ifndef NO_SOFT_LINK_MANY_DANGLING
+    htri_t link_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID;
+    hid_t  group_id        = H5I_INVALID_HID;
+    hid_t  object_id       = H5I_INVALID_HID;
+#ifndef NO_OBJECT_GET_NAME
+    char objname[SOFT_LINK_TEST_GROUP_MANY_NAME_BUF_SIZE]; /* Object name */
+#endif
+#endif
+
+    TESTING("soft link creation of many links");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file or group, basic or soft link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+#ifndef NO_SOFT_LINK_MANY_DANGLING
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, SOFT_LINK_TEST_GROUP_MANY_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", SOFT_LINK_TEST_GROUP_MANY_NAME);
+        goto error;
+    }
+
+    /* Build a chain of 16 soft links: soft1 points (absolutely) at the final
+     * target object, and each softN+1 points at softN, so resolving soft16
+     * traverses the entire chain. The final target does not exist yet, so
+     * every link in the chain is dangling at this point.
+     */
+    if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" SOFT_LINK_TEST_GROUP_MANY_NAME
+                       "/" SOFT_LINK_TEST_GROUP_MANY_FINAL_NAME,
+                       group_id, "soft1", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft1", group_id, "soft2", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft2", group_id, "soft3", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft3", group_id, "soft4", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft4", group_id, "soft5", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft5", group_id, "soft6", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft6", group_id, "soft7", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft7", group_id, "soft8", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft8", group_id, "soft9", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft9", group_id, "soft10", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft10", group_id, "soft11", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft11", group_id, "soft12", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft12", group_id, "soft13", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft13", group_id, "soft14", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft14", group_id, "soft15", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+    if (H5Lcreate_soft("soft15", group_id, "soft16", H5P_DEFAULT, H5P_DEFAULT) < 0)
+        TEST_ERROR;
+
+    /* Verify the link has been created (link existence is independent of
+     * whether its target resolves) */
+    if ((link_exists = H5Lexists(group_id, "soft16", H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link 'soft16' exists\n");
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    link 'soft16' did not exist\n");
+        goto error;
+    }
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    /* Reopen the file and group and verify the soft link chain */
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gopen2(container_group, SOFT_LINK_TEST_GROUP_MANY_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container sub-group '%s'\n", SOFT_LINK_TEST_GROUP_MANY_NAME);
+        goto error;
+    }
+
+    /*
+     * XXX: Try to open the object through last soft link. It should fail because it doesn't exist. If
+     * H5Oopen is available, use that.
+     */
+    H5E_BEGIN_TRY
+    {
+        object_id = H5Gopen2(group_id, "soft16", H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+
+    if (object_id >= 0) {
+        H5_FAILED();
+        /* Fixed: report the dangling link's name rather than the group's name */
+        HDprintf("    opened target of dangling soft link '%s'!\n", "soft16");
+        H5Gclose(object_id);
+        goto error;
+    }
+
+    if ((object_id = H5Gcreate2(group_id, SOFT_LINK_TEST_GROUP_MANY_FINAL_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                                H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        /* Fixed: report the object actually being created (the chain's final
+         * target), not the containing group */
+        HDprintf("    failed to create object '%s' for soft link's target\n",
+                 SOFT_LINK_TEST_GROUP_MANY_FINAL_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(object_id) < 0)
+        TEST_ERROR;
+
+    /*
+     * XXX: Open the object through last soft link. It should work this time. If H5Oopen is available, use
+     * that.
+     */
+    if ((object_id = H5Gopen2(group_id, "soft16", H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        /* Fixed: report the link being traversed, not the group */
+        HDprintf("    failed to open object pointed to by soft link '%s'\n", "soft16");
+        goto error;
+    }
+#ifndef NO_OBJECT_GET_NAME
+    /* Check name */
+    if (H5Iget_name(object_id, objname, (size_t)SOFT_LINK_TEST_GROUP_MANY_NAME_BUF_SIZE) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get the name of the object 'soft16'\n");
+        goto error;
+    }
+
+    if (HDstrcmp(objname, "/" LINK_TEST_GROUP_NAME "/" SOFT_LINK_TEST_GROUP_MANY_NAME "/soft16")) {
+        H5_FAILED();
+        HDprintf("    wrong name of the object '%s'\n", objname);
+        goto error;
+    }
+#endif
+
+    if (H5Gclose(object_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors here are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(object_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+#else
+    SKIPPED();
+    return 0;
+#endif
+}
+
+/*
+ * A test to check that a soft link can't be created
+ * when H5Lcreate_soft is passed invalid parameters.
+ */
+static int
+test_create_soft_link_invalid_params(void)
+{
+    herr_t err_ret = -1;
+    htri_t link_exists;
+    hid_t  file_id = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("soft link creation with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    /* NOTE(review): unlike the other soft-link tests, H5VL_CAP_FLAG_LINK_BASIC is
+     * not checked here even though H5Lexists is called below -- confirm intentional */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or link aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, SOFT_LINK_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", SOFT_LINK_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /* Each part passes one invalid argument to H5Lcreate_soft and expects
+         * the call to fail (negative return) */
+        PART_BEGIN(H5Lcreate_soft_invalid_link_target)
+        {
+            TESTING_2("H5Lcreate_soft with an invalid link target");
+
+            /* NULL link target */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_soft(NULL, group_id, SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT,
+                                         H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created soft link '%s' with an invalid link target!\n",
+                         SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_soft_invalid_link_target);
+            }
+
+            /* Empty-string link target */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_soft("", group_id, SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT,
+                                         H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created soft link '%s' with an invalid link target!\n",
+                         SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_soft_invalid_link_target);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_soft_invalid_link_target);
+
+        PART_BEGIN(H5Lcreate_soft_invalid_link_loc_id)
+        {
+            TESTING_2("H5Lcreate_soft with an invalid link_loc_id");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_soft("/", H5I_INVALID_HID, SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME,
+                                         H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created soft link '%s' with an invalid link_loc_id!\n",
+                         SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_soft_invalid_link_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_soft_invalid_link_loc_id);
+
+        PART_BEGIN(H5Lcreate_soft_invalid_link_name)
+        {
+            TESTING_2("H5Lcreate_soft with an invalid link name");
+
+            /* NULL link name */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_soft("/", group_id, NULL, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created soft link '%s' with a NULL link name!\n",
+                         SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_soft_invalid_link_name);
+            }
+
+            /* Empty-string link name */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_soft("/", group_id, "", H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created soft link '%s' with an invalid link name of ''!\n",
+                         SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_soft_invalid_link_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_soft_invalid_link_name);
+
+        PART_BEGIN(H5Lcreate_soft_invalid_lcpl)
+        {
+            TESTING_2("H5Lcreate_soft with an invalid LCPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_soft("/", group_id, SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME,
+                                         H5I_INVALID_HID, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created soft link '%s' with an invalid LCPL!\n",
+                         SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_soft_invalid_lcpl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_soft_invalid_lcpl);
+
+        PART_BEGIN(H5Lcreate_soft_invalid_lapl)
+        {
+            TESTING_2("H5Lcreate_soft with an invalid LAPL");
+#ifndef NO_INVALID_PROPERTY_LIST_TESTS
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_soft("/", group_id, SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT,
+                                         H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created soft link '%s' with an invalid LAPL!\n",
+                         SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_soft_invalid_lapl);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Lcreate_soft_invalid_lapl);
+#endif
+        }
+        PART_END(H5Lcreate_soft_invalid_lapl);
+
+        PART_BEGIN(H5Lcreate_soft_invalid_existence)
+        {
+            TESTING_2("invalid link existence after previous invalid H5Lcreate_soft calls");
+
+            /* Verify the link hasn't been created */
+            if ((link_exists = H5Lexists(group_id, SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n",
+                         SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_soft_invalid_existence);
+            }
+
+            if (link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' existed!\n", SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_soft_invalid_existence);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_soft_invalid_existence);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors here are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that an external link can be created
+ * using H5Lcreate_external.
+ */
+static int
+test_create_external_link(void)
+{
+#ifndef NO_EXTERNAL_LINKS
+    htri_t link_exists;
+    hid_t  file_id = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  root_id = H5I_INVALID_HID; /* Root group of the external file, opened via the link */
+    char   ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+#endif
+
+    TESTING("external link creation to existing object");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, basic link, or external link aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+#ifndef NO_EXTERNAL_LINKS
+    HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s", EXTERNAL_LINK_TEST_FILE_NAME);
+
+    /* Create (and immediately close) the external file the link will point into */
+    if ((file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
+        goto error;
+    }
+
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, EXTERNAL_LINK_TEST_SUBGROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n", EXTERNAL_LINK_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Link to the root group ("/") of the external file */
+    if (H5Lcreate_external(ext_link_filename, "/", group_id, EXTERNAL_LINK_TEST_LINK_NAME, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create external link '%s'\n", EXTERNAL_LINK_TEST_LINK_NAME);
+        goto error;
+    }
+
+    /* Verify the link has been created */
+    if ((link_exists = H5Lexists(group_id, EXTERNAL_LINK_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n", EXTERNAL_LINK_TEST_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    link did not exist\n");
+        goto error;
+    }
+
+    /* Verify the link resolves by opening the external file's root group through it */
+    if ((root_id = H5Gopen2(group_id, EXTERNAL_LINK_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open root group of other file using external link '%s'\n",
+                 EXTERNAL_LINK_TEST_LINK_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(root_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors here are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(root_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+#else
+    SKIPPED();
+    return 0;
+#endif
+}
+
+/*
+ * A test to check that an external link, which points to an
+ * object that doesn't exist by using an absolute path, can
+ * be created.
+ */
+static int
+test_create_external_link_dangling(void)
+{
+#ifndef NO_EXTERNAL_LINKS
+    htri_t link_exists;
+    hid_t  file_id = H5I_INVALID_HID, ext_file_id = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  object_id = H5I_INVALID_HID;
+    char   ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+#endif
+
+    TESTING("dangling external link creation");
+
+#ifndef NO_EXTERNAL_LINKS
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, basic link, or external link aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s", EXTERNAL_LINK_TEST_FILE_NAME);
+
+    /* The external file is kept open so the target object can be created in it later */
+    if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
+        goto error;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, EXTERNAL_LINK_TEST_DANGLING_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n", EXTERNAL_LINK_TEST_DANGLING_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* The target object does not exist in the external file yet, so this link dangles */
+    if (H5Lcreate_external(ext_link_filename, "/" EXTERNAL_LINK_TEST_DANGLING_OBJECT_NAME, group_id,
+                           EXTERNAL_LINK_TEST_DANGLING_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dangling external link '%s'\n", EXTERNAL_LINK_TEST_DANGLING_LINK_NAME);
+        goto error;
+    }
+
+    /* Verify the link has been created */
+    if ((link_exists = H5Lexists(group_id, EXTERNAL_LINK_TEST_DANGLING_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n", EXTERNAL_LINK_TEST_DANGLING_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    link did not exist\n");
+        goto error;
+    }
+
+    /* Traversal must fail while the link is dangling */
+    H5E_BEGIN_TRY
+    {
+        object_id = H5Gopen2(group_id, EXTERNAL_LINK_TEST_DANGLING_LINK_NAME, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+
+    if (object_id >= 0) {
+        H5_FAILED();
+        HDprintf("    opened non-existent object in other file using dangling external link '%s'!\n",
+                 EXTERNAL_LINK_TEST_DANGLING_LINK_NAME);
+        H5Gclose(object_id);
+        goto error;
+    }
+
+    /* Now create the target object, turning the dangling link into a valid one */
+    if ((object_id = H5Gcreate2(ext_file_id, EXTERNAL_LINK_TEST_DANGLING_OBJECT_NAME, H5P_DEFAULT,
+                                H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to create object '%s' for external link's target\n",
+                 EXTERNAL_LINK_TEST_DANGLING_OBJECT_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(object_id) < 0)
+        TEST_ERROR;
+
+    /* Traversal must now succeed */
+    if ((object_id = H5Gopen2(group_id, EXTERNAL_LINK_TEST_DANGLING_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to open object pointed to by external link '%s'\n",
+                 EXTERNAL_LINK_TEST_DANGLING_LINK_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(object_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(ext_file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors here are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(object_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+        H5Fclose(ext_file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+#else
+    SKIPPED();
+    return 0;
+#endif
+}
+
+/*
+ * A test to check that an external link to an object
+ * that crosses several files can be created using
+ * H5Lcreate_external.
+ */
+static int
+test_create_external_link_multi(void)
+{
+#ifndef NO_EXTERNAL_LINKS
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t group_id2 = H5I_INVALID_HID, group_id3 = H5I_INVALID_HID;
+    hid_t root_id = H5I_INVALID_HID;
+    char  ext_link_filename1[H5_API_TEST_FILENAME_MAX_LENGTH];
+    char  ext_link_filename2[H5_API_TEST_FILENAME_MAX_LENGTH];
+    char  ext_link_filename3[H5_API_TEST_FILENAME_MAX_LENGTH];
+    char  objname[EXTERNAL_LINK_TEST_MULTI_NAME_BUF_SIZE];
+#endif
+
+    TESTING_MULTIPART("external link creation to an object across several files");
+
+#ifndef NO_EXTERNAL_LINKS
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or external link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    BEGIN_MULTIPART
+    {
+        /* Chain layout built by the first three parts:
+         *   file3:/G/H/I  -> file2:/D/E/F -> file1:/A/B/C
+         * The final part links the main test file to file3:/G/H/I, so that
+         * opening "ext_link" traverses all three external files and lands on
+         * file1's /A/B/C group.
+         */
+        PART_BEGIN(H5Lcreate_external_first_file)
+        {
+            TESTING_2("Create the first external file to be pointed to");
+
+            HDsnprintf(ext_link_filename1, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+                       EXTERNAL_LINK_TEST_FILE_NAME);
+
+            if ((file_id = H5Fcreate(ext_link_filename1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create file '%s' for external link to reference\n",
+                         ext_link_filename1);
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            /* Create object down a path */
+            if ((group_id = H5Gcreate2(file_id, "A", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create a group\n");
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            if ((group_id = H5Gcreate2(file_id, "A/B", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create a group\n");
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            if ((group_id = H5Gcreate2(file_id, "A/B/C", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create a group\n");
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            /* Close file */
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_first_file);
+
+        PART_BEGIN(H5Lcreate_external_second_file)
+        {
+            TESTING_2("Create the second external file to be pointed to");
+
+            HDsnprintf(ext_link_filename2, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+                       EXTERNAL_LINK_TEST_FILE_NAME2);
+
+            if ((file_id = H5Fcreate(ext_link_filename2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create file '%s' for external link to reference\n",
+                         ext_link_filename2);
+                PART_ERROR(H5Lcreate_external_second_file);
+            }
+
+            /* Create object down a path */
+            if ((group_id = H5Gcreate2(file_id, "D", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create a group\n");
+                PART_ERROR(H5Lcreate_external_second_file);
+            }
+
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_second_file);
+            }
+
+            if ((group_id = H5Gcreate2(file_id, "D/E", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create a group\n");
+                PART_ERROR(H5Lcreate_external_second_file);
+            }
+
+            /* Create external link to object in first file */
+            if (H5Lcreate_external(ext_link_filename1, "/A/B/C", group_id, "F", H5P_DEFAULT, H5P_DEFAULT) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link 'F'\n");
+                PART_ERROR(H5Lcreate_external_second_file);
+            }
+
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_second_file);
+            }
+
+            /* Close file */
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a file\n");
+                PART_ERROR(H5Lcreate_external_second_file);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_second_file);
+
+        PART_BEGIN(H5Lcreate_external_third_file)
+        {
+            TESTING_2("Create the third external file to be pointed to");
+
+            HDsnprintf(ext_link_filename3, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+                       EXTERNAL_LINK_TEST_FILE_NAME3);
+
+            if ((file_id = H5Fcreate(ext_link_filename3, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create file '%s' for external link to reference\n",
+                         ext_link_filename3);
+                PART_ERROR(H5Lcreate_external_third_file);
+            }
+
+            /* Create object down a path */
+            if ((group_id = H5Gcreate2(file_id, "G", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create a group\n");
+                PART_ERROR(H5Lcreate_external_third_file);
+            }
+
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_third_file);
+            }
+
+            if ((group_id = H5Gcreate2(file_id, "G/H", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create a group\n");
+                PART_ERROR(H5Lcreate_external_third_file);
+            }
+
+            /* Create external link to object in second file */
+            if (H5Lcreate_external(ext_link_filename2, "/D/E/F", group_id, "I", H5P_DEFAULT, H5P_DEFAULT) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link 'I'\n");
+                PART_ERROR(H5Lcreate_external_third_file);
+            }
+
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_third_file);
+            }
+
+            /* Close file */
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a file\n");
+                PART_ERROR(H5Lcreate_external_third_file);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_third_file);
+
+        PART_BEGIN(H5Lcreate_external_final_file)
+        {
+            TESTING_2("Open the file and create the final external link");
+
+            if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+                PART_ERROR(H5Lcreate_external_final_file);
+            }
+
+            if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+                PART_ERROR(H5Lcreate_external_final_file);
+            }
+
+            if ((group_id = H5Gcreate2(container_group, EXTERNAL_LINK_TEST_MULTI_NAME, H5P_DEFAULT,
+                                       H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create container subgroup '%s'\n", EXTERNAL_LINK_TEST_MULTI_NAME);
+                PART_ERROR(H5Lcreate_external_final_file);
+            }
+
+            if (H5Lcreate_external(ext_link_filename3, "/G/H/I", group_id, "ext_link", H5P_DEFAULT,
+                                   H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link 'ext_link'\n");
+                PART_ERROR(H5Lcreate_external_final_file);
+            }
+
+            if ((group_id2 = H5Gopen2(group_id, "ext_link", H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open the group that is the external link\n");
+                PART_ERROR(H5Lcreate_external_final_file);
+            }
+
+            /* Check name (fixed: the error message previously referenced
+             * HARD_LINK_TEST_GROUP_MANY_FINAL_NAME, a copy-paste from the
+             * hard-link test; the object being queried is 'ext_link') */
+            if (H5Iget_name(group_id2, objname, (size_t)EXTERNAL_LINK_TEST_MULTI_NAME_BUF_SIZE) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get the name of the object 'ext_link'\n");
+                PART_ERROR(H5Lcreate_external_final_file);
+            }
+
+            /* Traversing the chain should resolve to file1's /A/B/C group */
+            if (HDstrcmp(objname, "/A/B/C")) {
+                H5_FAILED();
+                HDprintf("    wrong name of the object '%s'\n", objname);
+                PART_ERROR(H5Lcreate_external_final_file);
+            }
+
+            /* Create an object in the external file */
+            if ((group_id3 = H5Gcreate2(group_id2, "new_group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create a group 'new_group' in the external file\n");
+                PART_ERROR(H5Lcreate_external_final_file);
+            }
+
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_final_file);
+            }
+
+            if (H5Gclose(group_id2) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_final_file);
+            }
+
+            if (H5Gclose(group_id3) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_final_file);
+            }
+
+            if (H5Gclose(container_group) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_final_file);
+            }
+
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a file\n");
+                PART_ERROR(H5Lcreate_external_final_file);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_final_file);
+
+        PART_BEGIN(H5Lcreate_external_object_created)
+        {
+            TESTING_2("Check the group being created through the external link");
+
+            /* 'new_group' was created through the link chain, so it must be
+             * visible in the first file directly at /A/B/C/new_group */
+            if ((file_id = H5Fopen(ext_link_filename1, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open file '%s'\n", ext_link_filename1);
+                PART_ERROR(H5Lcreate_external_object_created);
+            }
+
+            if ((group_id = H5Gopen2(file_id, "/A/B/C/new_group", H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open a group 'new_group' in the external file\n");
+                PART_ERROR(H5Lcreate_external_object_created);
+            }
+
+            /* Check name */
+            if (H5Iget_name(group_id, objname, (size_t)EXTERNAL_LINK_TEST_MULTI_NAME_BUF_SIZE) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get the name of the object '/A/B/C/new_group'\n");
+                PART_ERROR(H5Lcreate_external_object_created);
+            }
+
+            if (HDstrcmp(objname, "/A/B/C/new_group")) {
+                H5_FAILED();
+                HDprintf("    wrong name of the object '%s'\n", objname);
+                PART_ERROR(H5Lcreate_external_object_created);
+            }
+
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close the group\n");
+                PART_ERROR(H5Lcreate_external_object_created);
+            }
+
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close the file\n");
+                PART_ERROR(H5Lcreate_external_object_created);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_object_created);
+    }
+    END_MULTIPART;
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors here are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(root_id);
+        H5Gclose(group_id);
+        H5Gclose(group_id2);
+        H5Gclose(group_id3);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+#else
+    SKIPPED();
+    return 0;
+#endif
+}
+
+/*
+ * A test to build a file with external link to object that
+ * goes back and forth between two files a couple of times:
+ *
+ * file1:/link1 -> file2: /link2
+ * file2:/link2 -> file1: /link3
+ * file1:/link3 -> file2: /link4
+ * file2:/link4 -> file1: /link5
+ * file1:/link5 -> file2: /link6
+ * file2:/link6 -> file1: /final
+ *
+ * Traversing "link1" must resolve through the whole chain to the
+ * group "/final" in the first file.  Returns 0 on success, 1 on failure.
+ */
+static int
+test_create_external_link_ping_pong(void)
+{
+#ifndef NO_EXTERNAL_LINKS
+    hid_t file_id   = H5I_INVALID_HID;
+    hid_t group_id  = H5I_INVALID_HID;
+    hid_t group_id2 = H5I_INVALID_HID;
+    char  ext_link_filename1[H5_API_TEST_FILENAME_MAX_LENGTH];
+    char  ext_link_filename2[H5_API_TEST_FILENAME_MAX_LENGTH];
+    /* Sized with the same constant that every H5Iget_name() call below
+     * passes as the buffer size.  (Was previously declared with the
+     * unrelated EXTERNAL_LINK_TEST_MULTI_NAME_BUF_SIZE, which would
+     * overflow objname if the ping-pong constant is the larger one.) */
+    char objname[EXTERNAL_LINK_TEST_PING_PONG_NAME_BUF_SIZE];
+#endif
+
+    TESTING_MULTIPART("external link creation to an object in ping pong style");
+
+#ifndef NO_EXTERNAL_LINKS
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or external link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    HDsnprintf(ext_link_filename1, H5_API_TEST_FILENAME_MAX_LENGTH, "%s", EXTERNAL_LINK_TEST_PING_PONG_NAME1);
+    HDsnprintf(ext_link_filename2, H5_API_TEST_FILENAME_MAX_LENGTH, "%s", EXTERNAL_LINK_TEST_PING_PONG_NAME2);
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Lcreate_external_first_file)
+        {
+            TESTING_2("Create the first external file");
+
+            /* Create the first file */
+            if ((file_id = H5Fcreate(ext_link_filename1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create file '%s' for external link to reference\n",
+                         ext_link_filename1);
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            /* Create external links for chain: the odd-numbered links live
+             * in file 1 and each points at the next even-numbered link in
+             * file 2 */
+            if (H5Lcreate_external(ext_link_filename2, "/link2", file_id, "link1", H5P_DEFAULT, H5P_DEFAULT) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link\n");
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            if (H5Lcreate_external(ext_link_filename2, "/link4", file_id, "link3", H5P_DEFAULT, H5P_DEFAULT) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link\n");
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            if (H5Lcreate_external(ext_link_filename2, "/link6", file_id, "link5", H5P_DEFAULT, H5P_DEFAULT) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link\n");
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            /* Create final object - the group the whole chain resolves to */
+            if ((group_id = H5Gcreate2(file_id, "final", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create a group\n");
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            /* Close file */
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                /* Fixed message: this call closes a file, not a group */
+                HDprintf("    couldn't close the file\n");
+                PART_ERROR(H5Lcreate_external_first_file);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_first_file);
+
+        PART_BEGIN(H5Lcreate_external_second_file)
+        {
+            TESTING_2("Create the second external file");
+
+            /* Create the second file */
+            if ((file_id = H5Fcreate(ext_link_filename2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create file '%s' for external link\n", ext_link_filename2);
+                PART_ERROR(H5Lcreate_external_second_file);
+            }
+
+            /* Create external links for chain: the even-numbered links live
+             * in file 2 and point back into file 1; "link6" terminates the
+             * chain at "/final" */
+            if (H5Lcreate_external(ext_link_filename1, "/link3", file_id, "link2", H5P_DEFAULT, H5P_DEFAULT) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link\n");
+                PART_ERROR(H5Lcreate_external_second_file);
+            }
+
+            if (H5Lcreate_external(ext_link_filename1, "/link5", file_id, "link4", H5P_DEFAULT, H5P_DEFAULT) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link\n");
+                PART_ERROR(H5Lcreate_external_second_file);
+            }
+
+            if (H5Lcreate_external(ext_link_filename1, "/final", file_id, "link6", H5P_DEFAULT, H5P_DEFAULT) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link\n");
+                PART_ERROR(H5Lcreate_external_second_file);
+            }
+
+            /* Close file */
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close the file\n");
+                PART_ERROR(H5Lcreate_external_second_file);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_second_file);
+
+        PART_BEGIN(H5Lcreate_external_verify)
+        {
+            TESTING_2("Open the first file to verify the object being pointed to");
+
+            if ((file_id = H5Fopen(ext_link_filename1, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open file '%s'\n", ext_link_filename1);
+                PART_ERROR(H5Lcreate_external_verify);
+            }
+
+            /* Open object through external link */
+            if ((group_id = H5Gopen2(file_id, "link1", H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open the group that is the external link 'link1'\n");
+                PART_ERROR(H5Lcreate_external_verify);
+            }
+
+            /* Check the name of the object being pointed to */
+            if (H5Iget_name(group_id, objname, (size_t)EXTERNAL_LINK_TEST_PING_PONG_NAME_BUF_SIZE) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get the name of the object\n");
+                PART_ERROR(H5Lcreate_external_verify);
+            }
+
+            if (HDstrcmp(objname, "/final")) {
+                H5_FAILED();
+                HDprintf("    wrong name of the object '%s'\n", objname);
+                PART_ERROR(H5Lcreate_external_verify);
+            }
+
+            /* Create an object in the external file through the resolved link */
+            if ((group_id2 = H5Gcreate2(group_id, "new_group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create a new group 'new_group'\n");
+                PART_ERROR(H5Lcreate_external_verify);
+            }
+
+            if (H5Gclose(group_id2) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_verify);
+            }
+
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_verify);
+            }
+
+            /* Close file */
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close the file\n");
+                PART_ERROR(H5Lcreate_external_verify);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_verify);
+
+        PART_BEGIN(H5Lcreate_external_verify_again)
+        {
+            TESTING_2("Open the first file to verify the object being created");
+
+            if ((file_id = H5Fopen(ext_link_filename1, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open file '%s'\n", ext_link_filename1);
+                PART_ERROR(H5Lcreate_external_verify_again);
+            }
+
+            /* Open object through external link */
+            if ((group_id = H5Gopen2(file_id, "/final/new_group", H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open the group that is the external link\n");
+                PART_ERROR(H5Lcreate_external_verify_again);
+            }
+
+            /* Check the name of the object being pointed to */
+            if (H5Iget_name(group_id, objname, (size_t)EXTERNAL_LINK_TEST_PING_PONG_NAME_BUF_SIZE) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get the name of the object\n");
+                PART_ERROR(H5Lcreate_external_verify_again);
+            }
+
+            if (HDstrcmp(objname, "/final/new_group")) {
+                H5_FAILED();
+                HDprintf("    wrong name of the object '%s'\n", objname);
+                PART_ERROR(H5Lcreate_external_verify_again);
+            }
+
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close a group\n");
+                PART_ERROR(H5Lcreate_external_verify_again);
+            }
+
+            /* Close file */
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close the file\n");
+                PART_ERROR(H5Lcreate_external_verify_again);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_verify_again);
+    }
+    END_MULTIPART;
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we are already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(group_id2);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+#else
+    SKIPPED();
+    return 0;
+#endif
+}
+
+/*
+ * A test to check that an external link can't be created
+ * when H5Lcreate_external is passed invalid parameters.
+ *
+ * Each part passes one invalid argument (NULL/empty file name, NULL/empty
+ * object name, invalid location ID, NULL/empty link name, invalid LCPL,
+ * invalid LAPL) and expects the call to fail; the final part verifies no
+ * link was accidentally created.  Returns 0 on success, 1 on failure.
+ */
+static int
+test_create_external_link_invalid_params(void)
+{
+    herr_t err_ret = -1;
+    htri_t link_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    char   ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+
+    TESTING_MULTIPART("H5Lcreate_external with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or basic link or external link aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+               EXTERNAL_LINK_INVALID_PARAMS_TEST_FILE_NAME);
+
+    /* Create (then immediately close) the target file that the valid
+     * arguments of each negative call refer to */
+    if ((file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
+        goto error;
+    }
+
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, EXTERNAL_LINK_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 EXTERNAL_LINK_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Lcreate_external_invalid_file_name)
+        {
+            TESTING_2("H5Lcreate_external with an invalid file name");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_external(NULL, "/", group_id, EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME,
+                                             H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created external link '%s' using a NULL file name!\n",
+                         EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_external_invalid_file_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_external("", "/", group_id, EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME,
+                                             H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created external link '%s' using an invalid file name of ''!\n",
+                         EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_external_invalid_file_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_invalid_file_name);
+
+        PART_BEGIN(H5Lcreate_external_invalid_ext_obj_name)
+        {
+            TESTING_2("H5Lcreate_external with an invalid external object name");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Lcreate_external(ext_link_filename, NULL, group_id,
+                                       EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created external link '%s' using a NULL external object name!\n",
+                         EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_external_invalid_ext_obj_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Lcreate_external(ext_link_filename, "", group_id,
+                                       EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created external link '%s' using an invalid external object name of ''!\n",
+                         EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_external_invalid_ext_obj_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_invalid_ext_obj_name);
+
+        PART_BEGIN(H5Lcreate_external_invalid_link_loc_id)
+        {
+            TESTING_2("H5Lcreate_external with an invalid link_loc_id");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Lcreate_external(ext_link_filename, "/", H5I_INVALID_HID,
+                                       EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created external link '%s' using an invalid link_loc_id!\n",
+                         EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_external_invalid_link_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_invalid_link_loc_id);
+
+        PART_BEGIN(H5Lcreate_external_invalid_link_name)
+        {
+            TESTING_2("H5Lcreate_external with an invalid link name");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Lcreate_external(ext_link_filename, "/", group_id, NULL, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                /* Fixed message: the invalid argument here is the link name,
+                 * not the link_loc_id */
+                HDprintf("    created external link '%s' using a NULL link name!\n",
+                         EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_external_invalid_link_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_external(ext_link_filename, "/", group_id, "", H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created external link '%s' using an invalid link name of ''!\n",
+                         EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_external_invalid_link_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_invalid_link_name);
+
+        PART_BEGIN(H5Lcreate_external_invalid_lcpl)
+        {
+            TESTING_2("H5Lcreate_external with an invalid LCPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_external(ext_link_filename, "/", group_id,
+                                             EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5I_INVALID_HID,
+                                             H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created external link '%s' using an invalid LCPL!\n",
+                         EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_external_invalid_lcpl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_invalid_lcpl);
+
+        PART_BEGIN(H5Lcreate_external_invalid_lapl)
+        {
+            TESTING_2("H5Lcreate_external with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_external(ext_link_filename, "/", group_id,
+                                             EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT,
+                                             H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created external link '%s' using an invalid LAPL!\n",
+                         EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_external_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_invalid_lapl);
+
+        PART_BEGIN(H5Lcreate_external_invalid_existence)
+        {
+            TESTING_2("invalid link existence after previous invalid H5Lcreate_external calls");
+
+            /* Verify the link hasn't been created by any of the failed calls */
+            if ((link_exists =
+                     H5Lexists(group_id, EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n",
+                         EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_external_invalid_existence);
+            }
+
+            if (link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' existed!\n", EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_external_invalid_existence);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_external_invalid_existence);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we are already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a user-defined link can be created.
+ *
+ * Creates a UD link of class H5L_TYPE_EXTERNAL via H5Lcreate_ud() with a
+ * small opaque payload, then verifies with H5Lexists() that the link was
+ * created.  Returns 0 on success, 1 on failure; skips (returning 0) when
+ * the connector lacks UD-link support or NO_USER_DEFINED_LINKS is defined.
+ */
+static int
+test_create_user_defined_link(void)
+{
+#ifndef NO_USER_DEFINED_LINKS
+    ssize_t udata_size;
+    htri_t  link_exists;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    char    udata[UD_LINK_TEST_UDATA_MAX_SIZE];
+#endif
+
+    TESTING("user-defined link creation");
+
+#ifndef NO_USER_DEFINED_LINKS
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_UD_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or user-defined link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, UD_LINK_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n", UD_LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* udata_size is the formatted length (excluding the NUL terminator);
+     * the payload is treated as an opaque byte buffer by H5Lcreate_ud() */
+    if ((udata_size = HDsnprintf(udata, UD_LINK_TEST_UDATA_MAX_SIZE, "udata")) < 0)
+        TEST_ERROR;
+
+    /* NOTE(review): the payload here is a plain string rather than the
+     * flags+file+object encoding real external links use - presumably
+     * only creation/existence is exercised, not traversal; confirm */
+    if (H5Lcreate_ud(group_id, UD_LINK_TEST_LINK_NAME, H5L_TYPE_EXTERNAL, udata, (size_t)udata_size,
+                     H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create user-defined link '%s'\n", UD_LINK_TEST_LINK_NAME);
+        goto error;
+    }
+
+    /* Verify the link has been created */
+    if ((link_exists = H5Lexists(group_id, UD_LINK_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n", UD_LINK_TEST_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    link '%s' didn't exist!\n", UD_LINK_TEST_LINK_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we are already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+#else
+    SKIPPED();
+    return 0;
+#endif
+}
+
+/*
+ * A test to check that H5Lcreate_ud fails when
+ * it is given invalid parameters.
+ *
+ * Each part passes one invalid argument (invalid location ID, NULL/empty
+ * link name, invalid link type, NULL udata pointer, invalid LCPL, invalid
+ * LAPL) and expects the call to fail; the final part verifies no link was
+ * accidentally created.  Returns 0 on success, 1 on failure.
+ */
+static int
+test_create_user_defined_link_invalid_params(void)
+{
+    ssize_t udata_size;
+    htri_t  link_exists;
+    herr_t  err_ret = -1;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    char    udata[UD_LINK_INVALID_PARAMS_TEST_UDATA_MAX_SIZE];
+
+    TESTING_MULTIPART("H5Lcreate_ud with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_UD_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or link aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, UD_LINK_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n", UD_LINK_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* udata_size is the formatted length (excluding the NUL terminator) */
+    if ((udata_size = HDsnprintf(udata, UD_LINK_INVALID_PARAMS_TEST_UDATA_MAX_SIZE, "udata")) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Lcreate_ud_invalid_link_loc_id)
+        {
+            TESTING_2("H5Lcreate_ud with an invalid link location ID");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Lcreate_ud(H5I_INVALID_HID, UD_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5L_TYPE_EXTERNAL,
+                                 udata, (size_t)udata_size, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created user-defined link '%s' with an invalid link location ID!\n",
+                         UD_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_ud_invalid_link_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_ud_invalid_link_loc_id);
+
+        PART_BEGIN(H5Lcreate_ud_invalid_link_name)
+        {
+            TESTING_2("H5Lcreate_ud with an invalid link name");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_ud(group_id, NULL, H5L_TYPE_EXTERNAL, udata, (size_t)udata_size,
+                                       H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created user-defined link '%s' with a NULL link name!\n",
+                         UD_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_ud_invalid_link_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_ud(group_id, "", H5L_TYPE_EXTERNAL, udata, (size_t)udata_size,
+                                       H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created user-defined link '%s' with an invalid link name of ''!\n",
+                         UD_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_ud_invalid_link_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_ud_invalid_link_name);
+
+        PART_BEGIN(H5Lcreate_ud_invalid_link_type)
+        {
+            TESTING_2("H5Lcreate_ud with an invalid link type");
+
+            /* H5L_TYPE_HARD is not a valid user-defined link class */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_ud(group_id, UD_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5L_TYPE_HARD, udata,
+                                       (size_t)udata_size, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created user-defined link '%s' with an invalid link type!\n",
+                         UD_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_ud_invalid_link_type);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_ud_invalid_link_type);
+
+        PART_BEGIN(H5Lcreate_ud_invalid_udata_pointer)
+        {
+            TESTING_2("H5Lcreate_ud with an invalid udata pointer");
+
+            /* NULL udata with a non-zero udata_size must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_ud(group_id, UD_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5L_TYPE_EXTERNAL,
+                                       NULL, (size_t)udata_size, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created user-defined link '%s' with an invalid udata pointer!\n",
+                         UD_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_ud_invalid_udata_pointer);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_ud_invalid_udata_pointer);
+
+        PART_BEGIN(H5Lcreate_ud_invalid_lcpl)
+        {
+            TESTING_2("H5Lcreate_ud with an invalid LCPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_ud(group_id, UD_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5L_TYPE_EXTERNAL,
+                                       udata, (size_t)udata_size, H5I_INVALID_HID, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created user-defined link '%s' with an invalid LCPL!\n",
+                         UD_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_ud_invalid_lcpl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_ud_invalid_lcpl);
+
+        PART_BEGIN(H5Lcreate_ud_invalid_lapl)
+        {
+            TESTING_2("H5Lcreate_ud with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lcreate_ud(group_id, UD_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5L_TYPE_EXTERNAL,
+                                       udata, (size_t)udata_size, H5P_DEFAULT, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    created user-defined link '%s' with an invalid LAPL!\n",
+                         UD_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_ud_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_ud_invalid_lapl);
+
+        PART_BEGIN(H5Lcreate_ud_invalid_existence)
+        {
+            TESTING_2("invalid link existence after previous invalid H5Lcreate_ud calls");
+
+            /* Verify the link hasn't been created by any of the failed calls */
+            if ((link_exists = H5Lexists(group_id, UD_LINK_INVALID_PARAMS_TEST_LINK_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n",
+                         UD_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_ud_invalid_existence);
+            }
+
+            if (link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' existed!\n", UD_LINK_INVALID_PARAMS_TEST_LINK_NAME);
+                PART_ERROR(H5Lcreate_ud_invalid_existence);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lcreate_ud_invalid_existence);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we are already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a link can be deleted
+ * using H5Ldelete and H5Ldelete_by_idx.
+ */
+static int
+test_delete_link(void)
+{
+ htri_t link_exists;
+ hid_t file_id = H5I_INVALID_HID, ext_file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t subgroup_id = H5I_INVALID_HID;
+ hid_t nested_grp_id = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+#ifndef NO_EXTERNAL_LINKS
+ char ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+#endif
+
+ TESTING_MULTIPART("link deletion");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, or link, hard, soft, or external link aren't "
+ "supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create GCPL for link creation order tracking\n");
+ goto error;
+ }
+
+ if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set link creation order tracking\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, LINK_DELETE_TEST_SUBGROUP_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Ldelete_hard)
+ {
+ TESTING_2("H5Ldelete on hard link");
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP1_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP1_NAME);
+ PART_ERROR(H5Ldelete_hard);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create first hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_hard);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if first hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_hard);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" first hard link did not exist\n");
+ PART_ERROR(H5Ldelete_hard);
+ }
+
+ if (H5Ldelete(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_hard);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if first hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_hard);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" first hard link exists!\n");
+ PART_ERROR(H5Ldelete_hard);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP1_NAME);
+ PART_ERROR(H5Ldelete_hard);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_hard);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_hard_indirect)
+ {
+ TESTING_2("H5Ldelete on nested hard link");
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_NESTED_SUBGROUP_NAME1, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_NESTED_SUBGROUP_NAME1);
+ PART_ERROR(H5Ldelete_hard_indirect);
+ }
+
+ if ((nested_grp_id = H5Gcreate2(subgroup_id, LINK_DELETE_TEST_NESTED_GRP_NAME, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_NESTED_GRP_NAME);
+ PART_ERROR(H5Ldelete_hard_indirect);
+ }
+
+ if (H5Lcreate_hard(nested_grp_id, ".", nested_grp_id, LINK_DELETE_TEST_HARD_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create first hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_hard_indirect);
+ }
+
+ if ((link_exists = H5Lexists(nested_grp_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if first hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_hard_indirect);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" first hard link did not exist\n");
+ PART_ERROR(H5Ldelete_hard_indirect);
+ }
+
+ if (H5Ldelete(subgroup_id, LINK_DELETE_TEST_NESTED_HARD_LINK_NAME, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete\n",
+ LINK_DELETE_TEST_NESTED_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_hard_indirect);
+ }
+
+ if ((link_exists = H5Lexists(nested_grp_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if first hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_hard_indirect);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" first hard link exists!\n");
+ PART_ERROR(H5Ldelete_hard_indirect);
+ }
+
+ if (H5Gclose(nested_grp_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_NESTED_GRP_NAME);
+ PART_ERROR(H5Ldelete_hard_indirect);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP1_NAME);
+ PART_ERROR(H5Ldelete_hard_indirect);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_hard_indirect);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(nested_grp_id);
+ nested_grp_id = H5I_INVALID_HID;
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_soft)
+ {
+ TESTING_2("H5Ldelete on soft link");
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP2_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP2_NAME);
+ PART_ERROR(H5Ldelete_soft);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_DELETE_TEST_SUBGROUP_NAME
+ "/" LINK_DELETE_TEST_SUBGROUP2_NAME,
+ subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create first soft link '%s'\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_soft);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if first soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_soft);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" first soft link did not exist\n");
+ PART_ERROR(H5Ldelete_soft);
+ }
+
+ if (H5Ldelete(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete soft link '%s' using H5Ldelete\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_soft);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if first soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_soft);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" first soft link exists!\n");
+ PART_ERROR(H5Ldelete_soft);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP2_NAME);
+ PART_ERROR(H5Ldelete_soft);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_soft);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_external)
+ {
+ TESTING_2("H5Ldelete on external link");
+#ifndef NO_EXTERNAL_LINKS
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Ldelete_external);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Ldelete_external);
+ }
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP3_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP3_NAME);
+ PART_ERROR(H5Ldelete_external);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create first external link '%s'\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_external);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if first external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_external);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" first external link did not exist\n");
+ PART_ERROR(H5Ldelete_external);
+ }
+
+ if (H5Ldelete(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete external link '%s' using H5Ldelete\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_external);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if first external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_external);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" first external link exists!\n");
+ PART_ERROR(H5Ldelete_external);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP3_NAME);
+ PART_ERROR(H5Ldelete_external);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_external);
+#endif
+ }
+ PART_END(H5Ldelete_external);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_ud)
+ {
+ TESTING_2("H5Ldelete on user-defined link");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_ud);
+ }
+ PART_END(H5Ldelete_ud);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_hard_crt_order_increasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on hard link by creation order in increasing order");
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP5_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP5_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ /* Create several hard links */
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ /* Delete a link */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete_by_idx by creation order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ /* Ensure that the link is gone and others remain */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ /* Repeat until all links have been deleted */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete_by_idx by creation order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete_by_idx by creation order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP5_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_by_idx_hard_crt_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_hard_crt_order_decreasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on hard link by creation order in decreasing order");
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP6_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP6_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ /* Create several hard links */
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ /* Delete a link */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete_by_idx by creation order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ /* Ensure that the link is gone and others remain */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ /* Repeat until all links have been deleted */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete_by_idx by creation order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete_by_idx by creation order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP6_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_crt_order_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_by_idx_hard_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_hard_name_order_increasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on hard link by alphabetical order in increasing order");
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP7_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP7_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ /* Create several hard links */
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ /* Delete a link */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete_by_idx by alphabetical order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ /* Ensure that the link is gone and others remain */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ /* Repeat until all links have been deleted */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete_by_idx by alphabetical order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete_by_idx by alphabetical order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP7_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_by_idx_hard_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_hard_name_order_decreasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on hard link by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP8_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP8_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ /* Create several hard links */
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ /* Delete a link */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete_by_idx by alphabetical order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ /* Ensure that the link is gone and others remain */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ /* Repeat until all links have been deleted */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete_by_idx by alphabetical order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete hard link '%s' using H5Ldelete_by_idx by alphabetical order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' exists after deletion!\n", LINK_DELETE_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP8_NAME);
+ PART_ERROR(H5Ldelete_by_idx_hard_name_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_by_idx_hard_name_order_decreasing);
+#endif
+ }
+ PART_END(H5Ldelete_by_idx_hard_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_soft_crt_order_increasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on soft link by creation order in increasing order");
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP9_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP9_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ /* Create several soft links */
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_DELETE_TEST_SUBGROUP_NAME
+ "/" LINK_DELETE_TEST_SUBGROUP9_NAME,
+ subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_DELETE_TEST_SUBGROUP_NAME
+ "/" LINK_DELETE_TEST_SUBGROUP9_NAME,
+ subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_DELETE_TEST_SUBGROUP_NAME
+ "/" LINK_DELETE_TEST_SUBGROUP9_NAME,
+ subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ /* Delete a link */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete soft link '%s' using H5Ldelete_by_idx by creation order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ /* Ensure that the link is gone and others remain */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ /* Repeat until all links have been deleted */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete soft link '%s' using H5Ldelete_by_idx by creation order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete soft link '%s' using H5Ldelete_by_idx by creation order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP9_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_by_idx_soft_crt_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_soft_crt_order_decreasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on soft link by creation order in decreasing order");
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP10_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP10_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Create several soft links */
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_DELETE_TEST_SUBGROUP_NAME
+ "/" LINK_DELETE_TEST_SUBGROUP10_NAME,
+ subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_DELETE_TEST_SUBGROUP_NAME
+ "/" LINK_DELETE_TEST_SUBGROUP10_NAME,
+ subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_DELETE_TEST_SUBGROUP_NAME
+ "/" LINK_DELETE_TEST_SUBGROUP10_NAME,
+ subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Delete a link */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete soft link '%s' using H5Ldelete_by_idx by creation order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Ensure that the link is gone and others remain */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Repeat until all links have been deleted */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete soft link '%s' using H5Ldelete_by_idx by creation order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete soft link '%s' using H5Ldelete_by_idx by creation order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP10_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_crt_order_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_by_idx_soft_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_soft_name_order_increasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on soft link by alphabetical order in increasing order");
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP11_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP11_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ /* Create several soft links */
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_DELETE_TEST_SUBGROUP_NAME
+ "/" LINK_DELETE_TEST_SUBGROUP11_NAME,
+ subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_DELETE_TEST_SUBGROUP_NAME
+ "/" LINK_DELETE_TEST_SUBGROUP11_NAME,
+ subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_DELETE_TEST_SUBGROUP_NAME
+ "/" LINK_DELETE_TEST_SUBGROUP11_NAME,
+ subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ /* Delete a link */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete soft link '%s' using H5Ldelete_by_idx by alphabetical order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ /* Ensure that the link is gone and others remain */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ /* Repeat until all links have been deleted */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete soft link '%s' using H5Ldelete_by_idx by alphabetical order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete soft link '%s' using H5Ldelete_by_idx by alphabetical order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP11_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_by_idx_soft_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_soft_name_order_decreasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on soft link by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP12_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP12_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ /* Create several soft links */
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_DELETE_TEST_SUBGROUP_NAME
+ "/" LINK_DELETE_TEST_SUBGROUP12_NAME,
+ subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_DELETE_TEST_SUBGROUP_NAME
+ "/" LINK_DELETE_TEST_SUBGROUP12_NAME,
+ subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_DELETE_TEST_SUBGROUP_NAME
+ "/" LINK_DELETE_TEST_SUBGROUP12_NAME,
+ subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ /* Delete a link */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete soft link '%s' using H5Ldelete_by_idx by alphabetical order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ /* Ensure that the link is gone and others remain */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ /* Repeat until all links have been deleted */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete soft link '%s' using H5Ldelete_by_idx by alphabetical order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete soft link '%s' using H5Ldelete_by_idx by alphabetical order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' exists after deletion!\n", LINK_DELETE_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP12_NAME);
+ PART_ERROR(H5Ldelete_by_idx_soft_name_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_by_idx_soft_name_order_decreasing);
+#endif
+ }
+ PART_END(H5Ldelete_by_idx_soft_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_external_crt_order_increasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on external link by creation order in increasing order");
+#ifndef NO_EXTERNAL_LINKS
+ /* Create file for external link to reference */
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP13_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP13_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ /* Create several external links */
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ /* Delete a link */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete external link '%s' using H5Ldelete_by_idx by creation order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ /* Ensure that the link is gone and others remain */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ /* Repeat until all links have been deleted */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete external link '%s' using H5Ldelete_by_idx by creation order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete external link '%s' using H5Ldelete_by_idx by creation order in "
+ "increasing order\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP13_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_increasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_by_idx_external_crt_order_increasing);
+#endif
+ }
+ PART_END(H5Ldelete_by_idx_external_crt_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_external_crt_order_decreasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on external link by creation order in decreasing order");
+#ifndef NO_EXTERNAL_LINKS
+ /* Create file for external link to reference */
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP14_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP14_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ /* Create several external links */
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ /* Delete a link */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete external link '%s' using H5Ldelete_by_idx by creation order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ /* Ensure that the link is gone and others remain */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ /* Repeat until all links have been deleted */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete external link '%s' using H5Ldelete_by_idx by creation order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete external link '%s' using H5Ldelete_by_idx by creation order in "
+ "decreasing order\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP14_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_crt_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_by_idx_external_crt_order_decreasing);
+#endif
+ }
+ PART_END(H5Ldelete_by_idx_external_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_external_name_order_increasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on external link by alphabetical order in increasing order");
+#ifndef NO_EXTERNAL_LINKS
+ /* Create file for external link to reference */
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP15_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP15_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ /* Create several external links */
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ /* Delete a link */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete external link '%s' using H5Ldelete_by_idx by alphabetical "
+ "order in increasing order\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ /* Ensure that the link is gone and others remain */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ /* Repeat until all links have been deleted */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete external link '%s' using H5Ldelete_by_idx by alphabetical "
+ "order in increasing order\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete external link '%s' using H5Ldelete_by_idx by alphabetical "
+ "order in increasing order\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP15_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_increasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_by_idx_external_name_order_increasing);
+#endif
+ }
+ PART_END(H5Ldelete_by_idx_external_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_external_name_order_decreasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on external link by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Create file for external link to reference */
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_TEST_SUBGROUP16_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", LINK_DELETE_TEST_SUBGROUP16_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ /* Create several external links */
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ /* Delete a link */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete external link '%s' using H5Ldelete_by_idx by alphabetical "
+ "order in decreasing order\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ /* Ensure that the link is gone and others remain */
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ /* Repeat until all links have been deleted */
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete external link '%s' using H5Ldelete_by_idx by alphabetical "
+ "order in decreasing order\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist after deletion of a different link!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Ldelete_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't delete external link '%s' using H5Ldelete_by_idx by alphabetical "
+ "order in decreasing order\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, LINK_DELETE_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' exists after deletion!\n",
+ LINK_DELETE_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", LINK_DELETE_TEST_SUBGROUP16_NAME);
+ PART_ERROR(H5Ldelete_by_idx_external_name_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_by_idx_external_name_order_decreasing);
+#endif
+ }
+ PART_END(H5Ldelete_by_idx_external_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_ud_crt_order_increasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on user-defined link by creation order in increasing order");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_by_idx_ud_crt_order_increasing);
+ }
+ PART_END(H5Ldelete_by_idx_ud_crt_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_ud_crt_order_decreasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on user-defined link by creation order in decreasing order");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_by_idx_ud_crt_order_decreasing);
+ }
+ PART_END(H5Ldelete_by_idx_ud_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_ud_name_order_increasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on user-defined link by alphabetical order in increasing order");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_by_idx_ud_name_order_increasing);
+ }
+ PART_END(H5Ldelete_by_idx_ud_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Ldelete_by_idx_ud_name_order_decreasing)
+ {
+ TESTING_2("H5Ldelete_by_idx on user-defined link by alphabetical order in decreasing order");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_by_idx_ud_name_order_decreasing);
+ }
+ PART_END(H5Ldelete_by_idx_ud_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(gcpl_id);
+ H5Gclose(subgroup_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(ext_file_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a group's always-increasing
+ * maximum link creation order value gets reset once
+ * all the links have been deleted from the group.
+ *
+ * Two orderings are exercised: deleting links from the
+ * least-recently created to the most-recently created
+ * (bottom-up) and the reverse (top-down). In both cases
+ * the group's max_corder must stay pinned at the number
+ * of created links while any link remains, and must reset
+ * to 0 only after the last link is gone.
+ *
+ * Returns 0 on success or skip, 1 on failure.
+ */
+static int
+test_delete_link_reset_grp_max_crt_order(void)
+{
+#ifndef NO_MAX_LINK_CRT_ORDER_RESET
+    H5G_info_t grp_info;
+    size_t     i;
+    hid_t      file_id         = H5I_INVALID_HID;
+    hid_t      container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t      subgroup_id     = H5I_INVALID_HID;
+    hid_t      gcpl_id         = H5I_INVALID_HID;
+    char       link_name[LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_BUF_SIZE];
+#endif
+
+    TESTING_MULTIPART("H5Ldelete of all links in group resets group's maximum link creation order value");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, basic and more group, or basic link aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+#ifndef NO_MAX_LINK_CRT_ORDER_RESET
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create GCPL for link creation order tracking\n");
+        goto error;
+    }
+
+    /* Creation order tracking must be enabled for max_corder to be maintained */
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set link creation order tracking\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_SUBGROUP_NAME,
+                               H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Ldelete_links_bottom_up)
+        {
+            TESTING_2("H5Ldelete from least-recently created link to most-recently created link");
+
+            if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_SUBGROUP1_NAME,
+                                          H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create subgroup '%s'\n",
+                         LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_SUBGROUP1_NAME);
+                PART_ERROR(H5Ldelete_links_bottom_up);
+            }
+
+            /* Create several links inside the group */
+            for (i = 0; i < LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS; i++) {
+                snprintf(link_name, LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_BUF_SIZE, "link%d", (int)i);
+
+                if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, link_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't create hard link '%s'\n", link_name);
+                    PART_ERROR(H5Ldelete_links_bottom_up);
+                }
+            }
+
+            /* Delete the links, checking the group's maximum creation order value each time */
+            for (i = 0; i < LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS; i++) {
+                memset(&grp_info, 0, sizeof(grp_info));
+
+                if (H5Gget_info(subgroup_id, &grp_info) < 0) {
+                    H5_FAILED();
+                    HDprintf("    failed to retrieve group's info\n");
+                    PART_ERROR(H5Ldelete_links_bottom_up);
+                }
+
+                /* While any link remains, max_corder must stay at the number of links created */
+                if (grp_info.max_corder != LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS) {
+                    H5_FAILED();
+                    /* Cast both arguments to long long to match the %lld conversions */
+                    HDprintf("    group's maximum creation order value got adjusted to %lld during link "
+                             "deletion; value should have remained at %lld\n",
+                             (long long)grp_info.max_corder,
+                             (long long)LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS);
+                    PART_ERROR(H5Ldelete_links_bottom_up);
+                }
+
+                snprintf(link_name, LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_BUF_SIZE, "link%d", (int)i);
+
+                if (H5Ldelete(subgroup_id, link_name, H5P_DEFAULT) < 0) {
+                    H5_FAILED();
+                    HDprintf("    failed to delete link '%s'\n", link_name);
+                    PART_ERROR(H5Ldelete_links_bottom_up);
+                }
+            }
+
+            /* Ensure the group's maximum creation order value has now reset to 0 after all the links are gone
+             */
+            memset(&grp_info, 0, sizeof(grp_info));
+
+            if (H5Gget_info(subgroup_id, &grp_info) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve group's info\n");
+                PART_ERROR(H5Ldelete_links_bottom_up);
+            }
+
+            if (grp_info.max_corder != 0) {
+                H5_FAILED();
+                HDprintf("    group's maximum creation order value didn't reset to 0 after deleting all "
+                         "links from group; value is still %lld\n",
+                         (long long)grp_info.max_corder);
+                PART_ERROR(H5Ldelete_links_bottom_up);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ldelete_links_bottom_up);
+
+        H5E_BEGIN_TRY
+        {
+            H5Gclose(subgroup_id);
+            subgroup_id = H5I_INVALID_HID;
+        }
+        H5E_END_TRY;
+
+        PART_BEGIN(H5Ldelete_links_top_down)
+        {
+            TESTING_2("H5Ldelete from most-recently created link to least-recently created link");
+
+            if ((subgroup_id = H5Gcreate2(group_id, LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_SUBGROUP2_NAME,
+                                          H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create subgroup '%s'\n",
+                         LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_SUBGROUP2_NAME);
+                PART_ERROR(H5Ldelete_links_top_down);
+            }
+
+            /* Create several links inside the group */
+            for (i = 0; i < LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS; i++) {
+                snprintf(link_name, LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_BUF_SIZE, "link%d", (int)i);
+
+                if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, link_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't create hard link '%s'\n", link_name);
+                    PART_ERROR(H5Ldelete_links_top_down);
+                }
+            }
+
+            /* Delete the links, checking the group's maximum creation order value each time */
+            for (i = 0; i < LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS; i++) {
+                memset(&grp_info, 0, sizeof(grp_info));
+
+                if (H5Gget_info(subgroup_id, &grp_info) < 0) {
+                    H5_FAILED();
+                    HDprintf("    failed to retrieve group's info\n");
+                    PART_ERROR(H5Ldelete_links_top_down);
+                }
+
+                /* While any link remains, max_corder must stay at the number of links created */
+                if (grp_info.max_corder != LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS) {
+                    H5_FAILED();
+                    /* Cast both arguments to long long to match the %lld conversions */
+                    HDprintf("    group's maximum creation order value got adjusted to %lld during link "
+                             "deletion; value should have remained at %lld\n",
+                             (long long)grp_info.max_corder,
+                             (long long)LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS);
+                    PART_ERROR(H5Ldelete_links_top_down);
+                }
+
+                /* Delete in reverse creation order: last-created link first */
+                snprintf(link_name, LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_BUF_SIZE, "link%d",
+                         (int)(LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS - i - 1));
+
+                if (H5Ldelete(subgroup_id, link_name, H5P_DEFAULT) < 0) {
+                    H5_FAILED();
+                    HDprintf("    failed to delete link '%s'\n", link_name);
+                    PART_ERROR(H5Ldelete_links_top_down);
+                }
+            }
+
+            /* Ensure the group's maximum creation order value has now reset to 0 after all the links are gone
+             */
+            memset(&grp_info, 0, sizeof(grp_info));
+
+            if (H5Gget_info(subgroup_id, &grp_info) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve group's info\n");
+                PART_ERROR(H5Ldelete_links_top_down);
+            }
+
+            if (grp_info.max_corder != 0) {
+                H5_FAILED();
+                HDprintf("    group's maximum creation order value didn't reset to 0 after deleting all "
+                         "links from group; value is still %lld\n",
+                         (long long)grp_info.max_corder);
+                PART_ERROR(H5Ldelete_links_top_down);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ldelete_links_top_down);
+
+        H5E_BEGIN_TRY
+        {
+            H5Gclose(subgroup_id);
+            subgroup_id = H5I_INVALID_HID;
+        }
+        H5E_END_TRY;
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors during teardown are suppressed */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(gcpl_id);
+        H5Gclose(subgroup_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+#else
+    SKIPPED();
+    return 0;
+#endif
+}
+
+static int
+test_delete_link_invalid_params(void)
+{
+ htri_t link_exists;
+ herr_t err_ret = -1;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("H5Ldelete with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_BY_IDX) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, or link aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, LINK_DELETE_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n", LINK_DELETE_INVALID_PARAMS_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if (H5Lcreate_hard(group_id, ".", group_id, LINK_DELETE_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", LINK_DELETE_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+ goto error;
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(group_id, LINK_DELETE_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if first hard link '%s' exists\n",
+ LINK_DELETE_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" first hard link did not exist\n");
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Ldelete_invalid_loc_id)
+ {
+ TESTING_2("H5Ldelete with an invalid location ID");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret =
+ H5Ldelete(H5I_INVALID_HID, LINK_DELETE_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ldelete succeeded with an invalid location ID!\n");
+ PART_ERROR(H5Ldelete_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_invalid_loc_id);
+
+ PART_BEGIN(H5Ldelete_invalid_link_name)
+ {
+ TESTING_2("H5Ldelete with an invalid link name");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ldelete(group_id, NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ldelete succeeded with a NULL link name!\n");
+ PART_ERROR(H5Ldelete_invalid_link_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ldelete(group_id, "", H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ldelete succeeded with an invalid link name of ''!\n");
+ PART_ERROR(H5Ldelete_invalid_link_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_invalid_link_name);
+
+ PART_BEGIN(H5Ldelete_invalid_lapl)
+ {
+ TESTING_2("H5Ldelete with an invalid LAPL");
+#ifndef NO_INVALID_PROPERTY_LIST_TESTS
+ H5E_BEGIN_TRY
+ {
+ err_ret =
+ H5Ldelete(group_id, LINK_DELETE_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ldelete succeeded with an invalid LAPL!\n");
+ PART_ERROR(H5Ldelete_invalid_lapl);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_invalid_lapl);
+#endif
+ }
+ PART_END(H5Ldelete_invalid_lapl);
+
+ PART_BEGIN(H5Ldelete_by_idx_invalid_loc_id)
+ {
+ TESTING_2("H5Ldelete_by_idx with an invalid location ID");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ldelete_by_idx(H5I_INVALID_HID, ".", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ldelete_by_idx succeeded with an invalid location ID!\n");
+ PART_ERROR(H5Ldelete_by_idx_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_by_idx_invalid_loc_id);
+
+ PART_BEGIN(H5Ldelete_by_idx_invalid_grp_name)
+ {
+ TESTING_2("H5Ldelete_by_idx with an invalid group name");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ldelete_by_idx(group_id, NULL, H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ldelete_by_idx succeeded with a NULL group name!\n");
+ PART_ERROR(H5Ldelete_by_idx_invalid_grp_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ldelete_by_idx(group_id, "", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ldelete_by_idx succeeded with an invalid group name of ''!\n");
+ PART_ERROR(H5Ldelete_by_idx_invalid_grp_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_by_idx_invalid_grp_name);
+
+ PART_BEGIN(H5Ldelete_by_idx_invalid_index_type)
+ {
+ TESTING_2("H5Ldelete_by_idx with an invalid index type");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ldelete_by_idx(group_id, ".", H5_INDEX_UNKNOWN, H5_ITER_INC, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ldelete_by_idx succeeded with invalid index type H5_INDEX_UNKNOWN!\n");
+ PART_ERROR(H5Ldelete_by_idx_invalid_index_type);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ldelete_by_idx(group_id, ".", H5_INDEX_N, H5_ITER_INC, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ldelete_by_idx succeeded with invalid index type H5_INDEX_N!\n");
+ PART_ERROR(H5Ldelete_by_idx_invalid_index_type);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_by_idx_invalid_index_type);
+
+ PART_BEGIN(H5Ldelete_by_idx_invalid_index_order)
+ {
+ TESTING_2("H5Ldelete_by_idx with an invalid iteration ordering");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ldelete_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_UNKNOWN, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ldelete_by_idx succeeded with invalid iteration ordering H5_ITER_UNKNOWN!\n");
+ PART_ERROR(H5Ldelete_by_idx_invalid_index_order);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ldelete_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_N, 0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ldelete_by_idx succeeded with invalid iteration ordering H5_ITER_N!\n");
+ PART_ERROR(H5Ldelete_by_idx_invalid_index_order);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_by_idx_invalid_index_order);
+
+ PART_BEGIN(H5Ldelete_by_idx_invalid_lapl)
+ {
+ TESTING_2("H5Ldelete_by_idx with an invalid LAPL");
+#ifndef NO_INVALID_PROPERTY_LIST_TESTS
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ldelete_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ldelete_by_idx succeeded with an invalid LAPL!\n");
+ PART_ERROR(H5Ldelete_by_idx_invalid_lapl);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Ldelete_by_idx_invalid_lapl);
+#endif
+ }
+ PART_END(H5Ldelete_by_idx_invalid_lapl);
+
+ PART_BEGIN(H5Ldelete_by_idx_link_existence)
+ {
+ TESTING_2("valid link existence after previous invalid H5Ldelete(_by_idx) calls");
+
+ /* Verify that the link hasn't been deleted */
+ if ((link_exists =
+ H5Lexists(group_id, LINK_DELETE_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ LINK_DELETE_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Ldelete_by_idx_link_existence);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link didn't exist!\n");
+ PART_ERROR(H5Ldelete_by_idx_link_existence);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ldelete_by_idx_link_existence);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a link can be copied using H5Lcopy.
+ */
+static int
+test_copy_link(void)
+{
+ htri_t link_exists;
+ hid_t file_id = H5I_INVALID_HID, ext_file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t src_grp_id = H5I_INVALID_HID, dst_grp_id = H5I_INVALID_HID;
+#ifndef NO_EXTERNAL_LINKS
+ char ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+#endif
+
+ TESTING_MULTIPART("link copying");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, or link, hard, soft, or external link aren't "
+ "supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't opewn container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, COPY_LINK_TEST_SUBGROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", COPY_LINK_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ if ((src_grp_id = H5Gcreate2(group_id, COPY_LINK_TEST_SRC_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", COPY_LINK_TEST_SRC_GROUP_NAME);
+ goto error;
+ }
+
+ if ((dst_grp_id = H5Gcreate2(group_id, COPY_LINK_TEST_DST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", COPY_LINK_TEST_DST_GROUP_NAME);
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Lcopy_hard_no_check)
+ {
+ TESTING_2("H5Lcopy on hard link (copied link's properties not checked)");
+
+ /* Try to copy a hard link */
+ if (H5Lcreate_hard(group_id, ".", src_grp_id, COPY_LINK_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", COPY_LINK_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lcopy_hard_no_check);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", COPY_LINK_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lcopy_hard_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link did not exist\n");
+ PART_ERROR(H5Lcopy_hard_no_check);
+ }
+
+ /* Verify the link doesn't currently exist in the target group */
+ if ((link_exists = H5Lexists(dst_grp_id, COPY_LINK_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", COPY_LINK_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lcopy_hard_no_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link existed in target group before copy!\n");
+ PART_ERROR(H5Lcopy_hard_no_check);
+ }
+
+ /* Copy the link */
+ if (H5Lcopy(src_grp_id, COPY_LINK_TEST_HARD_LINK_NAME, dst_grp_id,
+ COPY_LINK_TEST_HARD_LINK_COPY_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy hard link '%s'\n", COPY_LINK_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lcopy_hard_no_check);
+ }
+
+ /* Verify the link has been copied and still exists in the source group */
+ if ((link_exists = H5Lexists(dst_grp_id, COPY_LINK_TEST_HARD_LINK_COPY_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link copy '%s' exists\n",
+ COPY_LINK_TEST_HARD_LINK_COPY_NAME);
+ PART_ERROR(H5Lcopy_hard_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link copy did not exist\n");
+ PART_ERROR(H5Lcopy_hard_no_check);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if original hard link '%s' exists\n",
+ COPY_LINK_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lcopy_hard_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" original hard link did not exist\n");
+ PART_ERROR(H5Lcopy_hard_no_check);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lcopy_hard_no_check);
+
+ PART_BEGIN(H5Lcopy_hard_check)
+ {
+ H5L_info2_t orig_info, new_info;
+ int cmp_value;
+
+ TESTING_2("H5Lcopy on hard link (copied link's properties checked)");
+
+ /* Try to copy a hard link */
+ if (H5Lcreate_hard(group_id, ".", src_grp_id, COPY_LINK_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", COPY_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", COPY_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link did not exist\n");
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ /* Retrieve the link's info */
+ if (H5Lget_info2(src_grp_id, COPY_LINK_TEST_HARD_LINK_NAME2, &orig_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve info for link '%s'\n", COPY_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ /* Verify the link doesn't currently exist in the target group */
+ if ((link_exists = H5Lexists(dst_grp_id, COPY_LINK_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", COPY_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link existed in target group before copy!\n");
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ /* Copy the link */
+ if (H5Lcopy(src_grp_id, COPY_LINK_TEST_HARD_LINK_NAME2, dst_grp_id,
+ COPY_LINK_TEST_HARD_LINK_COPY_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy hard link '%s'\n", COPY_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ /* Verify the link has been copied and still exists in the source group */
+ if ((link_exists = H5Lexists(dst_grp_id, COPY_LINK_TEST_HARD_LINK_COPY_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link copy '%s' exists\n",
+ COPY_LINK_TEST_HARD_LINK_COPY_NAME2);
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link copy did not exist\n");
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if original hard link '%s' exists\n",
+ COPY_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" original hard link did not exist\n");
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ /* Retrieve the new link's info */
+ if (H5Lget_info2(dst_grp_id, COPY_LINK_TEST_HARD_LINK_COPY_NAME2, &new_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve info for link '%s'\n", COPY_LINK_TEST_HARD_LINK_COPY_NAME2);
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ if (new_info.type != orig_info.type) {
+ H5_FAILED();
+ HDprintf(" copied link's link type doesn't match original link's type\n");
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ if (H5Otoken_cmp(dst_grp_id, &new_info.u.token, &orig_info.u.token, &cmp_value) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to compare link target tokens\n");
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ if (cmp_value != 0) {
+ H5_FAILED();
+ HDprintf(" copied hard link's object token doesn't match original link's object token\n");
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ if (new_info.corder_valid != orig_info.corder_valid) {
+ H5_FAILED();
+ HDprintf(" copied link's 'corder_valid' field doesn't match original link's "
+ "'corder_valid' field\n");
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ if (new_info.corder_valid && orig_info.corder_valid && (new_info.corder != orig_info.corder)) {
+ H5_FAILED();
+ HDprintf(" copied link's creation order value %" PRId64
+ " doesn't match original link's creation order value %" PRId64 "\n",
+ new_info.corder, orig_info.corder);
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ if (new_info.cset != orig_info.cset) {
+ H5_FAILED();
+ HDprintf(" copied link's character set doesn't match original link's character set\n");
+ PART_ERROR(H5Lcopy_hard_check);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lcopy_hard_check);
+
+ PART_BEGIN(H5Lcopy_hard_same_loc)
+ {
+ TESTING_2("H5Lcopy on hard link using H5L_SAME_LOC");
+
+ /* Try to copy a hard link */
+ if (H5Lcreate_hard(group_id, ".", src_grp_id, COPY_LINK_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", COPY_LINK_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", COPY_LINK_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link did not exist\n");
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ /* Verify the links don't currently exist in the target group */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_HARD_LINK_SAME_LOC_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ COPY_LINK_TEST_HARD_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link existed in target group before copy!\n");
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_HARD_LINK_SAME_LOC_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ COPY_LINK_TEST_HARD_LINK_SAME_LOC_NAME2);
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link existed in target group before copy!\n");
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ /* Copy the link using H5L_SAME_LOC as the first parameter to H5Lcopy */
+ if (H5Lcopy(H5L_SAME_LOC, COPY_LINK_TEST_HARD_LINK_NAME3, src_grp_id,
+ COPY_LINK_TEST_HARD_LINK_SAME_LOC_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(
+ " failed to copy hard link '%s' using H5L_SAME_LOC as first parameter to H5Lcopy\n",
+ COPY_LINK_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ /* Copy the link using H5L_SAME_LOC as the third parameter to H5Lcopy */
+ if (H5Lcopy(src_grp_id, COPY_LINK_TEST_HARD_LINK_NAME3, H5L_SAME_LOC,
+ COPY_LINK_TEST_HARD_LINK_SAME_LOC_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(
+ " failed to copy hard link '%s' using H5L_SAME_LOC as third parameter to H5Lcopy\n",
+ COPY_LINK_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ /* Verify the links have been copied and the original still exist in the source group */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_HARD_LINK_SAME_LOC_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link copy '%s' exists\n",
+ COPY_LINK_TEST_HARD_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link copy did not exist\n");
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_HARD_LINK_SAME_LOC_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link copy '%s' exists\n",
+ COPY_LINK_TEST_HARD_LINK_SAME_LOC_NAME2);
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link copy did not exist\n");
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if original hard link '%s' exists\n",
+ COPY_LINK_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" original hard link did not exist\n");
+ PART_ERROR(H5Lcopy_hard_same_loc);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lcopy_hard_same_loc);
+
+ PART_BEGIN(H5Lcopy_soft_no_check)
+ {
+ TESTING_2("H5Lcopy on soft link (copied link's properties not checked)");
+
+ /* Try to copy a soft link */
+ if (H5Lcreate_soft(COPY_LINK_TEST_SOFT_LINK_TARGET_PATH, src_grp_id,
+ COPY_LINK_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", COPY_LINK_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lcopy_soft_no_check);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n", COPY_LINK_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lcopy_soft_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link did not exist\n");
+ PART_ERROR(H5Lcopy_soft_no_check);
+ }
+
+ /* Verify the link doesn't currently exist in the target group */
+ if ((link_exists = H5Lexists(dst_grp_id, COPY_LINK_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n", COPY_LINK_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lcopy_soft_no_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link existed in target group before copy!\n");
+ PART_ERROR(H5Lcopy_soft_no_check);
+ }
+
+ /* Copy the link */
+ if (H5Lcopy(src_grp_id, COPY_LINK_TEST_SOFT_LINK_NAME, dst_grp_id,
+ COPY_LINK_TEST_SOFT_LINK_COPY_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy soft link '%s'\n", COPY_LINK_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lcopy_soft_no_check);
+ }
+
+ /* Verify the link has been copied and still exists in the source group */
+ if ((link_exists = H5Lexists(dst_grp_id, COPY_LINK_TEST_SOFT_LINK_COPY_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' copy exists\n",
+ COPY_LINK_TEST_SOFT_LINK_COPY_NAME);
+ PART_ERROR(H5Lcopy_soft_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link copy did not exist\n");
+ PART_ERROR(H5Lcopy_soft_no_check);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if original soft link '%s' exists\n",
+ COPY_LINK_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lcopy_soft_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" original soft link did not exist\n");
+ PART_ERROR(H5Lcopy_soft_no_check);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lcopy_soft_no_check);
+
+ PART_BEGIN(H5Lcopy_soft_check)
+ {
+ H5L_info2_t orig_info, new_info;
+ char orig_link_val[COPY_LINK_TEST_LINK_VAL_BUF_SIZE];
+ char new_link_val[COPY_LINK_TEST_LINK_VAL_BUF_SIZE];
+
+ TESTING_2("H5Lcopy on soft link (copied link's properties checked)");
+
+ /* Try to copy a soft link */
+ if (H5Lcreate_soft(COPY_LINK_TEST_SOFT_LINK_TARGET_PATH, src_grp_id,
+ COPY_LINK_TEST_SOFT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", COPY_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n", COPY_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link did not exist\n");
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ /* Retrieve the link's info */
+ if (H5Lget_info2(src_grp_id, COPY_LINK_TEST_SOFT_LINK_NAME2, &orig_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve info for link '%s'\n", COPY_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ /* Retrieve the link's value */
+ if (H5Lget_val(src_grp_id, COPY_LINK_TEST_SOFT_LINK_NAME2, orig_link_val,
+ COPY_LINK_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve value for soft link '%s'\n", COPY_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ /* Verify the link doesn't currently exist in the target group */
+ if ((link_exists = H5Lexists(dst_grp_id, COPY_LINK_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n", COPY_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link existed in target group before copy!\n");
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ /* Copy the link */
+ if (H5Lcopy(src_grp_id, COPY_LINK_TEST_SOFT_LINK_NAME2, dst_grp_id,
+ COPY_LINK_TEST_SOFT_LINK_COPY_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy soft link '%s'\n", COPY_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ /* Verify the link has been copied and still exists in the source group */
+ if ((link_exists = H5Lexists(dst_grp_id, COPY_LINK_TEST_SOFT_LINK_COPY_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' copy exists\n",
+ COPY_LINK_TEST_SOFT_LINK_COPY_NAME2);
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link copy did not exist\n");
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if original soft link '%s' exists\n",
+ COPY_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" original soft link did not exist\n");
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ /* Retrieve the new link's info */
+ if (H5Lget_info2(dst_grp_id, COPY_LINK_TEST_SOFT_LINK_COPY_NAME2, &new_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve info for link '%s'\n", COPY_LINK_TEST_SOFT_LINK_COPY_NAME2);
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ if (new_info.type != orig_info.type) {
+ H5_FAILED();
+ HDprintf(" copied link's link type doesn't match original link's type\n");
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ if (new_info.u.val_size != orig_info.u.val_size) {
+ H5_FAILED();
+ HDprintf(" copied soft link's value size of %llu doesn't match original link's value size "
+ "of %llu\n",
+ (unsigned long long)new_info.u.val_size, (unsigned long long)orig_info.u.val_size);
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ if (new_info.corder_valid != orig_info.corder_valid) {
+ H5_FAILED();
+ HDprintf(" copied link's 'corder_valid' field doesn't match original link's "
+ "'corder_valid' field\n");
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ if (new_info.corder_valid && orig_info.corder_valid && (new_info.corder != orig_info.corder)) {
+ H5_FAILED();
+ HDprintf(" copied link's creation order value %" PRId64
+ " doesn't match original link's creation order value %" PRId64 "\n",
+ new_info.corder, orig_info.corder);
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ if (new_info.cset != orig_info.cset) {
+ H5_FAILED();
+ HDprintf(" copied link's character set doesn't match original link's character set\n");
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ /* Check the soft link's value */
+ if (H5Lget_val(dst_grp_id, COPY_LINK_TEST_SOFT_LINK_COPY_NAME2, new_link_val,
+ COPY_LINK_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve value for soft link '%s'\n",
+ COPY_LINK_TEST_SOFT_LINK_COPY_NAME2);
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ if (HDstrncmp(orig_link_val, new_link_val, COPY_LINK_TEST_LINK_VAL_BUF_SIZE)) {
+ H5_FAILED();
+ HDprintf(" copied soft link's value '%s' doesn't match original link's value '%s'\n",
+ new_link_val, orig_link_val);
+ PART_ERROR(H5Lcopy_soft_check);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lcopy_soft_check);
+
+ PART_BEGIN(H5Lcopy_soft_same_loc)
+ {
+ TESTING_2("H5Lcopy on soft link using H5L_SAME_LOC");
+
+ /* Try to copy a soft link */
+ if (H5Lcreate_soft(COPY_LINK_TEST_SOFT_LINK_TARGET_PATH, src_grp_id,
+ COPY_LINK_TEST_SOFT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", COPY_LINK_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n", COPY_LINK_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link did not exist\n");
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ /* Verify the links don't currently exist in the target group */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_SOFT_LINK_SAME_LOC_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ COPY_LINK_TEST_SOFT_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link existed in target group before copy!\n");
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_SOFT_LINK_SAME_LOC_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ COPY_LINK_TEST_SOFT_LINK_SAME_LOC_NAME2);
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link existed in target group before copy!\n");
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ /* Copy the link using H5L_SAME_LOC as the first parameter to H5Lcopy */
+ if (H5Lcopy(H5L_SAME_LOC, COPY_LINK_TEST_SOFT_LINK_NAME3, src_grp_id,
+ COPY_LINK_TEST_SOFT_LINK_SAME_LOC_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(
+ " failed to copy soft link '%s' using H5L_SAME_LOC as first parameter to H5Lcopy\n",
+ COPY_LINK_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ /* Copy the link using H5L_SAME_LOC as the third parameter to H5Lcopy */
+ if (H5Lcopy(src_grp_id, COPY_LINK_TEST_SOFT_LINK_NAME3, H5L_SAME_LOC,
+ COPY_LINK_TEST_SOFT_LINK_SAME_LOC_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(
+ " failed to copy soft link '%s' using H5L_SAME_LOC as third parameter to H5Lcopy\n",
+ COPY_LINK_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ /* Verify the links have been copied and the original still exists in the source group */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_SOFT_LINK_SAME_LOC_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' copy exists\n",
+ COPY_LINK_TEST_SOFT_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link copy did not exist\n");
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_SOFT_LINK_SAME_LOC_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' copy exists\n",
+ COPY_LINK_TEST_SOFT_LINK_SAME_LOC_NAME2);
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link copy did not exist\n");
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if original soft link '%s' exists\n",
+ COPY_LINK_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" original soft link did not exist\n");
+ PART_ERROR(H5Lcopy_soft_same_loc);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lcopy_soft_same_loc);
+
+ PART_BEGIN(H5Lcopy_external_no_check)
+ {
+ TESTING_2("H5Lcopy on external link (copied link's properties not checked)");
+#ifndef NO_EXTERNAL_LINKS
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lcopy_external_no_check);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lcopy_external_no_check);
+ }
+
+ /* Try to copy an external link */
+ if (H5Lcreate_external(ext_link_filename, "/", src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", COPY_LINK_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lcopy_external_no_check);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lcopy_external_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link did not exist\n");
+ PART_ERROR(H5Lcopy_external_no_check);
+ }
+
+ /* Verify the link doesn't currently exist in the target group */
+ if ((link_exists = H5Lexists(dst_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lcopy_external_no_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link existed in target group before copy!\n");
+ PART_ERROR(H5Lcopy_external_no_check);
+ }
+
+ /* Copy the link */
+ if (H5Lcopy(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME, dst_grp_id,
+ COPY_LINK_TEST_EXTERNAL_LINK_COPY_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy external link '%s'\n", COPY_LINK_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lcopy_external_no_check);
+ }
+
+ /* Verify the link has been copied and still exists in the source group */
+ if ((link_exists = H5Lexists(dst_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_COPY_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link copy '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_COPY_NAME);
+ PART_ERROR(H5Lcopy_external_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link copy did not exist\n");
+ PART_ERROR(H5Lcopy_external_no_check);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if original external link '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lcopy_external_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" original external link did not exist\n");
+ PART_ERROR(H5Lcopy_external_no_check);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lcopy_external_no_check);
+#endif
+ }
+ PART_END(H5Lcopy_external_no_check);
+
+ H5E_BEGIN_TRY
+ {
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lcopy_external_check)
+ {
+#ifndef NO_EXTERNAL_LINKS
+ H5L_info2_t orig_info, new_info;
+ const char *orig_filename, *new_filename;
+ const char *orig_objname, *new_objname;
+ unsigned unpack_flags = 0;
+ char orig_link_val[COPY_LINK_TEST_LINK_VAL_BUF_SIZE];
+ char new_link_val[COPY_LINK_TEST_LINK_VAL_BUF_SIZE];
+#endif
+
+ TESTING_2("H5Lcopy on external link (copied link's properties checked)");
+#ifndef NO_EXTERNAL_LINKS
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ /* Try to copy an external link */
+ if (H5Lcreate_external(ext_link_filename, "/", src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME2,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", COPY_LINK_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link did not exist\n");
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ /* Retrieve the link's info */
+ if (H5Lget_info2(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME2, &orig_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve info for link '%s'\n", COPY_LINK_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ /* Retrieve the link's value */
+ if (H5Lget_val(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME2, orig_link_val,
+ COPY_LINK_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve value for external link '%s'\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (H5Lunpack_elink_val(orig_link_val, orig_info.u.val_size, &unpack_flags, &orig_filename,
+ &orig_objname) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't unpack original external link's value buffer\n");
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ /* Verify the link doesn't currently exist in the target group */
+ if ((link_exists = H5Lexists(dst_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link existed in target group before copy!\n");
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ /* Copy the link */
+ if (H5Lcopy(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME2, dst_grp_id,
+ COPY_LINK_TEST_EXTERNAL_LINK_COPY_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy external link '%s'\n", COPY_LINK_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ /* Verify the link has been copied and still exists in the source group */
+ if ((link_exists = H5Lexists(dst_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_COPY_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link copy '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_COPY_NAME2);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link copy did not exist\n");
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if original external link '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" original external link did not exist\n");
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ /* Retrieve the new link's info */
+ if (H5Lget_info2(dst_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_COPY_NAME2, &new_info, H5P_DEFAULT) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve info for link '%s'\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_COPY_NAME2);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (new_info.type != orig_info.type) {
+ H5_FAILED();
+ HDprintf(" copied link's link type doesn't match original link's type\n");
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (new_info.u.val_size != orig_info.u.val_size) {
+ H5_FAILED();
+ HDprintf(" copied external link's value size of %llu doesn't match original link's value "
+ "size of %llu\n",
+ (unsigned long long)new_info.u.val_size, (unsigned long long)orig_info.u.val_size);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (new_info.corder_valid != orig_info.corder_valid) {
+ H5_FAILED();
+ HDprintf(" copied link's 'corder_valid' field doesn't match original link's "
+ "'corder_valid' field\n");
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (new_info.corder_valid && orig_info.corder_valid && (new_info.corder != orig_info.corder)) {
+ H5_FAILED();
+ HDprintf(" copied link's creation order value %lld doesn't match original link's creation "
+ "order value %lld\n",
+ new_info.corder, orig_info.corder);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (new_info.cset != orig_info.cset) {
+ H5_FAILED();
+ HDprintf(" copied link's character set doesn't match original link's character set\n");
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ /* Check the external link's value */
+ if (H5Lget_val(dst_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_COPY_NAME2, new_link_val,
+ COPY_LINK_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve value for external link '%s'\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_COPY_NAME2);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (H5Lunpack_elink_val(new_link_val, new_info.u.val_size, &unpack_flags, &new_filename,
+ &new_objname) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't unpack copied external link's value buffer\n");
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (HDstrncmp(new_filename, orig_filename, strlen(orig_filename)) < 0) {
+ H5_FAILED();
+ HDprintf(" copied external link's filename '%s' doesn't match original external link's "
+ "filename '%s'\n",
+ new_filename, orig_filename);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ if (HDstrncmp(new_objname, orig_objname, strlen(orig_objname)) < 0) {
+ H5_FAILED();
+ HDprintf(" copied external link's object name '%s' doesn't match original external link's "
+ "object name '%s'\n",
+ new_objname, orig_objname);
+ PART_ERROR(H5Lcopy_external_check);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lcopy_external_check);
+#endif
+ }
+ PART_END(H5Lcopy_external_check);
+
+ H5E_BEGIN_TRY
+ {
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lcopy_external_same_loc)
+ {
+ TESTING_2("H5Lcopy on external link using H5L_SAME_LOC");
+#ifndef NO_EXTERNAL_LINKS
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ /* Try to copy an external link */
+ if (H5Lcreate_external(ext_link_filename, "/", src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME3,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", COPY_LINK_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link did not exist\n");
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ /* Verify the links don't currently exist in the target group */
+ if ((link_exists =
+ H5Lexists(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_SAME_LOC_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link existed in target group before copy!\n");
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ if ((link_exists =
+ H5Lexists(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_SAME_LOC_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_SAME_LOC_NAME2);
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link existed in target group before copy!\n");
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ /* Copy the link using H5L_SAME_LOC as the first parameter to H5Lcopy */
+ if (H5Lcopy(H5L_SAME_LOC, COPY_LINK_TEST_EXTERNAL_LINK_NAME3, src_grp_id,
+ COPY_LINK_TEST_EXTERNAL_LINK_SAME_LOC_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy external link '%s' using H5L_SAME_LOC as first parameter to "
+ "H5Lcopy\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ /* Copy the link using H5L_SAME_LOC as the third parameter to H5Lcopy */
+ if (H5Lcopy(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME3, H5L_SAME_LOC,
+ COPY_LINK_TEST_EXTERNAL_LINK_SAME_LOC_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy external link '%s' using H5L_SAME_LOC as third parameter to "
+ "H5Lcopy\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ /* Verify the links have been copied and the original still exists in the source group */
+ if ((link_exists =
+ H5Lexists(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_SAME_LOC_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link copy '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link copy did not exist\n");
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ if ((link_exists =
+ H5Lexists(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_SAME_LOC_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link copy '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_SAME_LOC_NAME2);
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link copy did not exist\n");
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if original external link '%s' exists\n",
+ COPY_LINK_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" original external link did not exist\n");
+ PART_ERROR(H5Lcopy_external_same_loc);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lcopy_external_same_loc);
+#endif
+ }
+ PART_END(H5Lcopy_external_same_loc);
+
+ H5E_BEGIN_TRY
+ {
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lcopy_ud_no_check)
+ {
+ TESTING_2("H5Lcopy on user-defined link (copied link's properties not checked)");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Lcopy_ud_no_check);
+ }
+ PART_END(H5Lcopy_ud_no_check);
+
+ PART_BEGIN(H5Lcopy_ud_check)
+ {
+ TESTING_2("H5Lcopy on user-defined link (copied link's properties checked)");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Lcopy_ud_check);
+ }
+ PART_END(H5Lcopy_ud_check);
+
+ PART_BEGIN(H5Lcopy_ud_same_loc)
+ {
+ TESTING_2("H5Lcopy on user-defined link using H5L_SAME_LOC");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Lcopy_ud_same_loc);
+ }
+ PART_END(H5Lcopy_ud_same_loc);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Gclose(dst_grp_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(src_grp_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(dst_grp_id);
+ H5Gclose(src_grp_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(ext_file_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
/*
 * A test to check that using H5Lcopy to copy links into a
 * group which already contains links will cause the new links
 * to have creation order values ranging from the target group's
 * maximum link creation order value and upwards. This is to
 * check that it is not possible to run into the situation where
 * H5Lcopy might cause a group to have two links with the same
 * creation order values.
 */
static int
test_copy_links_into_group_with_links(void)
{
    TESTING("H5Lcopy adjusting creation order values for copied links");

    /* TODO: test not yet implemented */

    SKIPPED();

    /* Return 0 (success) rather than 1: a skipped, unimplemented test must
     * not be counted as a failure by the test driver. This matches the
     * other skipped stubs in this file (e.g. test_copy_link_across_files).
     */
    return 0;
}
+
/*
 * A test for the semantics of H5Lcopy when the source and destination
 * locations live in different files. Hard links cannot cross file
 * boundaries, so copying one across files must fail; soft links,
 * external links, and user-defined links of those classes should copy
 * successfully.
 *
 * TODO: Ideally, tests should be written to verify that the
 *       copied links retain the properties of the original
 *       links.
 */
static int
test_copy_link_across_files(void)
{
    TESTING("link copying across files");

    /* TODO: not yet implemented — report the test as skipped */

    SKIPPED();

    return 0;
}
+
+/*
+ * A test to check that a link can't be copied
+ * when H5Lcopy is passed invalid parameters.
+ */
+static int
+test_copy_link_invalid_params(void)
+{
+ herr_t err_ret = -1;
+ htri_t link_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t src_grp_id = H5I_INVALID_HID, dst_grp_id = H5I_INVALID_HID;
+ char ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+ hid_t ext_file_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("H5Lcopy with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, or basic and more link aren't supported with this "
+ "connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, COPY_LINK_INVALID_PARAMS_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", COPY_LINK_INVALID_PARAMS_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ if ((src_grp_id = H5Gcreate2(group_id, COPY_LINK_INVALID_PARAMS_TEST_SRC_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", COPY_LINK_INVALID_PARAMS_TEST_SRC_GROUP_NAME);
+ goto error;
+ }
+
+ if ((dst_grp_id = H5Gcreate2(group_id, COPY_LINK_INVALID_PARAMS_TEST_DST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", COPY_LINK_INVALID_PARAMS_TEST_DST_GROUP_NAME);
+ goto error;
+ }
+
+ if (H5Lcreate_hard(group_id, ".", src_grp_id, COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+ goto error;
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link did not exist\n");
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Lcopy_invalid_src_loc_id)
+ {
+ TESTING_2("H5Lcopy with an invalid source location ID");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret =
+ H5Lcopy(H5I_INVALID_HID, COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, dst_grp_id,
+ COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_COPY_NAME, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lcopy succeeded with an invalid source location ID\n");
+ PART_ERROR(H5Lcopy_invalid_src_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lcopy_invalid_src_loc_id);
+
+ PART_BEGIN(H5Lcopy_invalid_src_name)
+ {
+ TESTING_2("H5Lcopy with an invalid source name");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret =
+ H5Lcopy(src_grp_id, NULL, dst_grp_id, COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_COPY_NAME,
+ H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lcopy succeeded with a NULL source name\n");
+ PART_ERROR(H5Lcopy_invalid_src_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret =
+ H5Lcopy(src_grp_id, "", dst_grp_id, COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_COPY_NAME,
+ H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lcopy succeeded with an invalid source name of ''\n");
+ PART_ERROR(H5Lcopy_invalid_src_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lcopy_invalid_src_name);
+
+ PART_BEGIN(H5Lcopy_invalid_dst_loc_id)
+ {
+ TESTING_2("H5Lcopy with an invalid destination location ID");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret =
+ H5Lcopy(src_grp_id, COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5I_INVALID_HID,
+ COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_COPY_NAME, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lcopy succeeded with an invalid destination location ID\n");
+ PART_ERROR(H5Lcopy_invalid_dst_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lcopy_invalid_dst_loc_id);
+
+ PART_BEGIN(H5Lcopy_invalid_dst_name)
+ {
+ TESTING_2("H5Lcopy with an invalid destination name");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lcopy(src_grp_id, COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, dst_grp_id, NULL,
+ H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lcopy succeeded with a NULL destination name\n");
+ PART_ERROR(H5Lcopy_invalid_dst_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lcopy(src_grp_id, COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, dst_grp_id, "",
+ H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lcopy succeeded with an invalid destination name of ''\n");
+ PART_ERROR(H5Lcopy_invalid_dst_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lcopy_invalid_dst_name);
+
+ PART_BEGIN(H5Lcopy_invalid_lcpl)
+ {
+ TESTING_2("H5Lcopy with an invalid LCPL");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret =
+ H5Lcopy(src_grp_id, COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, dst_grp_id,
+ COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_COPY_NAME, H5I_INVALID_HID, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lcopy succeeded with an invalid LCPL\n");
+ PART_ERROR(H5Lcopy_invalid_lcpl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lcopy_invalid_lcpl);
+
+ PART_BEGIN(H5Lcopy_invalid_lapl)
+ {
+ TESTING_2("H5Lcopy with an invalid LAPL");
+#ifndef NO_INVALID_PROPERTY_LIST_TESTS
+ H5E_BEGIN_TRY
+ {
+ err_ret =
+ H5Lcopy(src_grp_id, COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, dst_grp_id,
+ COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_COPY_NAME, H5P_DEFAULT, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lcopy succeeded with an invalid LAPL\n");
+ PART_ERROR(H5Lcopy_invalid_lapl);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lcopy_invalid_lapl);
+#endif
+ }
+ PART_END(H5Lcopy_invalid_lapl);
+
+ PART_BEGIN(H5Lcopy_invalid_same_location)
+ {
+ TESTING_2("H5Lcopy with an invalid same location");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret =
+ H5Lcopy(H5L_SAME_LOC, COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5L_SAME_LOC,
+ COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_COPY_NAME, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lcopy succeeded with an invalid same location\n");
+ PART_ERROR(H5Lcopy_invalid_same_location);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lcopy_invalid_same_location);
+
+ PART_BEGIN(H5Lcopy_invalid_across_files)
+ {
+ TESTING_2("H5Lcopy invalid across files");
+
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lcopy_invalid_across_files);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret =
+ H5Lcopy(src_grp_id, COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, ext_file_id,
+ COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_COPY_NAME, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lcopy succeeded in copying a hard link across files!\n");
+ PART_ERROR(H5Lcopy_invalid_across_files);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lcopy_invalid_across_files);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lcopy_invalid_across_files);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Gclose(dst_grp_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(src_grp_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(dst_grp_id);
+ H5Gclose(src_grp_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(ext_file_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a link can be moved with H5Lmove.
+ */
+static int
+test_move_link(void)
+{
+ htri_t link_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t src_grp_id = H5I_INVALID_HID, dst_grp_id = H5I_INVALID_HID;
+ char ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+ hid_t ext_file_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("link moving");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, or link, hard, soft, or external link aren't "
+ "supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s", EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, MOVE_LINK_TEST_SUBGROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", MOVE_LINK_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ if ((src_grp_id = H5Gcreate2(group_id, MOVE_LINK_TEST_SRC_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", MOVE_LINK_TEST_SRC_GROUP_NAME);
+ goto error;
+ }
+
+ if ((dst_grp_id = H5Gcreate2(group_id, MOVE_LINK_TEST_DST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", MOVE_LINK_TEST_DST_GROUP_NAME);
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Lmove_hard_no_check)
+ {
+ TESTING_2("H5Lmove on hard link (moved link's properties not checked)");
+
+ /* Try to move a hard link */
+ if (H5Lcreate_hard(group_id, ".", src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", MOVE_LINK_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lmove_hard_no_check);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", MOVE_LINK_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lmove_hard_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link did not exist\n");
+ PART_ERROR(H5Lmove_hard_no_check);
+ }
+
+ /* Verify the link doesn't currently exist in the target group */
+ if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", MOVE_LINK_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lmove_hard_no_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link existed in target group before move!\n");
+ PART_ERROR(H5Lmove_hard_no_check);
+ }
+
+ /* Move the link */
+ if (H5Lmove(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME, dst_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to move link '%s'\n", MOVE_LINK_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lmove_hard_no_check);
+ }
+
+ /* Verify the link has been moved */
+ if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", MOVE_LINK_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lmove_hard_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link did not exist\n");
+ PART_ERROR(H5Lmove_hard_no_check);
+ }
+
+ /* Verify the old link is gone */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if old hard link '%s' exists\n",
+ MOVE_LINK_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lmove_hard_no_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" old hard link exists\n");
+ PART_ERROR(H5Lmove_hard_no_check);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lmove_hard_no_check);
+
+ PART_BEGIN(H5Lmove_hard_check)
+ {
+ H5L_info2_t orig_info, new_info;
+ int cmp_value;
+
+ TESTING_2("H5Lmove on hard link (moved link's properties checked)");
+
+ /* Try to move a hard link */
+ if (H5Lcreate_hard(group_id, ".", src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", MOVE_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", MOVE_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link did not exist\n");
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ /* Retrieve the link's info */
+ if (H5Lget_info2(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME2, &orig_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve info for link '%s'\n", MOVE_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ /* Verify the link doesn't currently exist in the target group */
+ if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", MOVE_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link existed in target group before move!\n");
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ /* Move the link */
+ if (H5Lmove(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME2, dst_grp_id,
+ MOVE_LINK_TEST_HARD_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to move link '%s'\n", MOVE_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ /* Verify the link has been moved */
+ if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", MOVE_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link did not exist\n");
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ /* Verify the old link is gone */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if old hard link '%s' exists\n",
+ MOVE_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" old hard link exists\n");
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ /* Retrieve the moved link's info */
+ if (H5Lget_info2(dst_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME2, &new_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve info for link '%s'\n", MOVE_LINK_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ if (new_info.type != orig_info.type) {
+ H5_FAILED();
+ HDprintf(" moved link's link type doesn't match original link's type\n");
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ if (H5Otoken_cmp(dst_grp_id, &new_info.u.token, &orig_info.u.token, &cmp_value) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to compare link target tokens\n");
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ if (cmp_value != 0) {
+ H5_FAILED();
+ HDprintf(" moved hard link's object token doesn't match original link's object token\n");
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ if (new_info.corder_valid != orig_info.corder_valid) {
+ H5_FAILED();
+ HDprintf(" moved link's 'corder_valid' field doesn't match original link's 'corder_valid' "
+ "field\n");
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ if (new_info.corder_valid && orig_info.corder_valid && (new_info.corder != orig_info.corder)) {
+ H5_FAILED();
+ HDprintf(" moved link's creation order value %" PRId64
+ " doesn't match original link's creation order value %" PRId64 "\n",
+ new_info.corder, orig_info.corder);
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ if (new_info.cset != orig_info.cset) {
+ H5_FAILED();
+ HDprintf(" moved link's character set doesn't match original link's character set\n");
+ PART_ERROR(H5Lmove_hard_check);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lmove_hard_check);
+
+ PART_BEGIN(H5Lmove_hard_same_loc)
+ {
+ TESTING_2("H5Lmove on hard link using H5L_SAME_LOC");
+
+ /* Try to move a hard link */
+ if (H5Lcreate_hard(group_id, ".", src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", MOVE_LINK_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", MOVE_LINK_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link did not exist\n");
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ /* Verify the link doesn't currently exist in the target group */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_SAME_LOC_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ MOVE_LINK_TEST_HARD_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link existed in target group before move!\n");
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ /* Rename the link using H5L_SAME_LOC as the first parameter to H5Lmove */
+ if (H5Lmove(H5L_SAME_LOC, MOVE_LINK_TEST_HARD_LINK_NAME3, src_grp_id,
+ MOVE_LINK_TEST_HARD_LINK_SAME_LOC_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to move link '%s' using H5L_SAME_LOC as first parameter to H5Lmove\n",
+ MOVE_LINK_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ /* Ensure the link has been renamed */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", MOVE_LINK_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" original hard link existed in target group after move!\n");
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_SAME_LOC_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ MOVE_LINK_TEST_HARD_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link did not exist after move!\n");
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ /* Rename the link back using H5L_SAME_LOC as the third parameter to H5Lmove */
+ if (H5Lmove(src_grp_id, MOVE_LINK_TEST_HARD_LINK_SAME_LOC_NAME, H5L_SAME_LOC,
+ MOVE_LINK_TEST_HARD_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to move link '%s' using H5L_SAME_LOC as third parameter to H5Lmove\n",
+ MOVE_LINK_TEST_HARD_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ /* Verify the link has been renamed back */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", MOVE_LINK_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" original hard link did not exist after moving the link back!\n");
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_SAME_LOC_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if old hard link '%s' exists\n",
+ MOVE_LINK_TEST_HARD_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" renamed hard link exists after moving the link back!\n");
+ PART_ERROR(H5Lmove_hard_same_loc);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lmove_hard_same_loc);
+
+ PART_BEGIN(H5Lmove_hard_rename)
+ {
+ TESTING_2("H5Lmove to rename hard link without moving it");
+
+ /* Try to rename a hard link */
+ if (H5Lcreate_hard(group_id, ".", src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME4, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", MOVE_LINK_TEST_HARD_LINK_NAME4);
+ PART_ERROR(H5Lmove_hard_rename);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME4, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n", MOVE_LINK_TEST_HARD_LINK_NAME4);
+ PART_ERROR(H5Lmove_hard_rename);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link did not exist\n");
+ PART_ERROR(H5Lmove_hard_rename);
+ }
+
+ /* Verify the renamed link doesn't currently exist in the source group */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NEW_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if renamed hard link '%s' exists\n",
+ MOVE_LINK_TEST_HARD_LINK_NEW_NAME);
+ PART_ERROR(H5Lmove_hard_rename);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" renamed hard link existed in source group before move!\n");
+ PART_ERROR(H5Lmove_hard_rename);
+ }
+
+ /* Rename the link */
+ if (H5Lmove(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME4, src_grp_id,
+ MOVE_LINK_TEST_HARD_LINK_NEW_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to rename link '%s'\n", MOVE_LINK_TEST_HARD_LINK_NAME4);
+ PART_ERROR(H5Lmove_hard_rename);
+ }
+
+ /* Verify the link has been renamed */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NEW_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if renamed hard link '%s' exists\n",
+ MOVE_LINK_TEST_HARD_LINK_NEW_NAME);
+ PART_ERROR(H5Lmove_hard_rename);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" renamed hard link did not exist\n");
+ PART_ERROR(H5Lmove_hard_rename);
+ }
+
+ /* Verify the old link is gone */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_HARD_LINK_NAME4, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if old hard link '%s' exists\n",
+ MOVE_LINK_TEST_HARD_LINK_NAME4);
+ PART_ERROR(H5Lmove_hard_rename);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" old hard link exists\n");
+ PART_ERROR(H5Lmove_hard_rename);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lmove_hard_rename);
+
+ PART_BEGIN(H5Lmove_soft_no_check)
+ {
+ TESTING_2("H5Lmove on soft link (moved link's properties not checked)");
+
+ /* Try to move a soft link */
+ if (H5Lcreate_soft(MOVE_LINK_TEST_SOFT_LINK_TARGET_PATH, src_grp_id,
+ MOVE_LINK_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", MOVE_LINK_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lmove_soft_no_check);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n", MOVE_LINK_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lmove_soft_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link did not exist\n");
+ PART_ERROR(H5Lmove_soft_no_check);
+ }
+
+ /* Verify the link doesn't currently exist in the target group */
+ if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n", MOVE_LINK_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lmove_soft_no_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link existed in target group before move!\n");
+ PART_ERROR(H5Lmove_soft_no_check);
+ }
+
+ /* Move the link */
+ if (H5Lmove(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME, dst_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to move link '%s'\n", MOVE_LINK_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lmove_soft_no_check);
+ }
+
+ /* Verify the link has been moved */
+ if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n", MOVE_LINK_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lmove_soft_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link did not exist\n");
+ PART_ERROR(H5Lmove_soft_no_check);
+ }
+
+ /* Verify the old link is gone */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if old soft link '%s' exists\n",
+ MOVE_LINK_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lmove_soft_no_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" old soft link exists\n");
+ PART_ERROR(H5Lmove_soft_no_check);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lmove_soft_no_check);
+
+ PART_BEGIN(H5Lmove_soft_check)
+ {
+ H5L_info2_t orig_info, new_info;
+ char orig_link_val[MOVE_LINK_TEST_LINK_VAL_BUF_SIZE];
+ char new_link_val[MOVE_LINK_TEST_LINK_VAL_BUF_SIZE];
+
+ TESTING_2("H5Lmove on soft link (moved link's properties checked)");
+
+ /* Try to move a soft link */
+ if (H5Lcreate_soft(MOVE_LINK_TEST_SOFT_LINK_TARGET_PATH, src_grp_id,
+ MOVE_LINK_TEST_SOFT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", MOVE_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n", MOVE_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link did not exist\n");
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ /* Retrieve the link's info */
+ if (H5Lget_info2(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME2, &orig_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve info for link '%s'\n", MOVE_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ /* Retrieve the link's value */
+ if (H5Lget_val(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME2, orig_link_val,
+ MOVE_LINK_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve value for soft link '%s'\n", MOVE_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ /* Verify the link doesn't currently exist in the target group */
+ if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n", MOVE_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link existed in target group before move!\n");
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ /* Move the link */
+ if (H5Lmove(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME2, dst_grp_id,
+ MOVE_LINK_TEST_SOFT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to move link '%s'\n", MOVE_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ /* Verify the link has been moved */
+ if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n", MOVE_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link did not exist\n");
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ /* Verify the old link is gone */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if old soft link '%s' exists\n",
+ MOVE_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" old soft link exists\n");
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ /* Retrieve the moved link's info */
+ if (H5Lget_info2(dst_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME2, &new_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve info for link '%s'\n", MOVE_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ if (new_info.type != orig_info.type) {
+ H5_FAILED();
+ HDprintf(" moved link's link type doesn't match original link's type\n");
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ if (new_info.u.val_size != orig_info.u.val_size) {
+ H5_FAILED();
+ HDprintf(" moved soft link's value size of %llu doesn't match original link's value size "
+ "of %llu\n",
+ (unsigned long long)new_info.u.val_size, (unsigned long long)orig_info.u.val_size);
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ if (new_info.corder_valid != orig_info.corder_valid) {
+ H5_FAILED();
+ HDprintf(" moved link's 'corder_valid' field doesn't match original link's 'corder_valid' "
+ "field\n");
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ if (new_info.corder_valid && orig_info.corder_valid && (new_info.corder != orig_info.corder)) {
+ H5_FAILED();
+ HDprintf(" moved link's creation order value %" PRId64
+ " doesn't match original link's creation order value %" PRId64 "\n",
+ new_info.corder, orig_info.corder);
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ if (new_info.cset != orig_info.cset) {
+ H5_FAILED();
+ HDprintf(" moved link's character set doesn't match original link's character set\n");
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ /* Check the soft link's value */
+ if (H5Lget_val(dst_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME2, new_link_val,
+ MOVE_LINK_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't retrieve value for soft link '%s'\n", MOVE_LINK_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ if (HDstrncmp(orig_link_val, new_link_val, MOVE_LINK_TEST_LINK_VAL_BUF_SIZE)) {
+ H5_FAILED();
+ HDprintf(" moved soft link's value '%s' doesn't match original link's value '%s'\n",
+ new_link_val, orig_link_val);
+ PART_ERROR(H5Lmove_soft_check);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lmove_soft_check);
+
+        PART_BEGIN(H5Lmove_soft_same_loc)
+        {
+            TESTING_2("H5Lmove on soft link using H5L_SAME_LOC");
+
+            /* Try to move a soft link */
+            if (H5Lcreate_soft(MOVE_LINK_TEST_SOFT_LINK_TARGET_PATH, src_grp_id,
+                               MOVE_LINK_TEST_SOFT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create soft link '%s'\n", MOVE_LINK_TEST_SOFT_LINK_NAME3);
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            /* Verify the link has been created */
+            if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if soft link '%s' exists\n", MOVE_LINK_TEST_SOFT_LINK_NAME3);
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    soft link did not exist\n");
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            /* Verify the link doesn't currently exist in the target group */
+            if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_SAME_LOC_NAME, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if soft link '%s' exists\n",
+                         MOVE_LINK_TEST_SOFT_LINK_SAME_LOC_NAME);
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            if (link_exists) {
+                H5_FAILED();
+                HDprintf("    soft link existed in target group before move!\n");
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            /* Rename the link using H5L_SAME_LOC as the first parameter to H5Lmove */
+            if (H5Lmove(H5L_SAME_LOC, MOVE_LINK_TEST_SOFT_LINK_NAME3, src_grp_id,
+                        MOVE_LINK_TEST_SOFT_LINK_SAME_LOC_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to move link '%s' using H5L_SAME_LOC as first parameter to H5Lmove\n",
+                         MOVE_LINK_TEST_SOFT_LINK_NAME3);
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            /* Ensure the link has been renamed */
+            if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if soft link '%s' exists\n", MOVE_LINK_TEST_SOFT_LINK_NAME3);
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            if (link_exists) {
+                H5_FAILED();
+                HDprintf("    original soft link existed in target group after move!\n");
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_SAME_LOC_NAME, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if soft link '%s' exists\n",
+                         MOVE_LINK_TEST_SOFT_LINK_SAME_LOC_NAME);
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    soft link did not exist after move!\n");
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            /* Rename the link back using H5L_SAME_LOC as the third parameter to H5Lmove */
+            if (H5Lmove(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_SAME_LOC_NAME, H5L_SAME_LOC,
+                        MOVE_LINK_TEST_SOFT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to move link '%s' using H5L_SAME_LOC as third parameter to H5Lmove\n",
+                         MOVE_LINK_TEST_SOFT_LINK_SAME_LOC_NAME);
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            /* Verify the link has been renamed back */
+            if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if soft link '%s' exists\n", MOVE_LINK_TEST_SOFT_LINK_NAME3);
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                /* Fixed copy/paste error: this PART tests a soft link, not a hard link */
+                HDprintf("    original soft link did not exist after moving the link back!\n");
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_SAME_LOC_NAME, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if old soft link '%s' exists\n",
+                         MOVE_LINK_TEST_SOFT_LINK_SAME_LOC_NAME);
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            if (link_exists) {
+                H5_FAILED();
+                HDprintf("    renamed soft link exists after moving the link back!\n");
+                PART_ERROR(H5Lmove_soft_same_loc);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lmove_soft_same_loc);
+
+ PART_BEGIN(H5Lmove_soft_rename)
+ {
+ TESTING_2("H5Lmove to rename soft link without moving it");
+
+ /* Try to rename a soft link */
+ if (H5Lcreate_soft(MOVE_LINK_TEST_SOFT_LINK_TARGET_PATH, src_grp_id,
+ MOVE_LINK_TEST_SOFT_LINK_NAME4, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", MOVE_LINK_TEST_SOFT_LINK_NAME4);
+ PART_ERROR(H5Lmove_soft_rename);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME4, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n", MOVE_LINK_TEST_SOFT_LINK_NAME4);
+ PART_ERROR(H5Lmove_soft_rename);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link did not exist\n");
+ PART_ERROR(H5Lmove_soft_rename);
+ }
+
+ /* Verify the renamed link doesn't currently exist in the source group */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NEW_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if renamed soft link '%s' exists\n",
+ MOVE_LINK_TEST_SOFT_LINK_NEW_NAME);
+ PART_ERROR(H5Lmove_soft_rename);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" renamed soft link existed in source group before move!\n");
+ PART_ERROR(H5Lmove_soft_rename);
+ }
+
+ /* Rename the link */
+ if (H5Lmove(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME4, src_grp_id,
+ MOVE_LINK_TEST_SOFT_LINK_NEW_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to rename link '%s'\n", MOVE_LINK_TEST_SOFT_LINK_NAME4);
+ PART_ERROR(H5Lmove_soft_rename);
+ }
+
+ /* Verify the link has been renamed */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NEW_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if renamed soft link '%s' exists\n",
+ MOVE_LINK_TEST_SOFT_LINK_NEW_NAME);
+ PART_ERROR(H5Lmove_soft_rename);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" renamed soft link did not exist\n");
+ PART_ERROR(H5Lmove_soft_rename);
+ }
+
+ /* Verify the old link is gone */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_SOFT_LINK_NAME4, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if old soft link '%s' exists\n",
+ MOVE_LINK_TEST_SOFT_LINK_NAME4);
+ PART_ERROR(H5Lmove_soft_rename);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" old soft link exists\n");
+ PART_ERROR(H5Lmove_soft_rename);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lmove_soft_rename);
+
+ PART_BEGIN(H5Lmove_external_no_check)
+ {
+ TESTING_2("H5Lmove on external link (moved link's properties not checked)");
+#ifndef NO_EXTERNAL_LINKS
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lmove_external_no_check);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lmove_external_no_check);
+ }
+
+ /* Try to move an external link */
+ if (H5Lcreate_external(ext_link_filename, "/", src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", MOVE_LINK_TEST_EXTERN_LINK_NAME);
+ PART_ERROR(H5Lmove_external_no_check);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_NAME);
+ PART_ERROR(H5Lmove_external_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link did not exist\n");
+ PART_ERROR(H5Lmove_external_no_check);
+ }
+
+ /* Verify the link doesn't currently exist in the target group */
+ if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_NAME);
+ PART_ERROR(H5Lmove_external_no_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link existed in target group before move!\n");
+ PART_ERROR(H5Lmove_external_no_check);
+ }
+
+ /* Move the link */
+ if (H5Lmove(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME, dst_grp_id,
+ MOVE_LINK_TEST_EXTERN_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to move link '%s'\n", MOVE_LINK_TEST_EXTERN_LINK_NAME);
+ PART_ERROR(H5Lmove_external_no_check);
+ }
+
+ /* Verify the link has been moved */
+ if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_NAME);
+ PART_ERROR(H5Lmove_external_no_check);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link did not exist\n");
+ PART_ERROR(H5Lmove_external_no_check);
+ }
+
+ /* Verify the old link is gone */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if old external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_NAME);
+ PART_ERROR(H5Lmove_external_no_check);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" old external link exists\n");
+ PART_ERROR(H5Lmove_external_no_check);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lmove_external_no_check);
+#endif
+ }
+ PART_END(H5Lmove_external_no_check);
+
+ H5E_BEGIN_TRY
+ {
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+        PART_BEGIN(H5Lmove_external_check)
+        {
+#ifndef NO_EXTERNAL_LINKS
+            H5L_info2_t orig_info, new_info;
+            const char *orig_filename, *new_filename;
+            const char *orig_objname, *new_objname;
+            unsigned    unpack_flags = 0;
+            char        orig_link_val[MOVE_LINK_TEST_LINK_VAL_BUF_SIZE];
+            char        new_link_val[MOVE_LINK_TEST_LINK_VAL_BUF_SIZE];
+#endif
+
+            TESTING_2("H5Lmove on external link (moved link's properties checked)");
+#ifndef NO_EXTERNAL_LINKS
+            HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+                       EXTERNAL_LINK_TEST_FILE_NAME);
+
+            if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            if (H5Fclose(ext_file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close file '%s'\n", ext_link_filename);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            /* Try to move an external link */
+            if (H5Lcreate_external(ext_link_filename, "/", src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME2,
+                                   H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link '%s'\n", MOVE_LINK_TEST_EXTERN_LINK_NAME2);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            /* Verify the link has been created */
+            if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME2, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if external link '%s' exists\n",
+                         MOVE_LINK_TEST_EXTERN_LINK_NAME2);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    external link did not exist\n");
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            /* Retrieve the link's info */
+            if (H5Lget_info2(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME2, &orig_info, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve info for link '%s'\n", MOVE_LINK_TEST_EXTERN_LINK_NAME2);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            /* Retrieve the link's value */
+            if (H5Lget_val(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME2, orig_link_val,
+                           MOVE_LINK_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve value for external link '%s'\n",
+                         MOVE_LINK_TEST_EXTERN_LINK_NAME2);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            if (H5Lunpack_elink_val(orig_link_val, orig_info.u.val_size, &unpack_flags, &orig_filename,
+                                    &orig_objname) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't unpack original external link's value buffer\n");
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            /* Verify the link doesn't currently exist in the target group */
+            if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME2, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if external link '%s' exists\n",
+                         MOVE_LINK_TEST_EXTERN_LINK_NAME2);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            if (link_exists) {
+                H5_FAILED();
+                HDprintf("    external link existed in target group before move!\n");
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            /* Move the link */
+            if (H5Lmove(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME2, dst_grp_id,
+                        MOVE_LINK_TEST_EXTERN_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to move link '%s'\n", MOVE_LINK_TEST_EXTERN_LINK_NAME2);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            /* Verify the link has been moved */
+            if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME2, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if external link '%s' exists\n",
+                         MOVE_LINK_TEST_EXTERN_LINK_NAME2);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    external link did not exist\n");
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            /* Verify the old link is gone */
+            if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME2, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if old external link '%s' exists\n",
+                         MOVE_LINK_TEST_EXTERN_LINK_NAME2);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            if (link_exists) {
+                H5_FAILED();
+                HDprintf("    old external link exists\n");
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            /* Retrieve the moved link's info */
+            if (H5Lget_info2(dst_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME2, &new_info, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve info for link '%s'\n", MOVE_LINK_TEST_EXTERN_LINK_NAME2);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            if (new_info.type != orig_info.type) {
+                H5_FAILED();
+                HDprintf("    moved link's link type doesn't match original link's type\n");
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            if (new_info.u.val_size != orig_info.u.val_size) {
+                H5_FAILED();
+                HDprintf("    moved external link's value size of %llu doesn't match original link's value "
+                         "size of %llu\n",
+                         (unsigned long long)new_info.u.val_size, (unsigned long long)orig_info.u.val_size);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            if (new_info.corder_valid != orig_info.corder_valid) {
+                H5_FAILED();
+                HDprintf("    moved link's 'corder_valid' field doesn't match original link's 'corder_valid' "
+                         "field\n");
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            if (new_info.corder_valid && orig_info.corder_valid && (new_info.corder != orig_info.corder)) {
+                H5_FAILED();
+                /* corder is int64_t; use PRId64 (not %lld) to avoid a printf format/argument
+                 * type mismatch, matching the other creation-order checks in this file */
+                HDprintf("    moved link's creation order value %" PRId64
+                         " doesn't match original link's creation order value %" PRId64 "\n",
+                         new_info.corder, orig_info.corder);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            if (new_info.cset != orig_info.cset) {
+                H5_FAILED();
+                HDprintf("    moved link's character set doesn't match original link's character set\n");
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            /* Check the external link's value */
+            if (H5Lget_val(dst_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME2, new_link_val,
+                           MOVE_LINK_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't retrieve value for external link '%s'\n",
+                         MOVE_LINK_TEST_EXTERN_LINK_NAME2);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            if (H5Lunpack_elink_val(new_link_val, new_info.u.val_size, &unpack_flags, &new_filename,
+                                    &new_objname) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't unpack moved external link's value buffer\n");
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            /* Compare for equality with '!= 0'; the previous '< 0' check silently accepted any
+             * mismatch that compared lexicographically greater than the original string */
+            if (HDstrncmp(new_filename, orig_filename, HDstrlen(orig_filename)) != 0) {
+                H5_FAILED();
+                HDprintf("    moved external link's filename '%s' doesn't match original external link's "
+                         "filename '%s'\n",
+                         new_filename, orig_filename);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            if (HDstrncmp(new_objname, orig_objname, HDstrlen(orig_objname)) != 0) {
+                H5_FAILED();
+                HDprintf("    moved external link's object name '%s' doesn't match original external link's "
+                         "object name '%s'\n",
+                         new_objname, orig_objname);
+                PART_ERROR(H5Lmove_external_check);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Lmove_external_check);
+#endif
+        }
+        PART_END(H5Lmove_external_check);
+
+ H5E_BEGIN_TRY
+ {
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lmove_external_same_loc)
+ {
+ TESTING_2("H5Lmove on external link using H5L_SAME_LOC");
+#ifndef NO_EXTERNAL_LINKS
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ /* Try to move an external link */
+ if (H5Lcreate_external(ext_link_filename, "/", src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME3,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", MOVE_LINK_TEST_EXTERN_LINK_NAME3);
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_NAME3);
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link did not exist\n");
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ /* Verify the link doesn't currently exist in the target group */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_SAME_LOC_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" external link existed in target group before move!\n");
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ /* Rename the link using H5L_SAME_LOC as the first parameter to H5Lmove */
+ if (H5Lmove(H5L_SAME_LOC, MOVE_LINK_TEST_EXTERN_LINK_NAME3, src_grp_id,
+ MOVE_LINK_TEST_EXTERN_LINK_SAME_LOC_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to move link '%s'\n", MOVE_LINK_TEST_EXTERN_LINK_NAME3);
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ /* Ensure the link has been renamed */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_NAME3);
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" original external link existed in target group after move!\n");
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_SAME_LOC_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link did not exist after move!\n");
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ /* Rename the link back using H5L_SAME_LOC as the third parameter to H5Lmove */
+ if (H5Lmove(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_SAME_LOC_NAME, H5L_SAME_LOC,
+ MOVE_LINK_TEST_EXTERN_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to move link '%s'\n", MOVE_LINK_TEST_EXTERN_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ /* Verify the link has been renamed back */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_NAME3);
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" original external link did not exist after moving the link back!\n");
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_SAME_LOC_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if old external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_SAME_LOC_NAME);
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" renamed external link exists after moving the link back!\n");
+ PART_ERROR(H5Lmove_external_same_loc);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lmove_external_same_loc);
+#endif
+ }
+ PART_END(H5Lmove_external_same_loc);
+
+ H5E_BEGIN_TRY
+ {
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lmove_external_rename)
+ {
+ TESTING_2("H5Lmove to rename external link without moving it");
+#ifndef NO_EXTERNAL_LINKS
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lmove_external_rename);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lmove_external_rename);
+ }
+
+ /* Try to move an external link */
+ if (H5Lcreate_external(ext_link_filename, "/", src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME4,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", MOVE_LINK_TEST_EXTERN_LINK_NAME4);
+ PART_ERROR(H5Lmove_external_rename);
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME4, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_NAME4);
+ PART_ERROR(H5Lmove_external_rename);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link did not exist\n");
+ PART_ERROR(H5Lmove_external_rename);
+ }
+
+ /* Verify the renamed link doesn't currently exist in the source group */
+ if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NEW_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if renamed external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_NEW_NAME);
+ PART_ERROR(H5Lmove_external_rename);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" renamed external link existed in source group before move!\n");
+ PART_ERROR(H5Lmove_external_rename);
+ }
+
+ /* Rename the link */
+ if (H5Lmove(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME4, src_grp_id,
+ MOVE_LINK_TEST_EXTERN_LINK_NEW_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to rename link '%s'\n", MOVE_LINK_TEST_EXTERN_LINK_NAME4);
+ PART_ERROR(H5Lmove_external_rename);
+ }
+
+ /* Verify the link has been renamed */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NEW_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if renamed external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_NEW_NAME);
+ PART_ERROR(H5Lmove_external_rename);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" renamed external link did not exist\n");
+ PART_ERROR(H5Lmove_external_rename);
+ }
+
+ /* Verify the old link is gone */
+ if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_TEST_EXTERN_LINK_NAME4, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if old external link '%s' exists\n",
+ MOVE_LINK_TEST_EXTERN_LINK_NAME4);
+ PART_ERROR(H5Lmove_external_rename);
+ }
+
+ if (link_exists) {
+ H5_FAILED();
+ HDprintf(" old external link exists\n");
+ PART_ERROR(H5Lmove_external_rename);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lmove_external_rename);
+#endif
+ }
+ PART_END(H5Lmove_external_rename);
+
+ H5E_BEGIN_TRY
+ {
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lmove_ud_no_check)
+ {
+ TESTING_2("H5Lmove on user-defined link (moved link's properties not checked)");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Lmove_ud_no_check);
+ }
+ PART_END(H5Lmove_ud_no_check);
+
+ PART_BEGIN(H5Lmove_ud_check)
+ {
+ TESTING_2("H5Lmove on user-defined link (moved link's properties checked)");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Lmove_ud_check);
+ }
+ PART_END(H5Lmove_ud_check);
+
+ PART_BEGIN(H5Lmove_ud_same_loc)
+ {
+ TESTING_2("H5Lmove on user-defined link using H5L_SAME_LOC");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Lmove_ud_same_loc);
+ }
+ PART_END(H5Lmove_ud_same_loc);
+
+ PART_BEGIN(H5Lmove_ud_rename)
+ {
+ TESTING_2("H5Lmove to rename user-defined link without moving it");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Lmove_ud_rename);
+ }
+ PART_END(H5Lmove_ud_rename);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Gclose(dst_grp_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(src_grp_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(dst_grp_id);
+ H5Gclose(src_grp_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ H5Fclose(ext_file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that using H5Lmove to move links into a
+ * group which already contains links will cause the new links
+ * to have creation order values ranging from the target group's
+ * maximum link creation order value and upwards. This is to
+ * check that it is not possible to run into the situation where
+ * H5Lmove might cause a group to have two links with the same
+ * creation order values.
+ */
+static int
+test_move_links_into_group_with_links(void)
+{
+    H5L_info2_t link_info;
+    size_t      i;
+    hid_t       file_id         = H5I_INVALID_HID;
+    hid_t       container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t       src_grp_id = H5I_INVALID_HID, dst_grp_id = H5I_INVALID_HID;
+    hid_t       gcpl_id = H5I_INVALID_HID;
+    char        link_name[MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_BUF_SIZE];
+
+    TESTING("H5Lmove adjusting creation order values for moved links");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or basic or hard link, or creation order aren't "
+                 "supported with this connector\n");
+        return 0;
+    }
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Note: the test's parent subgroup itself doesn't need creation order
+     * tracking; only the source/destination groups below do.
+     */
+    if ((group_id = H5Gcreate2(container_group, MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Both the source and destination groups track link creation order so
+     * that the corder values of moved links can be verified.
+     */
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create GCPL for link creation order tracking\n");
+        goto error;
+    }
+
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set link creation order tracking\n");
+        goto error;
+    }
+
+    if ((src_grp_id = H5Gcreate2(group_id, MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_SRC_GRP_NAME, H5P_DEFAULT,
+                                 gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_SRC_GRP_NAME);
+        goto error;
+    }
+
+    if ((dst_grp_id = H5Gcreate2(group_id, MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_DST_GRP_NAME, H5P_DEFAULT,
+                                 gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_DST_GRP_NAME);
+        goto error;
+    }
+
+    /* Create several links in the source group */
+    for (i = 0; i < MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_NUM_LINKS; i++) {
+        snprintf(link_name, MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_BUF_SIZE, "link_to_move%d", (int)i);
+
+        /* Hard link to the group itself ("."); the link target is irrelevant
+         * here -- only the creation order bookkeeping is under test.
+         */
+        if (H5Lcreate_hard(src_grp_id, ".", src_grp_id, link_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create link '%s' in source group\n", link_name);
+            goto error;
+        }
+
+        /* Check the current creation order value for each link */
+        memset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info2(src_grp_id, link_name, &link_info, H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to retrieve info for link '%s'\n", link_name);
+            goto error;
+        }
+
+        if (!link_info.corder_valid) {
+            H5_FAILED();
+            HDprintf("    creation order value for newly-created link '%s' was marked as not valid!\n",
+                     link_name);
+            goto error;
+        }
+
+        /* Newly-created links must have corder values 0..NUM_LINKS-1 in
+         * creation order.
+         */
+        if (link_info.corder != (int64_t)i) {
+            H5_FAILED();
+            HDprintf("    creation order value %lld for link '%s' did not match expected value %lld\n",
+                     (long long)link_info.corder, link_name, (long long)i);
+            goto error;
+        }
+    }
+
+    /* Create several links in the destination group */
+    for (i = 0; i < MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_NUM_LINKS; i++) {
+        snprintf(link_name, MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_BUF_SIZE, "link%d", (int)i);
+
+        if (H5Lcreate_hard(dst_grp_id, ".", dst_grp_id, link_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create link '%s' in destination group\n", link_name);
+            goto error;
+        }
+    }
+
+    /* Move all the links from the source group into the destination group */
+    for (i = 0; i < MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_NUM_LINKS; i++) {
+        snprintf(link_name, MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_BUF_SIZE, "link_to_move%d", (int)i);
+
+        if (H5Lmove(src_grp_id, link_name, dst_grp_id, link_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to move link '%s' from source group to destination group\n", link_name);
+            goto error;
+        }
+
+        /* Check that the creation order value for each moved link has been adjusted */
+        memset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info2(dst_grp_id, link_name, &link_info, H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to retrieve info for link '%s'\n", link_name);
+            goto error;
+        }
+
+        if (!link_info.corder_valid) {
+            H5_FAILED();
+            HDprintf("    creation order value for moved link '%s' was marked as not valid!\n", link_name);
+            goto error;
+        }
+
+        /* The destination group already held NUM_LINKS links, so the i-th
+         * moved link must receive corder value i + NUM_LINKS -- i.e. values
+         * continue upward from the destination group's previous maximum,
+         * never colliding with the pre-existing links' values.
+         */
+        if (link_info.corder != (int64_t)(i + MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_NUM_LINKS)) {
+            H5_FAILED();
+            HDprintf("    creation order value for moved link '%s' was not adjusted after move! It should "
+                     "have been %lld but was %lld\n",
+                     link_name, (long long)(i + MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_NUM_LINKS),
+                     (long long)link_info.corder);
+            goto error;
+        }
+    }
+
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(dst_grp_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(src_grp_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we're already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(gcpl_id);
+        H5Gclose(dst_grp_id);
+        H5Gclose(src_grp_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check the behavior of moving a link across files.
+ * This should fail for hard links but succeed for soft and
+ * external links (and user-defined links of those types).
+ *
+ * TODO: Ideally, tests should be written to verify that the
+ * moved links retain their original properties.
+ */
+static int
+test_move_link_across_files(void)
+{
+    TESTING("link moving across files");
+
+    /* TODO: unimplemented placeholder -- this test currently only reports
+     * itself as skipped. Per the comment block above, moving a hard link
+     * across files is expected to fail, while soft and external links (and
+     * user-defined links of those types) are expected to move successfully.
+     */
+
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to check that a group's always-increasing
+ * maximum link creation order value gets reset once
+ * all the links have been moved out of the group.
+ */
+static int
+test_move_link_reset_grp_max_crt_order(void)
+{
+#ifndef NO_MAX_LINK_CRT_ORDER_RESET
+    H5G_info_t grp_info;
+    size_t     i;
+    hid_t      file_id         = H5I_INVALID_HID;
+    hid_t      container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t      src_grp_id = H5I_INVALID_HID, dst_grp_id = H5I_INVALID_HID;
+    hid_t      gcpl_id = H5I_INVALID_HID;
+    char       link_name[MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_BUF_SIZE];
+#endif
+
+    TESTING("H5Lmove of all links out of group resets group's maximum link creation order value");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, more or hard link, or creation order aren't "
+                 "supported with this connector\n");
+        return 0;
+    }
+
+#ifndef NO_MAX_LINK_CRT_ORDER_RESET
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* All groups below track link creation order so that the groups'
+     * max_corder values can be inspected with H5Gget_info.
+     */
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create GCPL for link creation order tracking\n");
+        goto error;
+    }
+
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set link creation order tracking\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    if ((src_grp_id = H5Gcreate2(group_id, MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_SRC_GRP_NAME, H5P_DEFAULT,
+                                 gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_SRC_GRP_NAME);
+        goto error;
+    }
+
+    if ((dst_grp_id = H5Gcreate2(group_id, MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_DST_GRP_NAME, H5P_DEFAULT,
+                                 gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_DST_GRP_NAME);
+        goto error;
+    }
+
+    /* Create several links inside the source group */
+    for (i = 0; i < MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS; i++) {
+        snprintf(link_name, MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_BUF_SIZE, "link%d", (int)i);
+
+        if (H5Lcreate_hard(src_grp_id, ".", src_grp_id, link_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create hard link '%s' in source group\n", link_name);
+            goto error;
+        }
+    }
+
+    /*
+     * Move links out of the source group and into the destination group, checking the
+     * source group's maximum creation order value each time. The maximum is
+     * always-increasing, so it must stay at NUM_LINKS until the group becomes
+     * empty, at which point it is allowed to reset (checked after the loop).
+     */
+    for (i = 0; i < MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS; i++) {
+        memset(&grp_info, 0, sizeof(grp_info));
+
+        if (H5Gget_info(src_grp_id, &grp_info) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to retrieve source group's info\n");
+            goto error;
+        }
+
+        if (grp_info.max_corder != MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS) {
+            H5_FAILED();
+            /* Cast the macro to long long to match the %lld conversion
+             * specifier (passing a plain int for %lld is undefined behavior).
+             */
+            HDprintf("    source group's maximum creation order value got adjusted to %lld during link "
+                     "moving; value should have remained at %lld\n",
+                     (long long)grp_info.max_corder, (long long)MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS);
+            goto error;
+        }
+
+        snprintf(link_name, MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_BUF_SIZE, "link%d", (int)i);
+
+        if (H5Lmove(src_grp_id, link_name, dst_grp_id, link_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to move link '%s' to destination group\n", link_name);
+            goto error;
+        }
+    }
+
+    /*
+     * Ensure the source group's maximum creation order value has now
+     * reset to 0 after all the links have been moved out of it.
+     */
+    memset(&grp_info, 0, sizeof(grp_info));
+
+    if (H5Gget_info(src_grp_id, &grp_info) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve source group's info\n");
+        goto error;
+    }
+
+    if (grp_info.max_corder != 0) {
+        H5_FAILED();
+        HDprintf("    source group's maximum creation order value didn't reset to 0 after moving all links "
+                 "out of it; value is still %lld\n",
+                 (long long)grp_info.max_corder);
+        goto error;
+    }
+
+    /* For good measure, check that destination group's max. creation order value is as expected */
+    memset(&grp_info, 0, sizeof(grp_info));
+
+    if (H5Gget_info(dst_grp_id, &grp_info) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to retrieve destination group's info\n");
+        goto error;
+    }
+
+    if (grp_info.max_corder != MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS) {
+        H5_FAILED();
+        /* Same %lld/argument-type fix as above */
+        HDprintf("    destination group's maximum creation order value of %lld didn't match expected value "
+                 "of %lld after moving all links into it\n",
+                 (long long)grp_info.max_corder, (long long)MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS);
+        goto error;
+    }
+
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(dst_grp_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(src_grp_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we're already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(gcpl_id);
+        H5Gclose(dst_grp_id);
+        H5Gclose(src_grp_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+#else
+    SKIPPED();
+    return 0;
+#endif
+}
+
+/*
+ * A test to check that H5Lmove fails when it is given
+ * invalid parameters.
+ */
+static int
+test_move_link_invalid_params(void)
+{
+    htri_t link_exists;
+    herr_t err_ret = -1;
+    hid_t  file_id = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  src_grp_id = H5I_INVALID_HID, dst_grp_id = H5I_INVALID_HID;
+    char   ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+    hid_t  ext_file_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("H5Lmove with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, more or hard link aren't supported with this "
+                 "connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, MOVE_LINK_INVALID_PARAMS_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", MOVE_LINK_INVALID_PARAMS_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    if ((src_grp_id = H5Gcreate2(group_id, MOVE_LINK_INVALID_PARAMS_TEST_SRC_GROUP_NAME, H5P_DEFAULT,
+                                 H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", MOVE_LINK_INVALID_PARAMS_TEST_SRC_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dst_grp_id = H5Gcreate2(group_id, MOVE_LINK_INVALID_PARAMS_TEST_DST_GROUP_NAME, H5P_DEFAULT,
+                                 H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", MOVE_LINK_INVALID_PARAMS_TEST_DST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Create the single hard link that every invalid H5Lmove call below will
+     * attempt (and must fail) to move from src_grp_id to dst_grp_id.
+     */
+    if (H5Lcreate_hard(group_id, ".", src_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+                       H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create hard link '%s'\n", MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+        goto error;
+    }
+
+    /* Verify the link has been created */
+    if ((link_exists = H5Lexists(src_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT)) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if hard link '%s' exists\n",
+                 MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    hard link did not exist\n");
+        goto error;
+    }
+
+    /* Verify the link doesn't currently exist in the target group */
+    if ((link_exists = H5Lexists(dst_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT)) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if hard link '%s' exists\n",
+                 MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+        goto error;
+    }
+
+    if (link_exists) {
+        H5_FAILED();
+        HDprintf("    hard link existed in target group before move!\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /* Each PART feeds H5Lmove one invalid argument inside
+         * H5E_BEGIN_TRY/H5E_END_TRY (to suppress expected error output) and
+         * fails if the call unexpectedly succeeds.
+         */
+        PART_BEGIN(H5Lmove_invalid_src_loc_id)
+        {
+            TESTING_2("H5Lmove with an invalid source location ID");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lmove(H5I_INVALID_HID, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, dst_grp_id,
+                                  MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lmove succeeded with an invalid source location ID!\n");
+                PART_ERROR(H5Lmove_invalid_src_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lmove_invalid_src_loc_id);
+
+        PART_BEGIN(H5Lmove_invalid_src_name)
+        {
+            TESTING_2("H5Lmove with an invalid source name");
+
+            /* Both a NULL source name and an empty one must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lmove(src_grp_id, NULL, dst_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME,
+                                  H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lmove succeeded with a NULL source name!\n");
+                PART_ERROR(H5Lmove_invalid_src_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lmove(src_grp_id, "", dst_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME,
+                                  H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lmove succeeded with an invalid source name of ''!\n");
+                PART_ERROR(H5Lmove_invalid_src_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lmove_invalid_src_name);
+
+        PART_BEGIN(H5Lmove_invalid_dst_loc_id)
+        {
+            TESTING_2("H5Lmove with an invalid destination location ID");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lmove(src_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5I_INVALID_HID,
+                                  MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lmove succeeded with an invalid destination location ID!\n");
+                PART_ERROR(H5Lmove_invalid_dst_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lmove_invalid_dst_loc_id);
+
+        PART_BEGIN(H5Lmove_invalid_dst_name)
+        {
+            TESTING_2("H5Lmove with an invalid destination name");
+
+            /* Both a NULL destination name and an empty one must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lmove(src_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, dst_grp_id, NULL,
+                                  H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lmove succeeded with a NULL destination name!\n");
+                PART_ERROR(H5Lmove_invalid_dst_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lmove(src_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, dst_grp_id, "",
+                                  H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lmove succeeded with an invalid destination name of ''!\n");
+                PART_ERROR(H5Lmove_invalid_dst_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lmove_invalid_dst_name);
+
+        PART_BEGIN(H5Lmove_invalid_lcpl)
+        {
+            TESTING_2("H5Lmove with an invalid LCPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lmove(src_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, dst_grp_id,
+                                  MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5I_INVALID_HID, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lmove succeeded with an invalid LCPL!\n");
+                PART_ERROR(H5Lmove_invalid_lcpl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lmove_invalid_lcpl);
+
+        PART_BEGIN(H5Lmove_invalid_lapl)
+        {
+            TESTING_2("H5Lmove with an invalid LAPL");
+#ifndef NO_INVALID_PROPERTY_LIST_TESTS
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lmove(src_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, dst_grp_id,
+                                  MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lmove succeeded with an invalid LAPL!\n");
+                PART_ERROR(H5Lmove_invalid_lapl);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Lmove_invalid_lapl);
+#endif
+        }
+        PART_END(H5Lmove_invalid_lapl);
+
+        PART_BEGIN(H5Lmove_existence)
+        {
+            TESTING_2("valid link existence in original group after previous invalid H5Lmove calls");
+
+            /* Verify the link hasn't been moved */
+            if ((link_exists =
+                     H5Lexists(src_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if hard link '%s' exists\n",
+                         MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+                PART_ERROR(H5Lmove_existence);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    hard link didn't exist in source group after invalid move!\n");
+                PART_ERROR(H5Lmove_existence);
+            }
+
+            if ((link_exists =
+                     H5Lexists(dst_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if hard link '%s' exists\n",
+                         MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+                PART_ERROR(H5Lmove_existence);
+            }
+
+            if (link_exists) {
+                H5_FAILED();
+                HDprintf("    hard link existed in target group after invalid move!\n");
+                PART_ERROR(H5Lmove_existence);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lmove_existence);
+
+        PART_BEGIN(H5Lmove_same_location)
+        {
+            TESTING_2("H5Lmove with an invalid same location");
+
+            /* Move a group within the file. Both of source and destination use
+             * H5L_SAME_LOC. Should fail. */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lmove(H5L_SAME_LOC, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5L_SAME_LOC,
+                                  MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lmove succeeded with an invalid same location!\n");
+                PART_ERROR(H5Lmove_same_location);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lmove_same_location);
+
+        PART_BEGIN(H5Lmove_across_files)
+        {
+            TESTING_2("H5Lmove into another file");
+
+            HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+                       EXTERNAL_LINK_TEST_FILE_NAME);
+
+            if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
+                PART_ERROR(H5Lmove_across_files);
+            }
+
+            /* Move a group across files. */
+            /* Moving a hard link between files is invalid and must fail */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lmove(src_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, ext_file_id,
+                                  MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lmove succeeded in moving a hard link across files!\n");
+                PART_ERROR(H5Lmove_across_files);
+            }
+
+            /* Ensure that original link still exists */
+            if ((link_exists =
+                     H5Lexists(src_grp_id, MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if original link '%s' exists after invalid link move\n",
+                         MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+                PART_ERROR(H5Lmove_across_files);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    original link '%s' didn't exist after failed move!\n",
+                         MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+                PART_ERROR(H5Lmove_across_files);
+            }
+
+            if (H5Fclose(ext_file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close a file!\n");
+                PART_ERROR(H5Lmove_across_files);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lmove_across_files);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Gclose(dst_grp_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(src_grp_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; ext_file_id is also closed here in case a PART
+     * failed after creating it but before closing it.
+     */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(dst_grp_id);
+        H5Gclose(src_grp_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(ext_file_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a soft or external link's value can
+ * be retrieved by using H5Lget_val and H5Lget_val_by_idx.
+ */
+static int
+test_get_link_val(void)
+{
+ H5L_info2_t link_info;
+#ifndef NO_EXTERNAL_LINKS
+ const char *ext_link_filepath;
+ const char *ext_link_val;
+ unsigned ext_link_flags;
+#endif
+ htri_t link_exists;
+ size_t link_val_size;
+ char link_val_buf[GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE];
+ hid_t file_id = H5I_INVALID_HID, ext_file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t subgroup_id = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+#ifndef NO_EXTERNAL_LINKS
+ char ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+#endif
+
+ TESTING_MULTIPART("link value retrieval");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, basic, more, soft, external link, or creation "
+ "order aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create GCPL for link creation order tracking\n");
+ goto error;
+ }
+
+ if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set link creation order tracking\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, GET_LINK_VAL_TEST_SUBGROUP_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n", GET_LINK_VAL_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Lget_val_soft)
+ {
+ const char *link_target = "/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_TEST_SUBGROUP_NAME;
+
+ TESTING_2("H5Lget_val on soft link");
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_VAL_TEST_SUBGROUP1_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_VAL_TEST_SUBGROUP1_NAME);
+ PART_ERROR(H5Lget_val_soft);
+ }
+
+ if (H5Lcreate_soft(link_target, subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_soft);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_soft);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_soft);
+ }
+
+ if (H5Lget_info2(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME, &link_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve soft link's info\n");
+ PART_ERROR(H5Lget_val_soft);
+ }
+
+            /* Soft link value length includes the NUL terminator */
+            link_val_size = strlen(link_target) + 1;
+            if (link_info.u.val_size != link_val_size) {
+                H5_FAILED();
+                HDprintf("    link value size %zu did not match expected size of %zu\n", link_info.u.val_size,
+                         link_val_size);
+                /* Fixed: error path previously jumped to the wrong part label
+                 * (H5Lget_val_by_idx_soft_crt_order_increasing) from inside the
+                 * H5Lget_val_soft part */
+                PART_ERROR(H5Lget_val_soft);
+            }
+
+ if (H5Lget_val(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link value\n");
+ PART_ERROR(H5Lget_val_soft);
+ }
+
+ if (HDstrncmp(link_val_buf, link_target, link_val_size)) {
+ H5_FAILED();
+ HDprintf(" soft link value did not match\n");
+ PART_ERROR(H5Lget_val_soft);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_VAL_TEST_SUBGROUP1_NAME);
+ PART_ERROR(H5Lget_val_soft);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_val_soft);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_val_external)
+ {
+#ifndef NO_EXTERNAL_LINKS
+ const char *ext_obj_name = "/";
+#endif
+
+ TESTING_2("H5Lget_val on external link");
+#ifndef NO_EXTERNAL_LINKS
+ HDmemset(&link_info, 0, sizeof(link_info));
+
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lget_val_external);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lget_val_external);
+ }
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_VAL_TEST_SUBGROUP2_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_VAL_TEST_SUBGROUP2_NAME);
+ PART_ERROR(H5Lget_val_external);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, ext_obj_name, subgroup_id,
+ GET_LINK_VAL_TEST_EXT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_val_external);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_VAL_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_val_external);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_val_external);
+ }
+
+ if (H5Lget_info2(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME, &link_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve external link's info\n");
+ PART_ERROR(H5Lget_val_external);
+ }
+
+            /* External link value layout: flags byte + file name + NUL + object path + NUL */
+            link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_obj_name) + 1;
+            if (link_info.u.val_size != link_val_size) {
+                H5_FAILED();
+                /* Fixed: both arguments are size_t, so use %zu (matching the rest of
+                 * this file) instead of %lld, which is undefined behavior here */
+                HDprintf("    link value size %zu did not match expected size of %zu\n",
+                         link_info.u.val_size, link_val_size);
+                PART_ERROR(H5Lget_val_external);
+            }
+
+ if (H5Lget_val(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link value\n");
+ PART_ERROR(H5Lget_val_external);
+ }
+
+ if (H5Lunpack_elink_val(link_val_buf, link_info.u.val_size, &ext_link_flags, &ext_link_filepath,
+ &ext_link_val) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't unpack external link value buffer\n");
+ PART_ERROR(H5Lget_val_external);
+ }
+
+ if (HDstrncmp(ext_link_filepath, ext_link_filename, strlen(ext_link_filename) + 1)) {
+ H5_FAILED();
+ HDprintf(" external link target file '%s' did not match expected '%s'\n",
+ ext_link_filepath, ext_link_filename);
+ PART_ERROR(H5Lget_val_external);
+ }
+
+ if (HDstrncmp(ext_link_val, ext_obj_name, strlen(ext_obj_name) + 1)) {
+ H5_FAILED();
+ HDprintf(" external link value '%s' did not match expected '%s'\n", ext_link_val,
+ ext_obj_name);
+ PART_ERROR(H5Lget_val_external);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_VAL_TEST_SUBGROUP2_NAME);
+ PART_ERROR(H5Lget_val_external);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_val_external);
+#endif
+ }
+ PART_END(H5Lget_val_external);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_val_ud)
+ {
+ TESTING_2("H5Lget_val on user-defined link");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_val_ud);
+ }
+ PART_END(H5Lget_val_ud);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_val_by_idx_soft_crt_order_increasing)
+ {
+ const char *link_target_a = "/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_TEST_SUBGROUP_NAME
+ "/" GET_LINK_VAL_TEST_SUBGROUP4_NAME "A";
+ const char *link_target_b = "/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_TEST_SUBGROUP_NAME
+ "/" GET_LINK_VAL_TEST_SUBGROUP4_NAME "B";
+ const char *link_target_c = "/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_TEST_SUBGROUP_NAME
+ "/" GET_LINK_VAL_TEST_SUBGROUP4_NAME "C";
+
+ TESTING_2("H5Lget_val_by_idx on soft link by creation order in increasing order");
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_VAL_TEST_SUBGROUP4_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_VAL_TEST_SUBGROUP4_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ /* Create several soft links */
+ if (H5Lcreate_soft(link_target_a, subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ if (H5Lcreate_soft(link_target_b, subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ if (H5Lcreate_soft(link_target_c, subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ /* Verify the links exist */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ /* Retrieve the info and value of each link in turn */
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve soft link's info at index %d\n", 0);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ link_val_size = strlen(link_target_a) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link value size %zu for link at index %d did not match expected size of %zu\n",
+ link_info.u.val_size, 0, link_val_size);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+ if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link value at index %d\n", 0);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ if (HDstrncmp(link_val_buf, link_target_a, strlen(link_target_a) + 1)) {
+ H5_FAILED();
+ HDprintf(" link value '%s' for link at index %d did not match expected value '%s'\n",
+ link_val_buf, 0, link_target_a);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve soft link's info at index %d\n", 1);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ link_val_size = strlen(link_target_b) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link value size %zu for link at index %d did not match expected size of %zu\n",
+ link_info.u.val_size, 1, link_val_size);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+ if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link value at index %d\n", 1);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ if (HDstrncmp(link_val_buf, link_target_b, strlen(link_target_b) + 1)) {
+ H5_FAILED();
+ HDprintf(" link value '%s' for link at index %d did not match expected value '%s'\n",
+ link_val_buf, 1, link_target_b);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve soft link's info at index %d\n", 2);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ link_val_size = strlen(link_target_c) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link value size %zu for link at index %d did not match expected size of %zu\n",
+ link_info.u.val_size, 2, link_val_size);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+ if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link value at index %d\n", 2);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ if (HDstrncmp(link_val_buf, link_target_c, strlen(link_target_c) + 1)) {
+ H5_FAILED();
+ HDprintf(" link value '%s' for link at index %d did not match expected value '%s'\n",
+ link_val_buf, 2, link_target_c);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_VAL_TEST_SUBGROUP4_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_val_by_idx_soft_crt_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_val_by_idx_soft_crt_order_decreasing)
+ {
+ const char *link_target_a = "/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_TEST_SUBGROUP_NAME
+ "/" GET_LINK_VAL_TEST_SUBGROUP5_NAME "A";
+ const char *link_target_b = "/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_TEST_SUBGROUP_NAME
+ "/" GET_LINK_VAL_TEST_SUBGROUP5_NAME "B";
+ const char *link_target_c = "/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_TEST_SUBGROUP_NAME
+ "/" GET_LINK_VAL_TEST_SUBGROUP5_NAME "C";
+
+ TESTING_2("H5Lget_val_by_idx on soft link by creation order in decreasing order");
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_VAL_TEST_SUBGROUP5_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_VAL_TEST_SUBGROUP5_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Create several soft links */
+ if (H5Lcreate_soft(link_target_a, subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_soft(link_target_b, subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_soft(link_target_c, subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Verify the links exist */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Retrieve the info and value of each link in turn */
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve soft link's info at index %d\n", 2);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ link_val_size = strlen(link_target_a) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link value size %zu for link at index %d did not match expected size of %zu\n",
+ link_info.u.val_size, 2, link_val_size);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+ if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link value at index %d\n", 2);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (HDstrncmp(link_val_buf, link_target_a, strlen(link_target_a) + 1)) {
+ H5_FAILED();
+ HDprintf(" link value '%s' for link at index %d did not match expected value '%s'\n",
+ link_val_buf, 2, link_target_a);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve soft link's info at index %d\n", 1);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ link_val_size = strlen(link_target_b) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link value size %zu for link at index %d did not match expected size of %zu\n",
+ link_info.u.val_size, 1, link_val_size);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+ if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link value at index %d\n", 1);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (HDstrncmp(link_val_buf, link_target_b, strlen(link_target_b) + 1)) {
+ H5_FAILED();
+ HDprintf(" link value '%s' for link at index %d did not match expected value '%s'\n",
+ link_val_buf, 1, link_target_b);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve soft link's info at index %d\n", 0);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ link_val_size = strlen(link_target_c) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link value size %zu for link at index %d did not match expected size of %zu\n",
+ link_info.u.val_size, 0, link_val_size);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+ if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link value at index %d\n", 0);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (HDstrncmp(link_val_buf, link_target_c, strlen(link_target_c) + 1)) {
+ H5_FAILED();
+ HDprintf(" link value '%s' for link at index %d did not match expected value '%s'\n",
+ link_val_buf, 0, link_target_c);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_VAL_TEST_SUBGROUP5_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_crt_order_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_val_by_idx_soft_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_val_by_idx_soft_name_order_increasing)
+ {
+ const char *link_target_a = "/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_TEST_SUBGROUP_NAME
+ "/" GET_LINK_VAL_TEST_SUBGROUP6_NAME "A";
+ const char *link_target_b = "/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_TEST_SUBGROUP_NAME
+ "/" GET_LINK_VAL_TEST_SUBGROUP6_NAME "B";
+ const char *link_target_c = "/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_TEST_SUBGROUP_NAME
+ "/" GET_LINK_VAL_TEST_SUBGROUP6_NAME "C";
+
+ TESTING_2("H5Lget_val_by_idx on soft link by alphabetical order in increasing order");
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_VAL_TEST_SUBGROUP6_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_VAL_TEST_SUBGROUP6_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ /* Create several soft links */
+ if (H5Lcreate_soft(link_target_a, subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Lcreate_soft(link_target_b, subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Lcreate_soft(link_target_c, subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ /* Verify the links exist */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ /* Retrieve the info and value of each link in turn */
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve soft link's info at index %d\n", 0);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ link_val_size = strlen(link_target_a) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link value size %zu for link at index %d did not match expected size of %zu\n",
+ link_info.u.val_size, 0, link_val_size);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+ if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link value at index %d\n", 0);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ if (HDstrncmp(link_val_buf, link_target_a, strlen(link_target_a) + 1)) {
+ H5_FAILED();
+ HDprintf(" link value '%s' for link at index %d did not match expected value '%s'\n",
+ link_val_buf, 0, link_target_a);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve soft link's info at index %d\n", 1);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ link_val_size = strlen(link_target_b) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link value size %zu for link at index %d did not match expected size of %zu\n",
+ link_info.u.val_size, 1, link_val_size);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+ if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link value at index %d\n", 1);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ if (HDstrncmp(link_val_buf, link_target_b, strlen(link_target_b) + 1)) {
+ H5_FAILED();
+ HDprintf(" link value '%s' for link at index %d did not match expected value '%s'\n",
+ link_val_buf, 1, link_target_b);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve soft link's info at index %d\n", 2);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ link_val_size = strlen(link_target_c) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link value size %zu for link at index %d did not match expected size of %zu\n",
+ link_info.u.val_size, 2, link_val_size);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+ if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link value at index %d\n", 2);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ if (HDstrncmp(link_val_buf, link_target_c, strlen(link_target_c) + 1)) {
+ H5_FAILED();
+ HDprintf(" link value '%s' for link at index %d did not match expected value '%s'\n",
+ link_val_buf, 2, link_target_c);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_VAL_TEST_SUBGROUP6_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_val_by_idx_soft_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_val_by_idx_soft_name_order_decreasing)
+ {
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ const char *link_target_a = "/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_TEST_SUBGROUP_NAME
+ "/" GET_LINK_VAL_TEST_SUBGROUP7_NAME "A";
+ const char *link_target_b = "/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_TEST_SUBGROUP_NAME
+ "/" GET_LINK_VAL_TEST_SUBGROUP7_NAME "B";
+ const char *link_target_c = "/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_TEST_SUBGROUP_NAME
+ "/" GET_LINK_VAL_TEST_SUBGROUP7_NAME "C";
+#endif
+
+ TESTING_2("H5Lget_val_by_idx on soft link by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_VAL_TEST_SUBGROUP7_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_VAL_TEST_SUBGROUP7_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ /* Create several soft links */
+ if (H5Lcreate_soft(link_target_a, subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Lcreate_soft(link_target_b, subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Lcreate_soft(link_target_c, subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ /* Verify the links exist */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+            /* Retrieve the info and value of each link in turn. With H5_ITER_DEC on
+             * H5_INDEX_NAME, index 2 is the alphabetically-first link (target A),
+             * index 1 the second (target B), index 0 the last (target C). */
+            /* Fixed throughout this section: %lld was paired with int literals and
+             * size_t values (undefined behavior); use %d for the index and %zu for
+             * sizes, matching the sibling H5Lget_val_by_idx_* parts above. */
+            HDmemset(&link_info, 0, sizeof(link_info));
+            if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, &link_info,
+                                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve soft link's info at index %d\n", 2);
+                PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+            }
+
+            link_val_size = strlen(link_target_a) + 1;
+            if (link_info.u.val_size != link_val_size) {
+                H5_FAILED();
+                HDprintf("    link value size %zu for link at index %d did not match expected size of %zu\n",
+                         link_info.u.val_size, 2, link_val_size);
+                PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+            }
+
+            HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+            if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, link_val_buf,
+                                  GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get soft link value at index %d\n", 2);
+                PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+            }
+
+            if (HDstrncmp(link_val_buf, link_target_a, strlen(link_target_a) + 1)) {
+                H5_FAILED();
+                HDprintf("    link value '%s' for link at index %d did not match expected value '%s'\n",
+                         link_val_buf, 2, link_target_a);
+                PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+            }
+
+            HDmemset(&link_info, 0, sizeof(link_info));
+            if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, &link_info,
+                                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve soft link's info at index %d\n", 1);
+                PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+            }
+
+            link_val_size = strlen(link_target_b) + 1;
+            if (link_info.u.val_size != link_val_size) {
+                H5_FAILED();
+                HDprintf("    link value size %zu for link at index %d did not match expected size of %zu\n",
+                         link_info.u.val_size, 1, link_val_size);
+                PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+            }
+
+            HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+            if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, link_val_buf,
+                                  GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get soft link value at index %d\n", 1);
+                PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+            }
+
+ if (HDstrncmp(link_val_buf, link_target_b, strlen(link_target_b) + 1)) {
+ H5_FAILED();
+ HDprintf(" link value '%s' for link at index %lld did not match expected value '%s'\n",
+ link_val_buf, 1, link_target_b);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve soft link's info at index %lld\n", 0);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ link_val_size = strlen(link_target_c) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(
+ " link value size %lld for link at index %lld did not match expected size of %lld\n",
+ link_info.u.val_size, 0, link_val_size);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+ if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link value at index %lld\n", 0);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ if (HDstrncmp(link_val_buf, link_target_c, strlen(link_target_c) + 1)) {
+ H5_FAILED();
+ HDprintf(" link value '%s' for link at index %lld did not match expected value '%s'\n",
+ link_val_buf, 0, link_target_c);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_VAL_TEST_SUBGROUP7_NAME);
+ PART_ERROR(H5Lget_val_by_idx_soft_name_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_val_by_idx_soft_name_order_decreasing);
+#endif
+ }
+ PART_END(H5Lget_val_by_idx_soft_name_order_decreasing);
+
+        /* Best-effort close of any ID the part above may have left open on
+         * failure; errors from the close call are suppressed by the
+         * H5E_BEGIN_TRY/H5E_END_TRY pair. */
+        H5E_BEGIN_TRY
+        {
+            H5Gclose(subgroup_id);
+            subgroup_id = H5I_INVALID_HID;
+        }
+        H5E_END_TRY;
+
+        PART_BEGIN(H5Lget_val_by_idx_external_crt_order_increasing)
+        {
+#ifndef NO_EXTERNAL_LINKS
+            const char *ext_obj_name_a = "/A";
+            const char *ext_obj_name_b = "/B";
+            const char *ext_obj_name_c = "/C";
+#endif
+
+            TESTING_2("H5Lget_val_by_idx on external link by creation order in increasing order");
+#ifndef NO_EXTERNAL_LINKS
+            /* Create (and immediately close) the file the external links will
+             * point into */
+            HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+                       EXTERNAL_LINK_TEST_FILE_NAME);
+
+            if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (H5Fclose(ext_file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close file '%s'\n", ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_VAL_TEST_SUBGROUP8_NAME, H5P_DEFAULT, gcpl_id,
+                                          H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create subgroup '%s'\n", GET_LINK_VAL_TEST_SUBGROUP8_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            /* Create several external links */
+            if (H5Lcreate_external(ext_link_filename, ext_obj_name_a, subgroup_id,
+                                   GET_LINK_VAL_TEST_EXT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link '%s'\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (H5Lcreate_external(ext_link_filename, ext_obj_name_b, subgroup_id,
+                                   GET_LINK_VAL_TEST_EXT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link '%s'\n", GET_LINK_VAL_TEST_EXT_LINK_NAME2);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (H5Lcreate_external(ext_link_filename, ext_obj_name_c, subgroup_id,
+                                   GET_LINK_VAL_TEST_EXT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link '%s'\n", GET_LINK_VAL_TEST_EXT_LINK_NAME3);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            /* Verify the links exist */
+            if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_EXT_LINK_NAME2);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", GET_LINK_VAL_TEST_EXT_LINK_NAME2);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_EXT_LINK_NAME3);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", GET_LINK_VAL_TEST_EXT_LINK_NAME3);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            /* Retrieve the info and value of each link in turn.  With
+             * H5_ITER_INC on the creation-order index, index 0 is the first
+             * link created (object "/A") and index 2 the last ("/C").
+             * NOTE(review): values printed with %lld are cast to long long --
+             * passing a plain int (or a size_t) through %lld is undefined
+             * behavior. */
+            HDmemset(&link_info, 0, sizeof(link_info));
+            if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, &link_info,
+                                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve external link's info at index %lld\n", (long long)0);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            /* An external link's value is a flags byte followed by the
+             * NUL-terminated target file name and object path, hence the
+             * 1 + len + 1 + len + 1 expected size */
+            link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_obj_name_a) + 1;
+            if (link_info.u.val_size != link_val_size) {
+                H5_FAILED();
+                HDprintf(
+                    "    link value size %lld for link at index %lld did not match expected size of %lld\n",
+                    (long long)link_info.u.val_size, (long long)0, (long long)link_val_size);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+            if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, link_val_buf,
+                                  GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get external link value at index %lld\n", (long long)0);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (H5Lunpack_elink_val(link_val_buf, link_info.u.val_size, &ext_link_flags, &ext_link_filepath,
+                                    &ext_link_val) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't unpack external link value buffer\n");
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (HDstrncmp(ext_link_filepath, ext_link_filename, strlen(ext_link_filename) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link target file '%s' did not match expected '%s'\n",
+                         ext_link_filepath, ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (HDstrncmp(ext_link_val, ext_obj_name_a, strlen(ext_obj_name_a) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link value '%s' did not match expected '%s'\n", ext_link_val,
+                         ext_obj_name_a);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            HDmemset(&link_info, 0, sizeof(link_info));
+            if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, &link_info,
+                                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve external link's info at index %lld\n", (long long)1);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_obj_name_b) + 1;
+            if (link_info.u.val_size != link_val_size) {
+                H5_FAILED();
+                HDprintf(
+                    "    link value size %lld for link at index %lld did not match expected size of %lld\n",
+                    (long long)link_info.u.val_size, (long long)1, (long long)link_val_size);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+            if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, link_val_buf,
+                                  GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get external link value at index %lld\n", (long long)1);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (H5Lunpack_elink_val(link_val_buf, link_info.u.val_size, &ext_link_flags, &ext_link_filepath,
+                                    &ext_link_val) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't unpack external link value buffer\n");
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (HDstrncmp(ext_link_filepath, ext_link_filename, strlen(ext_link_filename) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link target file '%s' did not match expected '%s'\n",
+                         ext_link_filepath, ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (HDstrncmp(ext_link_val, ext_obj_name_b, strlen(ext_obj_name_b) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link value '%s' did not match expected '%s'\n", ext_link_val,
+                         ext_obj_name_b);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            HDmemset(&link_info, 0, sizeof(link_info));
+            if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2, &link_info,
+                                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve external link's info at index %lld\n", (long long)2);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_obj_name_c) + 1;
+            if (link_info.u.val_size != link_val_size) {
+                H5_FAILED();
+                HDprintf(
+                    "    link value size %lld for link at index %lld did not match expected size of %lld\n",
+                    (long long)link_info.u.val_size, (long long)2, (long long)link_val_size);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+            if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2, link_val_buf,
+                                  GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get external link value at index %lld\n", (long long)2);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (H5Lunpack_elink_val(link_val_buf, link_info.u.val_size, &ext_link_flags, &ext_link_filepath,
+                                    &ext_link_val) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't unpack external link value buffer\n");
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (HDstrncmp(ext_link_filepath, ext_link_filename, strlen(ext_link_filename) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link target file '%s' did not match expected '%s'\n",
+                         ext_link_filepath, ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (HDstrncmp(ext_link_val, ext_obj_name_c, strlen(ext_obj_name_c) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link value '%s' did not match expected '%s'\n", ext_link_val,
+                         ext_obj_name_c);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            if (H5Gclose(subgroup_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close group '%s'\n", GET_LINK_VAL_TEST_SUBGROUP8_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_increasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Lget_val_by_idx_external_crt_order_increasing);
+#endif
+        }
+        PART_END(H5Lget_val_by_idx_external_crt_order_increasing);
+
+        /* Best-effort close of any IDs the part above may have left open on
+         * failure; errors from these close calls are suppressed by the
+         * H5E_BEGIN_TRY/H5E_END_TRY pair. */
+        H5E_BEGIN_TRY
+        {
+            H5Gclose(subgroup_id);
+            subgroup_id = H5I_INVALID_HID;
+            H5Fclose(ext_file_id);
+            ext_file_id = H5I_INVALID_HID;
+        }
+        H5E_END_TRY;
+
+        PART_BEGIN(H5Lget_val_by_idx_external_crt_order_decreasing)
+        {
+#ifndef NO_EXTERNAL_LINKS
+            const char *ext_obj_name_a = "/A";
+            const char *ext_obj_name_b = "/B";
+            const char *ext_obj_name_c = "/C";
+#endif
+
+            TESTING_2("H5Lget_val_by_idx on external link by creation order in decreasing order");
+#ifndef NO_EXTERNAL_LINKS
+            /* Create (and immediately close) the file the external links will
+             * point into */
+            HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+                       EXTERNAL_LINK_TEST_FILE_NAME);
+
+            if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (H5Fclose(ext_file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close file '%s'\n", ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_VAL_TEST_SUBGROUP9_NAME, H5P_DEFAULT, gcpl_id,
+                                          H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create subgroup '%s'\n", GET_LINK_VAL_TEST_SUBGROUP9_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            /* Create several external links */
+            if (H5Lcreate_external(ext_link_filename, ext_obj_name_a, subgroup_id,
+                                   GET_LINK_VAL_TEST_EXT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link '%s'\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (H5Lcreate_external(ext_link_filename, ext_obj_name_b, subgroup_id,
+                                   GET_LINK_VAL_TEST_EXT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link '%s'\n", GET_LINK_VAL_TEST_EXT_LINK_NAME2);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (H5Lcreate_external(ext_link_filename, ext_obj_name_c, subgroup_id,
+                                   GET_LINK_VAL_TEST_EXT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link '%s'\n", GET_LINK_VAL_TEST_EXT_LINK_NAME3);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            /* Verify the links exist */
+            if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_EXT_LINK_NAME2);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", GET_LINK_VAL_TEST_EXT_LINK_NAME2);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_EXT_LINK_NAME3);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", GET_LINK_VAL_TEST_EXT_LINK_NAME3);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            /* Retrieve the info and value of each link in turn.  With
+             * H5_ITER_DEC on the creation-order index, index 2 maps to the
+             * first link created (object "/A") and index 0 to the last ("/C").
+             * NOTE(review): values printed with %lld are cast to long long --
+             * passing a plain int (or a size_t) through %lld is undefined
+             * behavior. */
+            HDmemset(&link_info, 0, sizeof(link_info));
+            if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, &link_info,
+                                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve external link's info at index %lld\n", (long long)2);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            /* An external link's value is a flags byte followed by the
+             * NUL-terminated target file name and object path, hence the
+             * 1 + len + 1 + len + 1 expected size */
+            link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_obj_name_a) + 1;
+            if (link_info.u.val_size != link_val_size) {
+                H5_FAILED();
+                HDprintf(
+                    "    link value size %lld for link at index %lld did not match expected size of %lld\n",
+                    (long long)link_info.u.val_size, (long long)2, (long long)link_val_size);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+            if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, link_val_buf,
+                                  GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get external link value at index %lld\n", (long long)2);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (H5Lunpack_elink_val(link_val_buf, link_info.u.val_size, &ext_link_flags, &ext_link_filepath,
+                                    &ext_link_val) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't unpack external link value buffer\n");
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (HDstrncmp(ext_link_filepath, ext_link_filename, strlen(ext_link_filename) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link target file '%s' did not match expected '%s'\n",
+                         ext_link_filepath, ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (HDstrncmp(ext_link_val, ext_obj_name_a, strlen(ext_obj_name_a) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link value '%s' did not match expected '%s'\n", ext_link_val,
+                         ext_obj_name_a);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            HDmemset(&link_info, 0, sizeof(link_info));
+            if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, &link_info,
+                                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve external link's info at index %lld\n", (long long)1);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_obj_name_b) + 1;
+            if (link_info.u.val_size != link_val_size) {
+                H5_FAILED();
+                HDprintf(
+                    "    link value size %lld for link at index %lld did not match expected size of %lld\n",
+                    (long long)link_info.u.val_size, (long long)1, (long long)link_val_size);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+            if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, link_val_buf,
+                                  GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get external link value at index %lld\n", (long long)1);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (H5Lunpack_elink_val(link_val_buf, link_info.u.val_size, &ext_link_flags, &ext_link_filepath,
+                                    &ext_link_val) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't unpack external link value buffer\n");
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (HDstrncmp(ext_link_filepath, ext_link_filename, strlen(ext_link_filename) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link target file '%s' did not match expected '%s'\n",
+                         ext_link_filepath, ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (HDstrncmp(ext_link_val, ext_obj_name_b, strlen(ext_obj_name_b) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link value '%s' did not match expected '%s'\n", ext_link_val,
+                         ext_obj_name_b);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            HDmemset(&link_info, 0, sizeof(link_info));
+            if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, &link_info,
+                                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve external link's info at index %lld\n", (long long)0);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_obj_name_c) + 1;
+            if (link_info.u.val_size != link_val_size) {
+                H5_FAILED();
+                HDprintf(
+                    "    link value size %lld for link at index %lld did not match expected size of %lld\n",
+                    (long long)link_info.u.val_size, (long long)0, (long long)link_val_size);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+            if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, link_val_buf,
+                                  GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get external link value at index %lld\n", (long long)0);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (H5Lunpack_elink_val(link_val_buf, link_info.u.val_size, &ext_link_flags, &ext_link_filepath,
+                                    &ext_link_val) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't unpack external link value buffer\n");
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (HDstrncmp(ext_link_filepath, ext_link_filename, strlen(ext_link_filename) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link target file '%s' did not match expected '%s'\n",
+                         ext_link_filepath, ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (HDstrncmp(ext_link_val, ext_obj_name_c, strlen(ext_obj_name_c) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link value '%s' did not match expected '%s'\n", ext_link_val,
+                         ext_obj_name_c);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            if (H5Gclose(subgroup_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close group '%s'\n", GET_LINK_VAL_TEST_SUBGROUP9_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_crt_order_decreasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Lget_val_by_idx_external_crt_order_decreasing);
+#endif
+        }
+        PART_END(H5Lget_val_by_idx_external_crt_order_decreasing);
+
+        /* Best-effort close of any IDs the part above may have left open on
+         * failure; errors from these close calls are suppressed by the
+         * H5E_BEGIN_TRY/H5E_END_TRY pair. */
+        H5E_BEGIN_TRY
+        {
+            H5Gclose(subgroup_id);
+            subgroup_id = H5I_INVALID_HID;
+            H5Fclose(ext_file_id);
+            ext_file_id = H5I_INVALID_HID;
+        }
+        H5E_END_TRY;
+
+        PART_BEGIN(H5Lget_val_by_idx_external_name_order_increasing)
+        {
+#ifndef NO_EXTERNAL_LINKS
+            const char *ext_obj_name_a = "/A";
+            const char *ext_obj_name_b = "/B";
+            const char *ext_obj_name_c = "/C";
+#endif
+
+            TESTING_2("H5Lget_val_by_idx on external link by alphabetical order in increasing order");
+#ifndef NO_EXTERNAL_LINKS
+            /* Create (and immediately close) the file the external links will
+             * point into */
+            HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+                       EXTERNAL_LINK_TEST_FILE_NAME);
+
+            if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (H5Fclose(ext_file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close file '%s'\n", ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_VAL_TEST_SUBGROUP10_NAME, H5P_DEFAULT, gcpl_id,
+                                          H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create subgroup '%s'\n", GET_LINK_VAL_TEST_SUBGROUP10_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            /* Create several external links */
+            if (H5Lcreate_external(ext_link_filename, ext_obj_name_a, subgroup_id,
+                                   GET_LINK_VAL_TEST_EXT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link '%s'\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (H5Lcreate_external(ext_link_filename, ext_obj_name_b, subgroup_id,
+                                   GET_LINK_VAL_TEST_EXT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link '%s'\n", GET_LINK_VAL_TEST_EXT_LINK_NAME2);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (H5Lcreate_external(ext_link_filename, ext_obj_name_c, subgroup_id,
+                                   GET_LINK_VAL_TEST_EXT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link '%s'\n", GET_LINK_VAL_TEST_EXT_LINK_NAME3);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            /* Verify the links exist */
+            if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_EXT_LINK_NAME2);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", GET_LINK_VAL_TEST_EXT_LINK_NAME2);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_EXT_LINK_NAME3);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", GET_LINK_VAL_TEST_EXT_LINK_NAME3);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            /* Retrieve the info and value of each link in turn.  With
+             * H5_ITER_INC on the name index, index 0 is the first link
+             * alphabetically (object "/A") and index 2 the last ("/C").
+             * NOTE(review): values printed with %lld are cast to long long --
+             * passing a plain int (or a size_t) through %lld is undefined
+             * behavior. */
+            HDmemset(&link_info, 0, sizeof(link_info));
+            if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, &link_info,
+                                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve external link's info at index %lld\n", (long long)0);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            /* An external link's value is a flags byte followed by the
+             * NUL-terminated target file name and object path, hence the
+             * 1 + len + 1 + len + 1 expected size */
+            link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_obj_name_a) + 1;
+            if (link_info.u.val_size != link_val_size) {
+                H5_FAILED();
+                HDprintf(
+                    "    link value size %lld for link at index %lld did not match expected size of %lld\n",
+                    (long long)link_info.u.val_size, (long long)0, (long long)link_val_size);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+            if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, link_val_buf,
+                                  GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get external link value at index %lld\n", (long long)0);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (H5Lunpack_elink_val(link_val_buf, link_info.u.val_size, &ext_link_flags, &ext_link_filepath,
+                                    &ext_link_val) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't unpack external link value buffer\n");
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (HDstrncmp(ext_link_filepath, ext_link_filename, strlen(ext_link_filename) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link target file '%s' did not match expected '%s'\n",
+                         ext_link_filepath, ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (HDstrncmp(ext_link_val, ext_obj_name_a, strlen(ext_obj_name_a) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link value '%s' did not match expected '%s'\n", ext_link_val,
+                         ext_obj_name_a);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            HDmemset(&link_info, 0, sizeof(link_info));
+            if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1, &link_info,
+                                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve external link's info at index %lld\n", (long long)1);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_obj_name_b) + 1;
+            if (link_info.u.val_size != link_val_size) {
+                H5_FAILED();
+                HDprintf(
+                    "    link value size %lld for link at index %lld did not match expected size of %lld\n",
+                    (long long)link_info.u.val_size, (long long)1, (long long)link_val_size);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+            if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1, link_val_buf,
+                                  GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get external link value at index %lld\n", (long long)1);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (H5Lunpack_elink_val(link_val_buf, link_info.u.val_size, &ext_link_flags, &ext_link_filepath,
+                                    &ext_link_val) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't unpack external link value buffer\n");
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (HDstrncmp(ext_link_filepath, ext_link_filename, strlen(ext_link_filename) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link target file '%s' did not match expected '%s'\n",
+                         ext_link_filepath, ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (HDstrncmp(ext_link_val, ext_obj_name_b, strlen(ext_obj_name_b) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link value '%s' did not match expected '%s'\n", ext_link_val,
+                         ext_obj_name_b);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            HDmemset(&link_info, 0, sizeof(link_info));
+            if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2, &link_info,
+                                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve external link's info at index %lld\n", (long long)2);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_obj_name_c) + 1;
+            if (link_info.u.val_size != link_val_size) {
+                H5_FAILED();
+                HDprintf(
+                    "    link value size %lld for link at index %lld did not match expected size of %lld\n",
+                    (long long)link_info.u.val_size, (long long)2, (long long)link_val_size);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+            if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2, link_val_buf,
+                                  GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't get external link value at index %lld\n", (long long)2);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (H5Lunpack_elink_val(link_val_buf, link_info.u.val_size, &ext_link_flags, &ext_link_filepath,
+                                    &ext_link_val) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't unpack external link value buffer\n");
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (HDstrncmp(ext_link_filepath, ext_link_filename, strlen(ext_link_filename) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link target file '%s' did not match expected '%s'\n",
+                         ext_link_filepath, ext_link_filename);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (HDstrncmp(ext_link_val, ext_obj_name_c, strlen(ext_obj_name_c) + 1)) {
+                H5_FAILED();
+                HDprintf("    external link value '%s' did not match expected '%s'\n", ext_link_val,
+                         ext_obj_name_c);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            if (H5Gclose(subgroup_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close group '%s'\n", GET_LINK_VAL_TEST_SUBGROUP10_NAME);
+                PART_ERROR(H5Lget_val_by_idx_external_name_order_increasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Lget_val_by_idx_external_name_order_increasing);
+#endif
+        }
+        PART_END(H5Lget_val_by_idx_external_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_val_by_idx_external_name_order_decreasing)
+ {
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ const char *ext_obj_name_a = "/A";
+ const char *ext_obj_name_b = "/B";
+ const char *ext_obj_name_c = "/C";
+#endif
+
+ TESTING_2("H5Lget_val_by_idx on external link by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_VAL_TEST_SUBGROUP11_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_VAL_TEST_SUBGROUP11_NAME);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ /* Create several external links */
+ if (H5Lcreate_external(ext_link_filename, ext_obj_name_a, subgroup_id,
+ GET_LINK_VAL_TEST_EXT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, ext_obj_name_b, subgroup_id,
+ GET_LINK_VAL_TEST_EXT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_VAL_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, ext_obj_name_c, subgroup_id,
+ GET_LINK_VAL_TEST_EXT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_VAL_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ /* Verify the links exist */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_VAL_TEST_EXT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", GET_LINK_VAL_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", GET_LINK_VAL_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ /* Retrieve the info and value of each link in turn */
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve external link's info at index %lld\n", 2);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_obj_name_a) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(
+ " link value size %lld for link at index %lld did not match expected size of %lld\n",
+ link_info.u.val_size, 2, link_val_size);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+ if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link value at index %lld\n", 2);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lunpack_elink_val(link_val_buf, link_info.u.val_size, &ext_link_flags, &ext_link_filepath,
+ &ext_link_val) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't unpack external link value buffer\n");
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (HDstrncmp(ext_link_filepath, ext_link_filename, strlen(ext_link_filename) + 1)) {
+ H5_FAILED();
+ HDprintf(" external link target file '%s' did not match expected '%s'\n",
+ ext_link_filepath, ext_link_filename);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (HDstrncmp(ext_link_val, ext_obj_name_a, strlen(ext_obj_name_a) + 1)) {
+ H5_FAILED();
+ HDprintf(" external link value '%s' did not match expected '%s'\n", ext_link_val,
+ ext_obj_name_a);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve external link's info at index %lld\n", 1);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_obj_name_b) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(
+ " link value size %lld for link at index %lld did not match expected size of %lld\n",
+ link_info.u.val_size, 1, link_val_size);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+ if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link value at index %lld\n", 1);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lunpack_elink_val(link_val_buf, link_info.u.val_size, &ext_link_flags, &ext_link_filepath,
+ &ext_link_val) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't unpack external link value buffer\n");
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (HDstrncmp(ext_link_filepath, ext_link_filename, strlen(ext_link_filename) + 1)) {
+ H5_FAILED();
+ HDprintf(" external link target file '%s' did not match expected '%s'\n",
+ ext_link_filepath, ext_link_filename);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (HDstrncmp(ext_link_val, ext_obj_name_b, strlen(ext_obj_name_b) + 1)) {
+ H5_FAILED();
+ HDprintf(" external link value '%s' did not match expected '%s'\n", ext_link_val,
+ ext_obj_name_b);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve external link's info at index %lld\n", 0);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_obj_name_c) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(
+ " link value size %lld for link at index %lld did not match expected size of %lld\n",
+ link_info.u.val_size, 0, link_val_size);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ HDmemset(link_val_buf, 0, GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE);
+ if (H5Lget_val_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, link_val_buf,
+ GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link value at index %lld\n", 0);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lunpack_elink_val(link_val_buf, link_info.u.val_size, &ext_link_flags, &ext_link_filepath,
+ &ext_link_val) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't unpack external link value buffer\n");
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (HDstrncmp(ext_link_filepath, ext_link_filename, strlen(ext_link_filename) + 1)) {
+ H5_FAILED();
+ HDprintf(" external link target file '%s' did not match expected '%s'\n",
+ ext_link_filepath, ext_link_filename);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (HDstrncmp(ext_link_val, ext_obj_name_c, strlen(ext_obj_name_c) + 1)) {
+ H5_FAILED();
+ HDprintf(" external link value '%s' did not match expected '%s'\n", ext_link_val,
+ ext_obj_name_c);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_VAL_TEST_SUBGROUP11_NAME);
+ PART_ERROR(H5Lget_val_by_idx_external_name_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_val_by_idx_external_name_order_decreasing);
+#endif
+ }
+ PART_END(H5Lget_val_by_idx_external_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_val_by_idx_ud_crt_order_increasing)
+ {
+ TESTING_2("H5Lget_val_by_idx on user-defined link by creation order in increasing order");
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_val_by_idx_ud_crt_order_increasing);
+ }
+ PART_END(H5Lget_val_by_idx_ud_crt_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_val_by_idx_ud_crt_order_decreasing)
+ {
+ TESTING_2("H5Lget_val_by_idx on user-defined link by creation order in decreasing order");
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_val_by_idx_ud_crt_order_decreasing);
+ }
+ PART_END(H5Lget_val_by_idx_ud_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_val_by_idx_ud_name_order_increasing)
+ {
+ TESTING_2("H5Lget_val_by_idx on user-defined link by alphabetical order in increasing order");
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_val_by_idx_ud_name_order_increasing);
+ }
+ PART_END(H5Lget_val_by_idx_ud_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_val_by_idx_ud_name_order_decreasing)
+ {
+ TESTING_2("H5Lget_val_by_idx on user-defined link by alphabetical order in decreasing order");
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_val_by_idx_ud_name_order_decreasing);
+ }
+ PART_END(H5Lget_val_by_idx_ud_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(gcpl_id);
+ H5Gclose(subgroup_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(ext_file_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a soft or external link's value can't be
+ * retrieved when H5Lget_val(_by_idx) is passed invalid parameters.
+ *
+ * Each sub-test below passes exactly one invalid argument (bad location
+ * ID, NULL or empty name, bad index type, bad iteration order, or bad
+ * LAPL) and expects the call to fail; a non-negative return value is
+ * reported as a test failure.
+ */
+static int
+test_get_link_val_invalid_params(void)
+{
+ /* IDs are initialized to H5I_INVALID_HID and the value buffer to NULL
+ * so the shared 'error' cleanup path only releases what was acquired. */
+ H5L_info2_t link_info;
+ htri_t link_exists;
+ herr_t err_ret = -1;
+ size_t link_val_buf_size = 0;
+ char *link_val_buf = NULL;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("link value retrieval with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ /* NOTE(review): the skip message below also mentions "external link",
+ * but H5VL_CAP_FLAG_EXTERNAL_LINKS is not among the flags checked here
+ * -- confirm whether the message or the flag set is the intended one. */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, basic, more, soft, external link, or creation "
+ "order aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ /* Open the shared API test file and the top-level link test group */
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ /* Create a GCPL with link creation order tracked and indexed so the
+ * H5Lget_val_by_idx calls can use H5_INDEX_CRT_ORDER */
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create a GCPL\n");
+ goto error;
+ }
+
+ if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't enable link creation order tracking and indexing on GCPL\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, GET_LINK_VAL_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ GET_LINK_VAL_INVALID_PARAMS_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ /* Create one soft link to serve as the (valid) target of the
+ * invalid-parameter calls below */
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_VAL_INVALID_PARAMS_TEST_GROUP_NAME, group_id,
+ GET_LINK_VAL_INVALID_PARAMS_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_VAL_INVALID_PARAMS_TEST_SOFT_LINK_NAME);
+ goto error;
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(group_id, GET_LINK_VAL_INVALID_PARAMS_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n",
+ GET_LINK_VAL_INVALID_PARAMS_TEST_SOFT_LINK_NAME);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link did not exist\n");
+ goto error;
+ }
+
+ /* Allocate a buffer for the link value; 100 bytes is enough to hold
+ * the soft link's value for the calls below */
+ link_val_buf_size = 100;
+ if (NULL == (link_val_buf = (char *)HDmalloc(link_val_buf_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for storing link value\n");
+ goto error;
+ }
+
+ PASSED();
+
+ /* Each part wraps the expected-to-fail call in H5E_BEGIN_TRY/H5E_END_TRY
+ * to suppress HDF5's error stack output, then checks that the call
+ * actually returned a negative value. */
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Lget_val_invalid_loc_id)
+ {
+ TESTING_2("H5Lget_val with an invalid location ID");
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lget_val(H5I_INVALID_HID, GET_LINK_VAL_INVALID_PARAMS_TEST_SOFT_LINK_NAME,
+ link_val_buf, link_val_buf_size, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lget_val succeeded with an invalid location ID\n");
+ PART_ERROR(H5Lget_val_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_val_invalid_loc_id);
+
+ PART_BEGIN(H5Lget_val_invalid_link_name)
+ {
+ TESTING_2("H5Lget_val with an invalid link name");
+
+ /* Both a NULL name and an empty name must be rejected */
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lget_val(group_id, NULL, link_val_buf, link_val_buf_size, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lget_val succeeded with a NULL link name\n");
+ PART_ERROR(H5Lget_val_invalid_link_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lget_val(group_id, "", link_val_buf, link_val_buf_size, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lget_val succeeded with an invalid link name of ''\n");
+ PART_ERROR(H5Lget_val_invalid_link_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_val_invalid_link_name);
+
+ PART_BEGIN(H5Lget_val_invalid_lapl)
+ {
+ TESTING_2("H5Lget_val with an invalid LAPL");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lget_val(group_id, GET_LINK_VAL_INVALID_PARAMS_TEST_SOFT_LINK_NAME, link_val_buf,
+ link_val_buf_size, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lget_val succeeded with an invalid LAPL\n");
+ PART_ERROR(H5Lget_val_invalid_lapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_val_invalid_lapl);
+
+ PART_BEGIN(H5Lget_val_by_idx_invalid_loc_id)
+ {
+ TESTING_2("H5Lget_val_by_idx with an invalid location ID");
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lget_val_by_idx(H5I_INVALID_HID, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0,
+ link_val_buf, link_val_buf_size, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lget_val_by_idx succeeded with an invalid location ID!\n");
+ PART_ERROR(H5Lget_val_by_idx_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_val_by_idx_invalid_loc_id);
+
+ PART_BEGIN(H5Lget_val_by_idx_invalid_grp_name)
+ {
+ TESTING_2("H5Lget_val_by_idx with an invalid group name");
+
+ /* Both a NULL name and an empty name must be rejected */
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lget_val_by_idx(group_id, NULL, H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, link_val_buf,
+ link_val_buf_size, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lget_val_by_idx succeeded with a NULL group name!\n");
+ PART_ERROR(H5Lget_val_by_idx_invalid_grp_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lget_val_by_idx(group_id, "", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, link_val_buf,
+ link_val_buf_size, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lget_val_by_idx succeeded with an invalid group name of ''!\n");
+ PART_ERROR(H5Lget_val_by_idx_invalid_grp_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_val_by_idx_invalid_grp_name);
+
+ PART_BEGIN(H5Lget_val_by_idx_invalid_index_type)
+ {
+ TESTING_2("H5Lget_val_by_idx with an invalid index type");
+
+ /* H5_INDEX_UNKNOWN (-1) and H5_INDEX_N (one past the last valid
+ * value) are both out of range */
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lget_val_by_idx(group_id, ".", H5_INDEX_UNKNOWN, H5_ITER_INC, 0, link_val_buf,
+ link_val_buf_size, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lget_val_by_idx succeeded with invalid index type H5_INDEX_UNKNOWN!\n");
+ PART_ERROR(H5Lget_val_by_idx_invalid_index_type);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lget_val_by_idx(group_id, ".", H5_INDEX_N, H5_ITER_INC, 0, link_val_buf,
+ link_val_buf_size, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lget_val_by_idx succeeded with invalid index type H5_INDEX_N!\n");
+ PART_ERROR(H5Lget_val_by_idx_invalid_index_type);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_val_by_idx_invalid_index_type);
+
+ PART_BEGIN(H5Lget_val_by_idx_invalid_iter_order)
+ {
+ TESTING_2("H5Lget_val_by_idx with an invalid iteration ordering");
+
+ /* H5_ITER_UNKNOWN (-1) and H5_ITER_N (one past the last valid
+ * value) are both out of range */
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lget_val_by_idx(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_UNKNOWN, 0,
+ link_val_buf, link_val_buf_size, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(
+ " H5Lget_val_by_idx succeeded with invalid iteration ordering H5_ITER_UNKNOWN!\n");
+ PART_ERROR(H5Lget_val_by_idx_invalid_iter_order);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lget_val_by_idx(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_N, 0, link_val_buf,
+ link_val_buf_size, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lget_val_by_idx succeeded with invalid iteration ordering H5_ITER_N!\n");
+ PART_ERROR(H5Lget_val_by_idx_invalid_iter_order);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_val_by_idx_invalid_iter_order);
+
+ PART_BEGIN(H5Lget_val_by_idx_invalid_lapl)
+ {
+ TESTING_2("H5Lget_val_by_idx with an invalid LAPL");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lget_val_by_idx(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, link_val_buf,
+ link_val_buf_size, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lget_val_by_idx succeeded with an invalid LAPL!\n");
+ PART_ERROR(H5Lget_val_by_idx_invalid_lapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_val_by_idx_invalid_lapl);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (link_val_buf) {
+ HDfree(link_val_buf);
+ link_val_buf = NULL;
+ }
+
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup of whatever was acquired; errors from the close
+ * calls themselves are suppressed */
+ H5E_BEGIN_TRY
+ {
+ if (link_val_buf)
+ HDfree(link_val_buf);
+ H5Pclose(gcpl_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check the functionality of H5Lget_info2 and
+ * H5Lget_info_by_idx2.
+ */
+static int
+test_get_link_info(void)
+{
+ H5L_info2_t link_info;
+ htri_t link_exists;
+ size_t link_val_size;
+ hid_t file_id = H5I_INVALID_HID, ext_file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t subgroup_id = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+#ifndef NO_EXTERNAL_LINKS
+ char *ext_objname;
+ char ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+#endif
+
+ TESTING_MULTIPART("link info retrieval");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, basic, more, soft, hard, external link, or "
+ "creation order aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create GCPL for link creation order tracking\n");
+ goto error;
+ }
+
+ if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set link creation order tracking\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, GET_LINK_INFO_TEST_GROUP_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n", GET_LINK_INFO_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Lget_info_hard)
+ {
+ TESTING_2("H5Lget_info2 on hard link");
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP1_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP1_NAME);
+ PART_ERROR(H5Lget_info_hard);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", GET_LINK_INFO_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_info_hard);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ GET_LINK_INFO_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_info_hard);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link did not exist\n");
+ PART_ERROR(H5Lget_info_hard);
+ }
+
+ if (H5Lget_info2(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME, &link_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get hard link info\n");
+ PART_ERROR(H5Lget_info_hard);
+ }
+
+ if (link_info.type != H5L_TYPE_HARD) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_hard);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)0);
+ PART_ERROR(H5Lget_info_hard);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP1_NAME);
+ PART_ERROR(H5Lget_info_hard);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_info_hard);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_soft)
+ {
+ TESTING_2("H5Lget_info2 on soft link");
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP2_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP2_NAME);
+ PART_ERROR(H5Lget_info_soft);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP2_NAME,
+ subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_info_soft);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_info_soft);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link did not exist\n");
+ PART_ERROR(H5Lget_info_soft);
+ }
+
+ if (H5Lget_info2(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME, &link_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link info\n");
+ PART_ERROR(H5Lget_info_soft);
+ }
+
+ if (link_info.type != H5L_TYPE_SOFT) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_soft);
+ }
+
+ link_val_size = strlen("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP2_NAME) +
+ 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%zu' did not match expected value '%zu'\n",
+ link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_soft);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)0);
+ PART_ERROR(H5Lget_info_soft);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP2_NAME);
+ PART_ERROR(H5Lget_info_soft);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_info_soft);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_external)
+ {
+ TESTING_2("H5Lget_info2 on external link");
+#ifndef NO_EXTERNAL_LINKS
+ HDmemset(&link_info, 0, sizeof(link_info));
+
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lget_info_external);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lget_info_external);
+ }
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP3_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP3_NAME);
+ PART_ERROR(H5Lget_info_external);
+ }
+
+ ext_objname = "/";
+ if (H5Lcreate_external(ext_link_filename, ext_objname, subgroup_id,
+ GET_LINK_INFO_TEST_EXT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_external);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_external);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link did not exist\n");
+ PART_ERROR(H5Lget_info_external);
+ }
+
+ if (H5Lget_info2(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME, &link_info, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link info\n");
+ PART_ERROR(H5Lget_info_external);
+ }
+
+ if (link_info.type != H5L_TYPE_EXTERNAL) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_external);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_objname) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_external);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)0);
+ PART_ERROR(H5Lget_info_external);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP3_NAME);
+ PART_ERROR(H5Lget_info_external);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_info_external);
+#endif
+ }
+ PART_END(H5Lget_info_external);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_ud)
+ {
+ TESTING_2("H5Lget_info2 on user-defined link");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_info_ud);
+ }
+ PART_END(H5Lget_info_ud);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+    /* Exercise H5Lget_info_by_idx2 on hard links, indexed by creation order,
+     * iterating in increasing order: create three hard links, verify they
+     * exist, then check the reported link type and (if tracked) creation
+     * order for indices 0..2.
+     */
+    PART_BEGIN(H5Lget_info_by_idx_hard_crt_order_increasing)
+    {
+        TESTING_2("H5Lget_info_by_idx2 on hard link by creation order in increasing order");
+
+        /* gcpl_id enables link creation order tracking on the subgroup */
+        if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP5_NAME, H5P_DEFAULT, gcpl_id,
+                                      H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP5_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        /* Create several hard links (each pointing at the subgroup itself) */
+        if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create hard link '%s'\n", GET_LINK_INFO_TEST_HARD_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create hard link '%s'\n", GET_LINK_INFO_TEST_HARD_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create hard link '%s'\n", GET_LINK_INFO_TEST_HARD_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        /* Verify the links have been created */
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if hard link '%s' exists\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            /* Message fixed: this part only queries link info, nothing is deleted */
+            HDprintf("    hard link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if hard link '%s' exists\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            HDprintf("    hard link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if hard link '%s' exists\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            HDprintf("    hard link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        /* Retrieve info of links in turn; link_info is zeroed before each
+         * call so stale fields from a previous query can't mask a failure */
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get hard link info for index %d\n", 0);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        if (link_info.type != H5L_TYPE_HARD) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        /* corder is only checked when the connector reports it as valid */
+        if (link_info.corder_valid && (link_info.corder != 0)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)0);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get hard link info for index %d\n", 1);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        if (link_info.type != H5L_TYPE_HARD) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 1)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)1);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get hard link info for index %d\n", 2);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        if (link_info.type != H5L_TYPE_HARD) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 2)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)2);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        if (H5Gclose(subgroup_id) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP5_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_increasing);
+        }
+
+        PASSED();
+    }
+    PART_END(H5Lget_info_by_idx_hard_crt_order_increasing);
+
+    /* Suppress errors while releasing the handle on failure paths */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(subgroup_id);
+        subgroup_id = H5I_INVALID_HID;
+    }
+    H5E_END_TRY;
+
+    /* Exercise H5Lget_info_by_idx2 on hard links, indexed by creation order,
+     * iterating in decreasing order: with three links, decreasing index 2
+     * maps to creation order 0 and index 0 maps to creation order 2.
+     */
+    PART_BEGIN(H5Lget_info_by_idx_hard_crt_order_decreasing)
+    {
+        TESTING_2("H5Lget_info_by_idx2 on hard link by creation order in decreasing order");
+
+        /* gcpl_id enables link creation order tracking on the subgroup */
+        if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP6_NAME, H5P_DEFAULT, gcpl_id,
+                                      H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP6_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        /* Create several hard links */
+        if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create hard link '%s'\n", GET_LINK_INFO_TEST_HARD_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create hard link '%s'\n", GET_LINK_INFO_TEST_HARD_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create hard link '%s'\n", GET_LINK_INFO_TEST_HARD_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        /* Verify the links have been created */
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if hard link '%s' exists\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            /* Message fixed: this part only queries link info, nothing is deleted */
+            HDprintf("    hard link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if hard link '%s' exists\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            HDprintf("    hard link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if hard link '%s' exists\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            HDprintf("    hard link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        /* Retrieve info of links in turn; decreasing index 2 corresponds to
+         * the first-created link (creation order 0) */
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get hard link info for index %d\n", 2);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if (link_info.type != H5L_TYPE_HARD) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 0)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)0);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get hard link info for index %d\n", 1);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if (link_info.type != H5L_TYPE_HARD) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 1)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)1);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get hard link info for index %d\n", 0);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if (link_info.type != H5L_TYPE_HARD) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 2)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)2);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        if (H5Gclose(subgroup_id) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP6_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_crt_order_decreasing);
+        }
+
+        PASSED();
+    }
+    PART_END(H5Lget_info_by_idx_hard_crt_order_decreasing);
+
+    /* Suppress errors while releasing the handle on failure paths */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(subgroup_id);
+        subgroup_id = H5I_INVALID_HID;
+    }
+    H5E_END_TRY;
+
+    /* Exercise H5Lget_info_by_idx2 on hard links, indexed by link name,
+     * iterating in increasing (alphabetical) order. The links are created
+     * in alphabetical order, so name index i matches creation order i.
+     */
+    PART_BEGIN(H5Lget_info_by_idx_hard_name_order_increasing)
+    {
+        TESTING_2("H5Lget_info_by_idx2 on hard link by alphabetical order in increasing order");
+
+        if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP7_NAME, H5P_DEFAULT, gcpl_id,
+                                      H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP7_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        /* Create several hard links */
+        if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create hard link '%s'\n", GET_LINK_INFO_TEST_HARD_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create hard link '%s'\n", GET_LINK_INFO_TEST_HARD_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create hard link '%s'\n", GET_LINK_INFO_TEST_HARD_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        /* Verify the links have been created */
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if hard link '%s' exists\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            /* Message fixed: this part only queries link info, nothing is deleted */
+            HDprintf("    hard link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if hard link '%s' exists\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            HDprintf("    hard link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if hard link '%s' exists\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            HDprintf("    hard link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        /* Retrieve info of links in turn */
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get hard link info for index %d\n", 0);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if (link_info.type != H5L_TYPE_HARD) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 0)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)0);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get hard link info for index %d\n", 1);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if (link_info.type != H5L_TYPE_HARD) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 1)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)1);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get hard link info for index %d\n", 2);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if (link_info.type != H5L_TYPE_HARD) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 2)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)2);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        if (H5Gclose(subgroup_id) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP7_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_increasing);
+        }
+
+        PASSED();
+    }
+    PART_END(H5Lget_info_by_idx_hard_name_order_increasing);
+
+    /* Suppress errors while releasing the handle on failure paths */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(subgroup_id);
+        subgroup_id = H5I_INVALID_HID;
+    }
+    H5E_END_TRY;
+
+    /* Exercise H5Lget_info_by_idx2 on hard links, indexed by link name,
+     * iterating in decreasing (reverse-alphabetical) order. Skipped when
+     * NO_DECREASING_ALPHA_ITER_ORDER is defined (connector limitation).
+     *
+     * Fixes in this part: the three "couldn't get hard link info" messages
+     * used the '%lld' conversion with plain int literal arguments, which is
+     * a printf format/argument mismatch (undefined behavior); they now use
+     * '%d' like the sibling parts.
+     */
+    PART_BEGIN(H5Lget_info_by_idx_hard_name_order_decreasing)
+    {
+        TESTING_2("H5Lget_info_by_idx2 on hard link by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+        if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP8_NAME, H5P_DEFAULT, gcpl_id,
+                                      H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP8_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        /* Create several hard links */
+        if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create hard link '%s'\n", GET_LINK_INFO_TEST_HARD_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create hard link '%s'\n", GET_LINK_INFO_TEST_HARD_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create hard link '%s'\n", GET_LINK_INFO_TEST_HARD_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        /* Verify the links have been created */
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if hard link '%s' exists\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            /* Message fixed: this part only queries link info, nothing is deleted */
+            HDprintf("    hard link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if hard link '%s' exists\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            HDprintf("    hard link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if hard link '%s' exists\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            HDprintf("    hard link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_HARD_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        /* Retrieve info of links in turn; decreasing name index 2 is the
+         * alphabetically-first link (creation order 0) */
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get hard link info for index %d\n", 2);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if (link_info.type != H5L_TYPE_HARD) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 0)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)0);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get hard link info for index %d\n", 1);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if (link_info.type != H5L_TYPE_HARD) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 1)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)1);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get hard link info for index %d\n", 0);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if (link_info.type != H5L_TYPE_HARD) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 2)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)2);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        if (H5Gclose(subgroup_id) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP8_NAME);
+            PART_ERROR(H5Lget_info_by_idx_hard_name_order_decreasing);
+        }
+
+        PASSED();
+#else
+        SKIPPED();
+        PART_EMPTY(H5Lget_info_by_idx_hard_name_order_decreasing);
+#endif
+    }
+    PART_END(H5Lget_info_by_idx_hard_name_order_decreasing);
+
+    /* Suppress errors while releasing the handle on failure paths */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(subgroup_id);
+        subgroup_id = H5I_INVALID_HID;
+    }
+    H5E_END_TRY;
+
+    /* Exercise H5Lget_info_by_idx2 on soft links, indexed by creation order,
+     * iterating in increasing order: create three soft links to the same
+     * target path, then check link type, value size (target path length
+     * plus NUL), and creation order for indices 0..2.
+     */
+    PART_BEGIN(H5Lget_info_by_idx_soft_crt_order_increasing)
+    {
+        TESTING_2("H5Lget_info_by_idx2 on soft link by creation order in increasing order");
+
+        if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP9_NAME, H5P_DEFAULT, gcpl_id,
+                                      H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP9_NAME);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        /* Create several soft links, all pointing at the subgroup itself */
+        if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+                           "/" GET_LINK_INFO_TEST_SUBGROUP9_NAME,
+                           subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create soft link '%s'\n", GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+                           "/" GET_LINK_INFO_TEST_SUBGROUP9_NAME,
+                           subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME2, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create soft link '%s'\n", GET_LINK_INFO_TEST_SOFT_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+                           "/" GET_LINK_INFO_TEST_SUBGROUP9_NAME,
+                           subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME3, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create soft link '%s'\n", GET_LINK_INFO_TEST_SOFT_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        /* Verify the links have been created */
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if soft link '%s' exists\n",
+                     GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            /* Message fixed: this part only queries link info, nothing is deleted */
+            HDprintf("    soft link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if soft link '%s' exists\n",
+                     GET_LINK_INFO_TEST_SOFT_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            HDprintf("    soft link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_SOFT_LINK_NAME2);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if soft link '%s' exists\n",
+                     GET_LINK_INFO_TEST_SOFT_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            HDprintf("    soft link '%s' does not exist\n",
+                     GET_LINK_INFO_TEST_SOFT_LINK_NAME3);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        /* Retrieve info of links in turn */
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get soft link info for index %d\n", 0);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if (link_info.type != H5L_TYPE_SOFT) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        /* For a soft link, val_size is the target path length + 1 for NUL */
+        link_val_size = strlen("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+                               "/" GET_LINK_INFO_TEST_SUBGROUP9_NAME) +
+                        1;
+        if (link_info.u.val_size != link_val_size) {
+            H5_FAILED();
+            HDprintf("    link's value size '%zu' did not match expected value '%zu'\n",
+                     link_info.u.val_size, link_val_size);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 0)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)0);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get soft link info for index %d\n", 1);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if (link_info.type != H5L_TYPE_SOFT) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        link_val_size = strlen("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+                               "/" GET_LINK_INFO_TEST_SUBGROUP9_NAME) +
+                        1;
+        if (link_info.u.val_size != link_val_size) {
+            H5_FAILED();
+            HDprintf("    link's value size '%zu' did not match expected value '%zu'\n",
+                     link_info.u.val_size, link_val_size);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 1)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)1);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        HDmemset(&link_info, 0, sizeof(link_info));
+        if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2, &link_info,
+                                H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get soft link info for index %d\n", 2);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if (link_info.type != H5L_TYPE_SOFT) {
+            H5_FAILED();
+            HDprintf("    incorrect link type returned\n");
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        link_val_size = strlen("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+                               "/" GET_LINK_INFO_TEST_SUBGROUP9_NAME) +
+                        1;
+        if (link_info.u.val_size != link_val_size) {
+            H5_FAILED();
+            HDprintf("    link's value size '%zu' did not match expected value '%zu'\n",
+                     link_info.u.val_size, link_val_size);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if (link_info.corder_valid && (link_info.corder != 2)) {
+            H5_FAILED();
+            HDprintf("    link's creation order value '%lld' did not match expected value '%lld'\n",
+                     (long long)link_info.corder, (long long)2);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        if (H5Gclose(subgroup_id) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP9_NAME);
+            PART_ERROR(H5Lget_info_by_idx_soft_crt_order_increasing);
+        }
+
+        PASSED();
+    }
+    PART_END(H5Lget_info_by_idx_soft_crt_order_increasing);
+
+    /* Suppress errors while releasing the handle on failure paths */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(subgroup_id);
+        subgroup_id = H5I_INVALID_HID;
+    }
+    H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_by_idx_soft_crt_order_decreasing)
+ {
+ TESTING_2("H5Lget_info_by_idx2 on soft link by creation order in decreasing order");
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP10_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP10_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Create several soft links */
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP10_NAME,
+ subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP10_NAME,
+ subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_INFO_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP10_NAME,
+ subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_INFO_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Retrieve info of links in turn */
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link info for index %d\n", 2);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (link_info.type != H5L_TYPE_SOFT) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ link_val_size = strlen("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP10_NAME) +
+ 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%zu' did not match expected value '%zu'\n",
+ link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)0);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link info for index %d\n", 1);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (link_info.type != H5L_TYPE_SOFT) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ link_val_size = strlen("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP10_NAME) +
+ 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%zu' did not match expected value '%zu'\n",
+ link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 1)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)1);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link info for index %d\n", 0);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (link_info.type != H5L_TYPE_SOFT) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ link_val_size = strlen("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP10_NAME) +
+ 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%zu' did not match expected value '%zu'\n",
+ link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 2)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)2);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP10_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_crt_order_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_info_by_idx_soft_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_by_idx_soft_name_order_increasing)
+ {
+ TESTING_2("H5Lget_info_by_idx2 on soft link by alphabetical order in increasing order");
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP11_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP11_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ /* Create several soft links */
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP11_NAME,
+ subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP11_NAME,
+ subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_INFO_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP11_NAME,
+ subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_INFO_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ /* Retrieve info of links in turn */
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link info for index %d\n", 0);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if (link_info.type != H5L_TYPE_SOFT) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ link_val_size = strlen("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP11_NAME) +
+ 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%zu' did not match expected value '%zu'\n",
+ link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)0);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link info for index %d\n", 1);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if (link_info.type != H5L_TYPE_SOFT) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ link_val_size = strlen("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP11_NAME) +
+ 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%zu' did not match expected value '%zu'\n",
+ link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 1)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)1);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link info for index %d\n", 2);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if (link_info.type != H5L_TYPE_SOFT) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ link_val_size = strlen("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP11_NAME) +
+ 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%zu' did not match expected value '%zu'\n",
+ link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 2)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)2);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP11_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_info_by_idx_soft_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_by_idx_soft_name_order_decreasing)
+ {
+ TESTING_2("H5Lget_info_by_idx2 on soft link by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP12_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP12_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ /* Create several soft links */
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP12_NAME,
+ subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP12_NAME,
+ subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_INFO_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP12_NAME,
+ subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", GET_LINK_INFO_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ /* Retrieve info of links in turn */
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link info for index %lld\n", 2);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if (link_info.type != H5L_TYPE_SOFT) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ link_val_size = strlen("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP12_NAME) +
+ 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)0);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link info for index %lld\n", 1);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if (link_info.type != H5L_TYPE_SOFT) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ link_val_size = strlen("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP12_NAME) +
+ 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 1)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)1);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get soft link info for index %lld\n", 0);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if (link_info.type != H5L_TYPE_SOFT) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ link_val_size = strlen("/" LINK_TEST_GROUP_NAME "/" GET_LINK_INFO_TEST_GROUP_NAME
+ "/" GET_LINK_INFO_TEST_SUBGROUP12_NAME) +
+ 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 2)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)2);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP12_NAME);
+ PART_ERROR(H5Lget_info_by_idx_soft_name_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_info_by_idx_soft_name_order_decreasing);
+#endif
+ }
+ PART_END(H5Lget_info_by_idx_soft_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_by_idx_external_crt_order_increasing)
+ {
+ TESTING_2("H5Lget_info_by_idx2 on external link by creation order in increasing order");
+#ifndef NO_EXTERNAL_LINKS
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP13_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP13_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ /* Create several external links */
+ ext_objname = "/";
+ if (H5Lcreate_external(ext_link_filename, ext_objname, subgroup_id,
+ GET_LINK_INFO_TEST_EXT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, ext_objname, subgroup_id,
+ GET_LINK_INFO_TEST_EXT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_INFO_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, ext_objname, subgroup_id,
+ GET_LINK_INFO_TEST_EXT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_INFO_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ /* Retrieve info of links in turn */
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link info for index %lld\n", 0);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if (link_info.type != H5L_TYPE_EXTERNAL) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_objname) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)0);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link info for index %lld\n", 1);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if (link_info.type != H5L_TYPE_EXTERNAL) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_objname) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 1)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)1);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link info for index %lld\n", 2);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if (link_info.type != H5L_TYPE_EXTERNAL) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_objname) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 2)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)2);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP13_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_increasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_info_by_idx_external_crt_order_increasing);
+#endif
+ }
+ PART_END(H5Lget_info_by_idx_external_crt_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_by_idx_external_crt_order_decreasing)
+ {
+ TESTING_2("H5Lget_info_by_idx2 on external link by creation order in decreasing order");
+#ifndef NO_EXTERNAL_LINKS
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP14_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP14_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ /* Create several external links */
+ ext_objname = "/";
+ if (H5Lcreate_external(ext_link_filename, ext_objname, subgroup_id,
+ GET_LINK_INFO_TEST_EXT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, ext_objname, subgroup_id,
+ GET_LINK_INFO_TEST_EXT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_INFO_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, ext_objname, subgroup_id,
+ GET_LINK_INFO_TEST_EXT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_INFO_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ /* Retrieve info of links in turn */
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link info for index %lld\n", 2);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if (link_info.type != H5L_TYPE_EXTERNAL) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_objname) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)0);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link info for index %lld\n", 1);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if (link_info.type != H5L_TYPE_EXTERNAL) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_objname) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 1)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)1);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link info for index %lld\n", 0);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if (link_info.type != H5L_TYPE_EXTERNAL) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_objname) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 2)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)2);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP14_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_crt_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_info_by_idx_external_crt_order_decreasing);
+#endif
+ }
+ PART_END(H5Lget_info_by_idx_external_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_by_idx_external_name_order_increasing)
+ {
+ TESTING_2("H5Lget_info_by_idx2 on external link by alphabetical order in increasing order");
+#ifndef NO_EXTERNAL_LINKS
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP15_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP15_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ /* Create several external links */
+ ext_objname = "/";
+ if (H5Lcreate_external(ext_link_filename, ext_objname, subgroup_id,
+ GET_LINK_INFO_TEST_EXT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, ext_objname, subgroup_id,
+ GET_LINK_INFO_TEST_EXT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_INFO_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, ext_objname, subgroup_id,
+ GET_LINK_INFO_TEST_EXT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_INFO_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ /* Retrieve info of links in turn */
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link info for index %lld\n", 0);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if (link_info.type != H5L_TYPE_EXTERNAL) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_objname) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)0);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link info for index %lld\n", 1);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if (link_info.type != H5L_TYPE_EXTERNAL) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_objname) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 1)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)1);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link info for index %lld\n", 2);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if (link_info.type != H5L_TYPE_EXTERNAL) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_objname) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 2)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)2);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP15_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_increasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_info_by_idx_external_name_order_increasing);
+#endif
+ }
+ PART_END(H5Lget_info_by_idx_external_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_by_idx_external_name_order_decreasing)
+ {
+ TESTING_2("H5Lget_info_by_idx2 on external link by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_INFO_TEST_SUBGROUP16_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", GET_LINK_INFO_TEST_SUBGROUP16_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ /* Create several external links */
+ ext_objname = "/";
+ if (H5Lcreate_external(ext_link_filename, ext_objname, subgroup_id,
+ GET_LINK_INFO_TEST_EXT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, ext_objname, subgroup_id,
+ GET_LINK_INFO_TEST_EXT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_INFO_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, ext_objname, subgroup_id,
+ GET_LINK_INFO_TEST_EXT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", GET_LINK_INFO_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME2);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_INFO_TEST_EXT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before deletion\n",
+ GET_LINK_INFO_TEST_EXT_LINK_NAME3);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ /* Retrieve info of links in turn */
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link info for index %lld\n", 2);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if (link_info.type != H5L_TYPE_EXTERNAL) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_objname) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 0)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)0);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link info for index %lld\n", 1);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if (link_info.type != H5L_TYPE_EXTERNAL) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_objname) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 1)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)1);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ HDmemset(&link_info, 0, sizeof(link_info));
+ if (H5Lget_info_by_idx2(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, &link_info,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get external link info for index %lld\n", 0);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if (link_info.type != H5L_TYPE_EXTERNAL) {
+ H5_FAILED();
+ HDprintf(" incorrect link type returned\n");
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ link_val_size = 1 + strlen(ext_link_filename) + 1 + strlen(ext_objname) + 1;
+ if (link_info.u.val_size != link_val_size) {
+ H5_FAILED();
+ HDprintf(" link's value size '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.u.val_size, link_val_size);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if (link_info.corder_valid && (link_info.corder != 2)) {
+ H5_FAILED();
+ HDprintf(" link's creation order value '%lld' did not match expected value '%lld'\n",
+ (long long)link_info.corder, (long long)2);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", GET_LINK_INFO_TEST_SUBGROUP16_NAME);
+ PART_ERROR(H5Lget_info_by_idx_external_name_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_info_by_idx_external_name_order_decreasing);
+#endif
+ }
+ PART_END(H5Lget_info_by_idx_external_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_by_idx_ud_crt_order_increasing)
+ {
+ TESTING_2("H5Lget_info_by_idx2 on user-defined link by creation order in increasing order");
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_info_by_idx_ud_crt_order_increasing);
+ }
+ PART_END(H5Lget_info_by_idx_ud_crt_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_by_idx_ud_crt_order_decreasing)
+ {
+ TESTING_2("H5Lget_info_by_idx2 on user-defined link by creation order in decreasing order");
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_info_by_idx_ud_crt_order_decreasing);
+ }
+ PART_END(H5Lget_info_by_idx_ud_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_by_idx_ud_name_order_increasing)
+ {
+ TESTING_2("H5Lget_info_by_idx2 on user-defined link by alphabetical order in increasing order");
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_info_by_idx_ud_name_order_increasing);
+ }
+ PART_END(H5Lget_info_by_idx_ud_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_info_by_idx_ud_name_order_decreasing)
+ {
+ TESTING_2("H5Lget_info_by_idx2 on user-defined link by alphabetical order in decreasing order");
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_info_by_idx_ud_name_order_decreasing);
+ }
+ PART_END(H5Lget_info_by_idx_ud_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(gcpl_id);
+ H5Gclose(subgroup_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(ext_file_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a link's info can't be retrieved
+ * when H5Lget_info(_by_idx)2 is passed invalid parameters.
+ *
+ * Setup creates a group (with creation order tracking/indexing enabled)
+ * containing one hard link. Each multipart test then passes exactly one
+ * invalid argument -- an invalid location ID, a NULL or empty name, an
+ * invalid index type or iteration order, or an invalid LAPL -- and
+ * verifies that the call fails instead of succeeding.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_get_link_info_invalid_params(void)
+{
+    H5L_info2_t link_info;
+    herr_t      err_ret = -1;
+    htri_t      link_exists;
+    hid_t       file_id         = H5I_INVALID_HID;
+    hid_t       container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t       gcpl_id         = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("link info retrieval with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested.
+     * Only hard links are created here, so soft/external link support
+     * is not required.
+     */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        /* Message lists exactly the capability flags checked above */
+        HDprintf("    API functions for basic file, group, basic link, more link, hard link, or "
+                 "creation order aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create a GCPL\n");
+        goto error;
+    }
+
+    /* Track and index link creation order so the by-index tests below
+     * have a valid baseline group (failures must come from the invalid
+     * parameter, not from a missing creation-order index).
+     */
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't enable link creation order tracking and indexing on GCPL\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, GET_LINK_INFO_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+                               gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 GET_LINK_INFO_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if (H5Lcreate_hard(group_id, ".", group_id, GET_LINK_INFO_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+                       H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create hard link '%s'\n", GET_LINK_INFO_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+        goto error;
+    }
+
+    /* Verify the link has been created */
+    if ((link_exists = H5Lexists(group_id, GET_LINK_INFO_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT)) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if hard link '%s' exists\n",
+                 GET_LINK_INFO_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    hard link did not exist\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /* H5Lget_info2: each invalid call is wrapped in H5E_BEGIN_TRY /
+         * H5E_END_TRY to suppress the expected error stack output.
+         */
+        PART_BEGIN(H5Lget_info_invalid_loc_id)
+        {
+            TESTING_2("H5Lget_info2 with an invalid location ID");
+
+            HDmemset(&link_info, 0, sizeof(link_info));
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lget_info2(H5I_INVALID_HID, GET_LINK_INFO_INVALID_PARAMS_TEST_HARD_LINK_NAME,
+                                       &link_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lget_info2 succeeded with an invalid location ID!\n");
+                PART_ERROR(H5Lget_info_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lget_info_invalid_loc_id);
+
+        PART_BEGIN(H5Lget_info_invalid_link_name)
+        {
+            TESTING_2("H5Lget_info2 with an invalid link name");
+
+            /* Both a NULL name and an empty name must be rejected */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lget_info2(group_id, NULL, &link_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lget_info2 succeeded with a NULL link name!\n");
+                PART_ERROR(H5Lget_info_invalid_link_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lget_info2(group_id, "", &link_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lget_info2 succeeded with an invalid link name of ''!\n");
+                PART_ERROR(H5Lget_info_invalid_link_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lget_info_invalid_link_name);
+
+        PART_BEGIN(H5Lget_info_invalid_lapl)
+        {
+            TESTING_2("H5Lget_info2 with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lget_info2(group_id, GET_LINK_INFO_INVALID_PARAMS_TEST_HARD_LINK_NAME, &link_info,
+                                       H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lget_info2 succeeded with an invalid LAPL!\n");
+                PART_ERROR(H5Lget_info_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lget_info_invalid_lapl);
+
+        /* Reset the info struct before the by-index variants */
+        HDmemset(&link_info, 0, sizeof(link_info));
+
+        PART_BEGIN(H5Lget_info_by_idx_invalid_loc_id)
+        {
+            TESTING_2("H5Lget_info_by_idx2 with an invalid location ID");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lget_info_by_idx2(H5I_INVALID_HID, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0,
+                                              &link_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lget_info_by_idx2 succeeded with an invalid location ID!\n");
+                PART_ERROR(H5Lget_info_by_idx_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lget_info_by_idx_invalid_loc_id);
+
+        PART_BEGIN(H5Lget_info_by_idx_invalid_grp_name)
+        {
+            TESTING_2("H5Lget_info_by_idx2 with an invalid group name");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lget_info_by_idx2(group_id, NULL, H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, &link_info,
+                                              H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lget_info_by_idx2 succeeded with a NULL group name!\n");
+                PART_ERROR(H5Lget_info_by_idx_invalid_grp_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lget_info_by_idx2(group_id, "", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, &link_info,
+                                              H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lget_info_by_idx2 succeeded with an invalid group name of ''!\n");
+                PART_ERROR(H5Lget_info_by_idx_invalid_grp_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lget_info_by_idx_invalid_grp_name);
+
+        PART_BEGIN(H5Lget_info_by_idx_invalid_index_type)
+        {
+            TESTING_2("H5Lget_info_by_idx2 with an invalid index type");
+
+            /* Both sentinel values of H5_index_t are invalid inputs */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lget_info_by_idx2(group_id, ".", H5_INDEX_UNKNOWN, H5_ITER_INC, 0, &link_info,
+                                              H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lget_info_by_idx2 succeeded with invalid index type H5_INDEX_UNKNOWN!\n");
+                PART_ERROR(H5Lget_info_by_idx_invalid_index_type);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Lget_info_by_idx2(group_id, ".", H5_INDEX_N, H5_ITER_INC, 0, &link_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lget_info_by_idx2 succeeded with invalid index type H5_INDEX_N!\n");
+                PART_ERROR(H5Lget_info_by_idx_invalid_index_type);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lget_info_by_idx_invalid_index_type);
+
+        PART_BEGIN(H5Lget_info_by_idx_invalid_iter_order)
+        {
+            TESTING_2("H5Lget_info_by_idx2 with an invalid iteration ordering");
+
+            /* Both sentinel values of H5_iter_order_t are invalid inputs */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lget_info_by_idx2(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_UNKNOWN, 0,
+                                              &link_info, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(
+                    "    H5Lget_info_by_idx2 succeeded with invalid iteration ordering H5_ITER_UNKNOWN!\n");
+                PART_ERROR(H5Lget_info_by_idx_invalid_iter_order);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lget_info_by_idx2(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_N, 0, &link_info,
+                                              H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lget_info_by_idx2 succeeded with invalid iteration ordering H5_ITER_N!\n");
+                PART_ERROR(H5Lget_info_by_idx_invalid_iter_order);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lget_info_by_idx_invalid_iter_order);
+
+        PART_BEGIN(H5Lget_info_by_idx_invalid_lapl)
+        {
+            TESTING_2("H5Lget_info_by_idx2 with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Lget_info_by_idx2(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, &link_info,
+                                              H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Lget_info_by_idx2 succeeded with an invalid LAPL!\n");
+                PART_ERROR(H5Lget_info_by_idx_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lget_info_by_idx_invalid_lapl);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors are suppressed since we are already failing */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(gcpl_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a link's name can be correctly
+ * retrieved by using H5Lget_name_by_idx.
+ */
+static int
+test_get_link_name(void)
+{
+ ssize_t link_name_buf_size = 0;
+ htri_t link_exists;
+ hid_t file_id = H5I_INVALID_HID, ext_file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t subgroup_id = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+ char link_name_buf[GET_LINK_NAME_TEST_BUF_SIZE];
+#ifndef NO_EXTERNAL_LINKS
+ char ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+#endif
+
+ TESTING_MULTIPART("link name retrieval");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, basic, more, soft, hard, external link, or "
+ "creation order aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create GCPL for link creation order tracking\n");
+ goto error;
+ }
+
+ if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set link creation order tracking\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, GET_LINK_NAME_TEST_GROUP_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n", GET_LINK_NAME_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Lget_name_by_idx_hard_crt_order_increasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on hard link by creation order in increasing order");
+
+ /* Create group to hold some links */
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_NAME_TEST_HARD_SUBGROUP_NAME, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ GET_LINK_NAME_TEST_HARD_SUBGROUP_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ /* Create several hard links in reverse order to test creation order */
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create hard link '%s'\n", GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create hard link '%s'\n", GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create hard link '%s'\n", GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ /* Retrieve link names */
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_HARD_LINK_NAME3,
+ strlen(GET_LINK_NAME_TEST_HARD_LINK_NAME3) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_HARD_LINK_NAME2,
+ strlen(GET_LINK_NAME_TEST_HARD_LINK_NAME2) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_HARD_LINK_NAME,
+ strlen(GET_LINK_NAME_TEST_HARD_LINK_NAME) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_name_by_idx_hard_crt_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_hard_crt_order_decreasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on hard link by creation order in decreasing order");
+
+ /* Create group to hold some links */
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_NAME_TEST_HARD_SUBGROUP_NAME2, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ GET_LINK_NAME_TEST_HARD_SUBGROUP_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ /* Create several hard links in reverse order to test creation order */
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create hard link '%s'\n", GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create hard link '%s'\n", GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create hard link '%s'\n", GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ /* Retrieve link names */
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_HARD_LINK_NAME,
+ strlen(GET_LINK_NAME_TEST_HARD_LINK_NAME) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_HARD_LINK_NAME2,
+ strlen(GET_LINK_NAME_TEST_HARD_LINK_NAME2) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_HARD_LINK_NAME3,
+ strlen(GET_LINK_NAME_TEST_HARD_LINK_NAME3) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_crt_order_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_name_by_idx_hard_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_hard_name_order_increasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on hard link by alphabetical order in increasing order");
+
+ /* Create group to hold some links */
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_NAME_TEST_HARD_SUBGROUP_NAME3, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ GET_LINK_NAME_TEST_HARD_SUBGROUP_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ /* Create several hard links */
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create hard link '%s'\n", GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create hard link '%s'\n", GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create hard link '%s'\n", GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ /* Retrieve link names */
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_HARD_LINK_NAME,
+ strlen(GET_LINK_NAME_TEST_HARD_LINK_NAME) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_HARD_LINK_NAME2,
+ strlen(GET_LINK_NAME_TEST_HARD_LINK_NAME2) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_HARD_LINK_NAME3,
+ strlen(GET_LINK_NAME_TEST_HARD_LINK_NAME3) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_name_by_idx_hard_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_hard_name_order_decreasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on hard link by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Create group to hold some links */
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_NAME_TEST_HARD_SUBGROUP_NAME4, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ GET_LINK_NAME_TEST_HARD_SUBGROUP_NAME4);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ /* Create several hard links */
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create hard link '%s'\n", GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create hard link '%s'\n", GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create hard link '%s'\n", GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_HARD_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if hard link '%s' exists\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" hard link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ /* Retrieve link names */
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_HARD_LINK_NAME3,
+ strlen(GET_LINK_NAME_TEST_HARD_LINK_NAME3) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_HARD_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_HARD_LINK_NAME2,
+ strlen(GET_LINK_NAME_TEST_HARD_LINK_NAME2) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_HARD_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_HARD_LINK_NAME,
+ strlen(GET_LINK_NAME_TEST_HARD_LINK_NAME) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_HARD_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group\n");
+ PART_ERROR(H5Lget_name_by_idx_hard_name_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_name_by_idx_hard_name_order_decreasing);
+#endif
+ }
+ PART_END(H5Lget_name_by_idx_hard_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_soft_crt_order_increasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on soft link by creation order in increasing order");
+
+ /* Create group to hold some links */
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_NAME_TEST_SOFT_SUBGROUP_NAME, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ GET_LINK_NAME_TEST_SOFT_SUBGROUP_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ /* Create several soft links in reverse order to test creation order */
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_NAME_TEST_GROUP_NAME, subgroup_id,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create soft link '%s'\n", GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_NAME_TEST_GROUP_NAME, subgroup_id,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create soft link '%s'\n", GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_NAME_TEST_GROUP_NAME, subgroup_id,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create soft link '%s'\n", GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ /* Retrieve link names */
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_SOFT_LINK_NAME3,
+ strlen(GET_LINK_NAME_TEST_SOFT_LINK_NAME3) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_SOFT_LINK_NAME2,
+ strlen(GET_LINK_NAME_TEST_SOFT_LINK_NAME2) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_SOFT_LINK_NAME,
+ strlen(GET_LINK_NAME_TEST_SOFT_LINK_NAME) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_name_by_idx_soft_crt_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_soft_crt_order_decreasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on soft link by creation order in decreasing order");
+
+ /* Create group to hold some links */
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_NAME_TEST_SOFT_SUBGROUP_NAME2, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ GET_LINK_NAME_TEST_SOFT_SUBGROUP_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Create several soft links in reverse order to test creation order */
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_NAME_TEST_GROUP_NAME, subgroup_id,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create soft link '%s'\n", GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_NAME_TEST_GROUP_NAME, subgroup_id,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create soft link '%s'\n", GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_NAME_TEST_GROUP_NAME, subgroup_id,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create soft link '%s'\n", GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ /* Retrieve link names */
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_SOFT_LINK_NAME,
+ strlen(GET_LINK_NAME_TEST_SOFT_LINK_NAME) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_SOFT_LINK_NAME2,
+ strlen(GET_LINK_NAME_TEST_SOFT_LINK_NAME2) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_SOFT_LINK_NAME3,
+ strlen(GET_LINK_NAME_TEST_SOFT_LINK_NAME3) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_crt_order_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_name_by_idx_soft_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_soft_name_order_increasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on soft link by alphabetical order in increasing order");
+
+ /* Create group to hold some links */
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_NAME_TEST_SOFT_SUBGROUP_NAME3, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ GET_LINK_NAME_TEST_SOFT_SUBGROUP_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ /* Create several soft links */
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_NAME_TEST_GROUP_NAME, subgroup_id,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create soft link '%s'\n", GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_NAME_TEST_GROUP_NAME, subgroup_id,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create soft link '%s'\n", GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_NAME_TEST_GROUP_NAME, subgroup_id,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create soft link '%s'\n", GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ /* Retrieve link names */
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_SOFT_LINK_NAME,
+ strlen(GET_LINK_NAME_TEST_SOFT_LINK_NAME) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_SOFT_LINK_NAME2,
+ strlen(GET_LINK_NAME_TEST_SOFT_LINK_NAME2) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_SOFT_LINK_NAME3,
+ strlen(GET_LINK_NAME_TEST_SOFT_LINK_NAME3) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lget_name_by_idx_soft_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_soft_name_order_decreasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on soft link by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Create group to hold some links */
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_NAME_TEST_SOFT_SUBGROUP_NAME4, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ GET_LINK_NAME_TEST_SOFT_SUBGROUP_NAME4);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ /* Create several soft links */
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_NAME_TEST_GROUP_NAME, subgroup_id,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create soft link '%s'\n", GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_NAME_TEST_GROUP_NAME, subgroup_id,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create soft link '%s'\n", GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" GET_LINK_NAME_TEST_GROUP_NAME, subgroup_id,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create soft link '%s'\n", GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_SOFT_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_SOFT_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if soft link '%s' exists\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" soft link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ /* Retrieve link names */
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_SOFT_LINK_NAME3,
+ strlen(GET_LINK_NAME_TEST_SOFT_LINK_NAME3) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_SOFT_LINK_NAME2,
+ strlen(GET_LINK_NAME_TEST_SOFT_LINK_NAME2) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_SOFT_LINK_NAME,
+ strlen(GET_LINK_NAME_TEST_SOFT_LINK_NAME) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_SOFT_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group\n");
+ PART_ERROR(H5Lget_name_by_idx_soft_name_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_name_by_idx_soft_name_order_decreasing);
+#endif
+ }
+ PART_END(H5Lget_name_by_idx_soft_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_external_crt_order_increasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on external link by creation order in increasing order");
+#ifndef NO_EXTERNAL_LINKS
+ /* Create file for external link to reference */
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ /* Create group to hold some links */
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_NAME_TEST_EXTERNAL_SUBGROUP_NAME, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ GET_LINK_NAME_TEST_EXTERNAL_SUBGROUP_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ /* Create several external links in reverse order to test creation order */
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create external link '%s'\n", GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create external link '%s'\n", GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create external link '%s'\n", GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ /* Retrieve link names */
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 0, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3,
+ strlen(GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2,
+ strlen(GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME,
+ strlen(GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_increasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_name_by_idx_external_crt_order_increasing);
+#endif
+ }
+ PART_END(H5Lget_name_by_idx_external_crt_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_external_crt_order_decreasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on external link by creation order in decreasing order");
+#ifndef NO_EXTERNAL_LINKS
+ /* Create file for external link to reference */
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ /* Create group to hold some links */
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_NAME_TEST_EXTERNAL_SUBGROUP_NAME2, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ GET_LINK_NAME_TEST_EXTERNAL_SUBGROUP_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ /* Create several external links in reverse order to test creation order */
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create external link '%s'\n", GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create external link '%s'\n", GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create external link '%s'\n", GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ /* Retrieve link names */
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 0, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME,
+ strlen(GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 1, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2,
+ strlen(GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, 2, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3,
+ strlen(GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group\n");
+ PART_ERROR(H5Lget_name_by_idx_external_crt_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_name_by_idx_external_crt_order_decreasing);
+#endif
+ }
+ PART_END(H5Lget_name_by_idx_external_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_external_name_order_increasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on external link by alphabetical order in increasing order");
+#ifndef NO_EXTERNAL_LINKS
+ /* Create file for external link to reference */
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ /* Create group to hold some links */
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_NAME_TEST_EXTERNAL_SUBGROUP_NAME3, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ GET_LINK_NAME_TEST_EXTERNAL_SUBGROUP_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ /* Create several external links */
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create external link '%s'\n", GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create external link '%s'\n", GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create external link '%s'\n", GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ /* Retrieve link names */
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME,
+ strlen(GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 1, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2,
+ strlen(GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC, 2, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3,
+ strlen(GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_increasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_name_by_idx_external_name_order_increasing);
+#endif
+ }
+ PART_END(H5Lget_name_by_idx_external_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_external_name_order_decreasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on external link by alphabetical order in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Create file for external link to reference */
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s",
+ EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((ext_file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Fclose(ext_file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close file '%s'\n", ext_link_filename);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ /* Create group to hold some links */
+ if ((subgroup_id = H5Gcreate2(group_id, GET_LINK_NAME_TEST_EXTERNAL_SUBGROUP_NAME4, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ GET_LINK_NAME_TEST_EXTERNAL_SUBGROUP_NAME4);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ /* Create several external links */
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create external link '%s'\n", GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create external link '%s'\n", GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup_id,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create external link '%s'\n", GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_exists = H5Lexists(subgroup_id, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if external link '%s' exists\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" external link '%s' did not exist before name retrieval\n",
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ /* Retrieve link names */
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 0, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3,
+ strlen(GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 1, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2,
+ strlen(GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if ((link_name_buf_size = H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2,
+ NULL, 0, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name size\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Lget_name_by_idx(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC, 2, link_name_buf,
+ (size_t)link_name_buf_size + 1, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve link name\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if (HDstrncmp(link_name_buf, GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME,
+ strlen(GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME) + 1)) {
+ H5_FAILED();
+ HDprintf(" link name '%s' did not match expected name '%s'\n", link_name_buf,
+ GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME);
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group\n");
+ PART_ERROR(H5Lget_name_by_idx_external_name_order_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lget_name_by_idx_external_name_order_decreasing);
+#endif
+ }
+ PART_END(H5Lget_name_by_idx_external_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ H5Fclose(ext_file_id);
+ ext_file_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_ud_crt_order_increasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on user-defined link by creation order in increasing order");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_name_by_idx_ud_crt_order_increasing);
+ }
+ PART_END(H5Lget_name_by_idx_ud_crt_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_ud_crt_order_decreasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on user-defined link by creation order in decreasing order");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_name_by_idx_ud_crt_order_decreasing);
+ }
+ PART_END(H5Lget_name_by_idx_ud_crt_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_ud_name_order_increasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on user-defined link by alphabetical order in increasing order");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_name_by_idx_ud_name_order_increasing);
+ }
+ PART_END(H5Lget_name_by_idx_ud_name_order_increasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+
+ PART_BEGIN(H5Lget_name_by_idx_ud_name_order_decreasing)
+ {
+ TESTING_2("H5Lget_name_by_idx on user-defined link by alphabetical order in decreasing order");
+
+ /* TODO */
+
+ SKIPPED();
+ PART_EMPTY(H5Lget_name_by_idx_ud_name_order_decreasing);
+ }
+ PART_END(H5Lget_name_by_idx_ud_name_order_decreasing);
+
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(subgroup_id);
+ subgroup_id = H5I_INVALID_HID;
+ }
+ H5E_END_TRY;
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(gcpl_id);
+ H5Gclose(subgroup_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(ext_file_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a link's name can't be retrieved
+ * when H5Lget_name_by_idx is passed invalid parameters.
+ */
static int
test_get_link_name_invalid_params(void)
{
    ssize_t ret;
    htri_t  link_exists;
    size_t  link_name_buf_size = 0;
    hid_t   file_id            = H5I_INVALID_HID;
    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
    char   *link_name_buf = NULL;

    TESTING_MULTIPART("link name retrieval with invalid parameters");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
        SKIPPED();
        HDprintf("    API functions for basic file, group, basic, more, soft, hard, external link, or "
                 "creation order aren't supported with this connector\n");
        return 0;
    }

    TESTING_2("test setup");

    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
        goto error;
    }

    if ((group_id = H5Gcreate2(container_group, GET_LINK_NAME_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create container subgroup '%s'\n",
                 GET_LINK_NAME_INVALID_PARAMS_TEST_GROUP_NAME);
        goto error;
    }

    /* Create a single valid hard link so the index-0 name queries below have
     * something to (fail to) retrieve when given one invalid parameter each.
     */
    if (H5Lcreate_hard(group_id, ".", group_id, GET_LINK_NAME_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT,
                       H5P_DEFAULT) < 0) {
        H5_FAILED();
        HDprintf("    failed to create hard link '%s'\n", GET_LINK_NAME_INVALID_PARAMS_TEST_HARD_LINK_NAME);
        goto error;
    }

    /* Verify the link has been created */
    if ((link_exists = H5Lexists(group_id, GET_LINK_NAME_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT)) <
        0) {
        H5_FAILED();
        HDprintf("    couldn't determine if link exists\n");
        goto error;
    }

    if (!link_exists) {
        H5_FAILED();
        HDprintf("    link did not exist\n");
        goto error;
    }

    /* Size query: passing a NULL buffer makes H5Lget_name_by_idx return the
     * link name length (without the NUL terminator) instead of copying it.
     */
    if ((ret = H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, NULL, link_name_buf_size,
                                  H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    failed to retrieve link name size\n");
        goto error;
    }

    /* +1 for the NUL terminator that the retrieval calls below will write */
    link_name_buf_size = (size_t)ret;
    if (NULL == (link_name_buf = (char *)HDmalloc(link_name_buf_size + 1)))
        TEST_ERROR;

    PASSED();

    BEGIN_MULTIPART
    {
        /* Each part passes exactly one invalid argument and expects the call
         * to fail; H5E_BEGIN_TRY/H5E_END_TRY suppress the expected HDF5 error
         * stack output while the bad call is made.
         */
        PART_BEGIN(H5Lget_name_by_idx_invalid_loc_id)
        {
            TESTING_2("H5Lget_name_by_idx with an invalid location ID");

            H5E_BEGIN_TRY
            {
                ret = H5Lget_name_by_idx(H5I_INVALID_HID, ".", H5_INDEX_NAME, H5_ITER_INC, 0, link_name_buf,
                                         link_name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (ret >= 0) {
                H5_FAILED();
                HDprintf("    H5Lget_name_by_idx succeeded with an invalid location ID!\n");
                PART_ERROR(H5Lget_name_by_idx_invalid_loc_id);
            }

            PASSED();
        }
        PART_END(H5Lget_name_by_idx_invalid_loc_id);

        PART_BEGIN(H5Lget_name_by_idx_invalid_grp_name)
        {
            TESTING_2("H5Lget_name_by_idx with an invalid group name");

            /* Both a NULL group name and an empty one must be rejected */
            H5E_BEGIN_TRY
            {
                ret = H5Lget_name_by_idx(group_id, NULL, H5_INDEX_NAME, H5_ITER_INC, 0, link_name_buf,
                                         link_name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (ret >= 0) {
                H5_FAILED();
                HDprintf("    H5Lget_name_by_idx succeeded with a NULL group name!\n");
                PART_ERROR(H5Lget_name_by_idx_invalid_grp_name);
            }

            H5E_BEGIN_TRY
            {
                ret = H5Lget_name_by_idx(group_id, "", H5_INDEX_NAME, H5_ITER_INC, 0, link_name_buf,
                                         link_name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (ret >= 0) {
                H5_FAILED();
                HDprintf("    H5Lget_name_by_idx succeeded with an invalid group name of ''!\n");
                PART_ERROR(H5Lget_name_by_idx_invalid_grp_name);
            }

            PASSED();
        }
        PART_END(H5Lget_name_by_idx_invalid_grp_name);

        PART_BEGIN(H5Lget_name_by_idx_invalid_index_type)
        {
            TESTING_2("H5Lget_name_by_idx with an invalid index type");

            /* H5_INDEX_UNKNOWN (below the valid range) and H5_INDEX_N (above
             * it) are both invalid index-type sentinels.
             */
            H5E_BEGIN_TRY
            {
                ret = H5Lget_name_by_idx(group_id, ".", H5_INDEX_UNKNOWN, H5_ITER_INC, 0, link_name_buf,
                                         link_name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (ret >= 0) {
                H5_FAILED();
                HDprintf("    H5Lget_name_by_idx succeeded with invalid index type H5_INDEX_UNKNOWN!\n");
                PART_ERROR(H5Lget_name_by_idx_invalid_index_type);
            }

            H5E_BEGIN_TRY
            {
                ret = H5Lget_name_by_idx(group_id, ".", H5_INDEX_N, H5_ITER_INC, 0, link_name_buf,
                                         link_name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (ret >= 0) {
                H5_FAILED();
                HDprintf("    H5Lget_name_by_idx succeeded with invalid index type H5_INDEX_N!\n");
                PART_ERROR(H5Lget_name_by_idx_invalid_index_type);
            }

            PASSED();
        }
        PART_END(H5Lget_name_by_idx_invalid_index_type);

        PART_BEGIN(H5Lget_name_by_idx_invalid_iter_order)
        {
            TESTING_2("H5Lget_name_by_idx with an invalid iteration ordering");

            /* H5_ITER_UNKNOWN and H5_ITER_N bracket the valid iteration
             * orders and must both be rejected.
             */
            H5E_BEGIN_TRY
            {
                ret = H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_UNKNOWN, 0, link_name_buf,
                                         link_name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (ret >= 0) {
                H5_FAILED();
                HDprintf(
                    "    H5Lget_name_by_idx succeeded with invalid iteration ordering H5_ITER_UNKNOWN!\n");
                PART_ERROR(H5Lget_name_by_idx_invalid_iter_order);
            }

            H5E_BEGIN_TRY
            {
                ret = H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_N, 0, link_name_buf,
                                         link_name_buf_size + 1, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (ret >= 0) {
                H5_FAILED();
                HDprintf("    H5Lget_name_by_idx succeeded with invalid iteration ordering H5_ITER_N!\n");
                PART_ERROR(H5Lget_name_by_idx_invalid_iter_order);
            }

            PASSED();
        }
        PART_END(H5Lget_name_by_idx_invalid_iter_order);

        PART_BEGIN(H5Lget_name_by_idx_invalid_lapl)
        {
            TESTING_2("H5Lget_name_by_idx with an invalid LAPL");

            H5E_BEGIN_TRY
            {
                ret = H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, 0, link_name_buf,
                                         link_name_buf_size + 1, H5I_INVALID_HID);
            }
            H5E_END_TRY;

            if (ret >= 0) {
                H5_FAILED();
                HDprintf("    H5Lget_name_by_idx succeeded with an invalid LAPL!\n");
                PART_ERROR(H5Lget_name_by_idx_invalid_lapl);
            }

            PASSED();
        }
        PART_END(H5Lget_name_by_idx_invalid_lapl);
    }
    END_MULTIPART;

    TESTING_2("test cleanup");

    if (link_name_buf) {
        HDfree(link_name_buf);
        link_name_buf = NULL;
    }

    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup on failure; errors from these calls are suppressed */
    H5E_BEGIN_TRY
    {
        if (link_name_buf)
            HDfree(link_name_buf);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
+/*
+ * A test to check the functionality of link
+ * iteration using H5Literate(_by_name)2 with
+ * only hard links. Iteration is done in
+ * increasing and decreasing order of both link
+ * name and link creation order.
+ */
+static int
+test_link_iterate_hard_links(void)
+{
+ size_t i;
+ htri_t link_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dset_dtype = H5I_INVALID_HID;
+ hid_t dset_dspace = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("link iteration (only hard links)");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, dataset, link, or iterate aren't supported with "
+ "this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create GCPL for link creation order tracking\n");
+ goto error;
+ }
+
+ if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set link creation order tracking\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, LINK_ITER_HARD_LINKS_TEST_SUBGROUP_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n", LINK_ITER_HARD_LINKS_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_dspace =
+ generate_random_dataspace(LINK_ITER_HARD_LINKS_TEST_DSET_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+ TEST_ERROR;
+
+ for (i = 0; i < LINK_ITER_HARD_LINKS_TEST_NUM_LINKS; i++) {
+ char dset_name[LINK_ITER_HARD_LINKS_TEST_BUF_SIZE];
+
+ /* Create the datasets with a reverse-ordering naming scheme to test creation order later */
+ HDsnprintf(dset_name, LINK_ITER_HARD_LINKS_TEST_BUF_SIZE, LINK_ITER_HARD_LINKS_TEST_LINK_NAME "%d",
+ (int)(LINK_ITER_HARD_LINKS_TEST_NUM_LINKS - i - 1));
+
+ if ((dset_id = H5Dcreate2(group_id, dset_name, dset_dtype, dset_dspace, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create hard link '%s'\n", dset_name);
+ goto error;
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(group_id, dset_name, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", dset_name);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", dset_name);
+ goto error;
+ }
+
+ if (H5Dclose(dset_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close dataset '%s'\n", dset_name);
+ goto error;
+ }
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ /*
+ * NOTE: A counter is passed to the iteration callback to try to match up the
+ * expected links with a given step throughout all of the following
+ * iterations. This is to try and check that the links are indeed being
+ * returned in the correct order.
+ */
+
+ PART_BEGIN(H5Literate_link_name_increasing)
+ {
+ TESTING_2("H5Literate2 by link name in increasing order");
+
+ i = 0;
+
+ /* Test basic link iteration capability using both index types and both index orders */
+ if (H5Literate2(group_id, H5_INDEX_NAME, H5_ITER_INC, NULL, link_iter_hard_links_cb, &i) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate2 by index type name in increasing order failed\n");
+ PART_ERROR(H5Literate_link_name_increasing);
+ }
+
+ if (i != LINK_ITER_HARD_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_link_name_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Literate_link_name_increasing);
+
+ PART_BEGIN(H5Literate_link_name_decreasing)
+ {
+ TESTING_2("H5Literate2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Reset the counter to the appropriate value for the next test */
+ i = LINK_ITER_HARD_LINKS_TEST_NUM_LINKS;
+
+ if (H5Literate2(group_id, H5_INDEX_NAME, H5_ITER_DEC, NULL, link_iter_hard_links_cb, &i) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate2 by index type name in decreasing order failed\n");
+ PART_ERROR(H5Literate_link_name_decreasing);
+ }
+
+ if (i != 2 * LINK_ITER_HARD_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_link_name_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Literate_link_name_decreasing);
+#endif
+ }
+ PART_END(H5Literate_link_name_decreasing);
+
+ PART_BEGIN(H5Literate_link_creation_increasing)
+ {
+ TESTING_2("H5Literate2 by creation order in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 2 * LINK_ITER_HARD_LINKS_TEST_NUM_LINKS;
+
+ if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, link_iter_hard_links_cb, &i) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate2 by index type creation order in increasing order failed\n");
+ PART_ERROR(H5Literate_link_creation_increasing);
+ }
+
+ if (i != 3 * LINK_ITER_HARD_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_link_creation_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Literate_link_creation_increasing);
+
+ PART_BEGIN(H5Literate_link_creation_decreasing)
+ {
+ TESTING_2("H5Literate2 by creation order in decreasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 3 * LINK_ITER_HARD_LINKS_TEST_NUM_LINKS;
+
+ if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, link_iter_hard_links_cb, &i) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate2 by index type creation order in decreasing order failed\n");
+ PART_ERROR(H5Literate_link_creation_decreasing);
+ }
+
+ if (i != 4 * LINK_ITER_HARD_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_link_creation_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Literate_link_creation_decreasing);
+
+ PART_BEGIN(H5Literate_by_name_link_name_increasing)
+ {
+ TESTING_2("H5Literate_by_name2 by link name in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 0;
+
+ if (H5Literate_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_HARD_LINKS_TEST_SUBGROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_INC, NULL, link_iter_hard_links_cb, &i, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate_by_name2 by index type name in increasing order failed\n");
+ PART_ERROR(H5Literate_by_name_link_name_increasing);
+ }
+
+ if (i != LINK_ITER_HARD_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_by_name_link_name_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Literate_by_name_link_name_increasing);
+
+ PART_BEGIN(H5Literate_by_name_link_name_decreasing)
+ {
+ TESTING_2("H5Literate_by_name2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Reset the counter to the appropriate value for the next test */
+ i = LINK_ITER_HARD_LINKS_TEST_NUM_LINKS;
+
+ if (H5Literate_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_HARD_LINKS_TEST_SUBGROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_DEC, NULL, link_iter_hard_links_cb, &i, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate_by_name2 by index type name in decreasing order failed\n");
+ PART_ERROR(H5Literate_by_name_link_name_decreasing);
+ }
+
+ if (i != 2 * LINK_ITER_HARD_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_by_name_link_name_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Literate_by_name_link_name_decreasing);
+#endif
+ }
+ PART_END(H5Literate_by_name_link_name_decreasing);
+
+ PART_BEGIN(H5Literate_by_name_creation_increasing)
+ {
+ TESTING_2("H5Literate_by_name2 by creation order in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 2 * LINK_ITER_HARD_LINKS_TEST_NUM_LINKS;
+
+ if (H5Literate_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_HARD_LINKS_TEST_SUBGROUP_NAME,
+ H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, link_iter_hard_links_cb, &i, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate_by_name2 by index type creation order in increasing order failed\n");
+ PART_ERROR(H5Literate_by_name_creation_increasing);
+ }
+
+ if (i != 3 * LINK_ITER_HARD_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_by_name_creation_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Literate_by_name_creation_increasing);
+
+ PART_BEGIN(H5Literate_by_name_creation_decreasing)
+ {
+ TESTING_2("H5Literate_by_name2 by creation order in decreasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 3 * LINK_ITER_HARD_LINKS_TEST_NUM_LINKS;
+
+ if (H5Literate_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_HARD_LINKS_TEST_SUBGROUP_NAME,
+ H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, link_iter_hard_links_cb, &i, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate_by_name2 by index type creation order in decreasing order failed\n");
+ PART_ERROR(H5Literate_by_name_creation_decreasing);
+ }
+
+ if (i != 4 * LINK_ITER_HARD_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_by_name_creation_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Literate_by_name_creation_decreasing);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(dset_dspace) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dset_dtype) < 0)
+ TEST_ERROR;
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(dset_dspace);
+ H5Tclose(dset_dtype);
+ H5Dclose(dset_id);
+ H5Pclose(gcpl_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check the functionality of link
+ * iteration using H5Literate(_by_name)2 with
+ * only soft links. Iteration is done in
+ * increasing and decreasing order of both link
+ * name and link creation order.
+ */
+static int
+test_link_iterate_soft_links(void)
+{
+ size_t i;
+ htri_t link_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("link iteration (only soft links)");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, link, or iterate aren't supported with this "
+ "connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create GCPL for link creation order tracking\n");
+ goto error;
+ }
+
+ if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set link creation order tracking\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, LINK_ITER_SOFT_LINKS_TEST_SUBGROUP_NAME, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n", LINK_ITER_SOFT_LINKS_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS; i++) {
+ char link_name[LINK_ITER_SOFT_LINKS_TEST_BUF_SIZE];
+ char link_target[LINK_ITER_SOFT_LINKS_TEST_BUF_SIZE];
+
+ /* Create the links with a reverse-ordering naming scheme to test creation order later */
+ HDsnprintf(link_name, LINK_ITER_SOFT_LINKS_TEST_BUF_SIZE, LINK_ITER_SOFT_LINKS_TEST_LINK_NAME "%d",
+ (int)(LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS - i - 1));
+
+ HDsnprintf(link_target, LINK_ITER_SOFT_LINKS_TEST_BUF_SIZE, "target%d",
+ (int)(LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS - i - 1));
+
+ if (H5Lcreate_soft(link_target, group_id, link_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", link_name);
+ goto error;
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(group_id, link_name, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", link_name);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", link_name);
+ goto error;
+ }
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ /*
+ * NOTE: A counter is passed to the iteration callback to try to match up the
+ * expected links with a given step throughout all of the following
+ * iterations. This is to try and check that the links are indeed being
+ * returned in the correct order.
+ */
+
+ PART_BEGIN(H5Literate_link_name_increasing)
+ {
+ TESTING_2("H5Literate2 by link name in increasing order");
+
+ i = 0;
+
+ /* Test basic link iteration capability using both index types and both index orders */
+ if (H5Literate2(group_id, H5_INDEX_NAME, H5_ITER_INC, NULL, link_iter_soft_links_cb, &i) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate2 by index type name in increasing order failed\n");
+ PART_ERROR(H5Literate_link_name_increasing);
+ }
+
+ if (i != LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_link_name_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Literate_link_name_increasing);
+
+ PART_BEGIN(H5Literate_link_name_decreasing)
+ {
+ TESTING_2("H5Literate2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Reset the counter to the appropriate value for the next test */
+ i = LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS;
+
+ if (H5Literate2(group_id, H5_INDEX_NAME, H5_ITER_DEC, NULL, link_iter_soft_links_cb, &i) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate2 by index type name in decreasing order failed\n");
+ PART_ERROR(H5Literate_link_name_decreasing);
+ }
+
+ if (i != 2 * LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_link_name_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Literate_link_name_decreasing);
+#endif
+ }
+ PART_END(H5Literate_link_name_decreasing);
+
+ PART_BEGIN(H5Literate_link_creation_increasing)
+ {
+ TESTING_2("H5Literate2 by creation order in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 2 * LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS;
+
+ if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, link_iter_soft_links_cb, &i) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate2 by index type creation order in increasing order failed\n");
+ PART_ERROR(H5Literate_link_creation_increasing);
+ }
+
+ if (i != 3 * LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_link_creation_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Literate_link_creation_increasing);
+
+ PART_BEGIN(H5Literate_link_creation_decreasing)
+ {
+ TESTING_2("H5Literate2 by creation order in decreasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 3 * LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS;
+
+ if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, link_iter_soft_links_cb, &i) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate2 by index type creation order in decreasing order failed\n");
+ PART_ERROR(H5Literate_link_creation_decreasing);
+ }
+
+ if (i != 4 * LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_link_creation_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Literate_link_creation_decreasing);
+
+ PART_BEGIN(H5Literate_by_name_link_name_increasing)
+ {
+ TESTING_2("H5Literate_by_name2 by link name in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 0;
+
+ if (H5Literate_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_SOFT_LINKS_TEST_SUBGROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_INC, NULL, link_iter_soft_links_cb, &i, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate_by_name2 by index type name in increasing order failed\n");
+ PART_ERROR(H5Literate_by_name_link_name_increasing);
+ }
+
+ if (i != LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_by_name_link_name_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Literate_by_name_link_name_increasing);
+
+ PART_BEGIN(H5Literate_by_name_link_name_decreasing)
+ {
+ TESTING_2("H5Literate_by_name2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Reset the counter to the appropriate value for the next test */
+ i = LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS;
+
+ if (H5Literate_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_SOFT_LINKS_TEST_SUBGROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_DEC, NULL, link_iter_soft_links_cb, &i, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate_by_name2 by index type name in decreasing order failed\n");
+ PART_ERROR(H5Literate_by_name_link_name_decreasing);
+ }
+
+ if (i != 2 * LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_by_name_link_name_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Literate_by_name_link_name_decreasing);
+#endif
+ }
+ PART_END(H5Literate_by_name_link_name_decreasing);
+
+ PART_BEGIN(H5Literate_by_name_creation_increasing)
+ {
+ TESTING_2("H5Literate_by_name2 by creation order in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 2 * LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS;
+
+ if (H5Literate_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_SOFT_LINKS_TEST_SUBGROUP_NAME,
+ H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, link_iter_soft_links_cb, &i, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate_by_name2 by index type creation order in increasing order failed\n");
+ PART_ERROR(H5Literate_by_name_creation_increasing);
+ }
+
+ if (i != 3 * LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_by_name_creation_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Literate_by_name_creation_increasing);
+
+ PART_BEGIN(H5Literate_by_name_creation_decreasing)
+ {
+ TESTING_2("H5Literate_by_name2 by creation order in decreasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 3 * LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS;
+
+ if (H5Literate_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_SOFT_LINKS_TEST_SUBGROUP_NAME,
+ H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, link_iter_soft_links_cb, &i, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Literate_by_name2 by index type creation order in decreasing order failed\n");
+ PART_ERROR(H5Literate_by_name_creation_decreasing);
+ }
+
+ if (i != 4 * LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not iterated over!\n");
+ PART_ERROR(H5Literate_by_name_creation_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Literate_by_name_creation_decreasing);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(gcpl_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check the functionality of link
+ * iteration using H5Literate(_by_name)2 with
+ * only external links. Iteration is done in
+ * increasing and decreasing order of both link
+ * name and link creation order.
+ */
+static int
+test_link_iterate_external_links(void)
+{
+#ifndef NO_EXTERNAL_LINKS
+    size_t i;
+    htri_t link_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  gcpl_id = H5I_INVALID_HID;
+    char   ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+#endif
+
+    TESTING_MULTIPART("link iteration (only external links)");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        /* List every capability checked above so the skip diagnostic is accurate
+         * (matches the style of the other link iteration tests in this file)
+         */
+        HDprintf("    API functions for basic file, group, link, external link, iterate, or creation "
+                 "order aren't supported with this connector\n");
+        return 0;
+    }
+
+#ifndef NO_EXTERNAL_LINKS
+    TESTING_2("test setup");
+
+    HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s", EXTERNAL_LINK_TEST_FILE_NAME);
+
+    /* Create (and immediately close) the separate file that all of the
+     * external links created below will point into.
+     */
+    if ((file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
+        goto error;
+    }
+
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Creation order must be tracked on the subgroup so that iteration by
+     * H5_INDEX_CRT_ORDER below is meaningful.
+     */
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create GCPL for link creation order tracking\n");
+        goto error;
+    }
+
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set link creation order tracking\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, LINK_ITER_EXT_LINKS_TEST_SUBGROUP_NAME, H5P_DEFAULT, gcpl_id,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n", LINK_ITER_EXT_LINKS_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    for (i = 0; i < LINK_ITER_EXT_LINKS_TEST_NUM_LINKS; i++) {
+        char link_name[LINK_ITER_EXT_LINKS_TEST_BUF_SIZE];
+
+        /* Create the links with a reverse-ordering naming scheme to test creation order later */
+        HDsnprintf(link_name, LINK_ITER_EXT_LINKS_TEST_BUF_SIZE, LINK_ITER_EXT_LINKS_TEST_LINK_NAME "%d",
+                   (int)(LINK_ITER_EXT_LINKS_TEST_NUM_LINKS - i - 1));
+
+        if (H5Lcreate_external(ext_link_filename, "/", group_id, link_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create external link '%s'\n", link_name);
+            goto error;
+        }
+
+        /* Verify the link has been created */
+        if ((link_exists = H5Lexists(group_id, link_name, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't determine if link '%s' exists\n", link_name);
+            goto error;
+        }
+
+        if (!link_exists) {
+            H5_FAILED();
+            HDprintf("    link '%s' did not exist\n", link_name);
+            goto error;
+        }
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /*
+         * NOTE: A counter is passed to the iteration callback to try to match up the
+         *       expected links with a given step throughout all of the following
+         *       iterations. This is to try and check that the links are indeed being
+         *       returned in the correct order.
+         */
+
+        PART_BEGIN(H5Literate_link_name_increasing)
+        {
+            TESTING_2("H5Literate2 by link name in increasing order");
+
+            i = 0;
+
+            /* Test basic link iteration capability using both index types and both index orders */
+            if (H5Literate2(group_id, H5_INDEX_NAME, H5_ITER_INC, NULL, link_iter_external_links_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Literate_link_name_increasing);
+            }
+
+            if (i != LINK_ITER_EXT_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_link_name_increasing);
+
+        PART_BEGIN(H5Literate_link_name_decreasing)
+        {
+            TESTING_2("H5Literate2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_ITER_EXT_LINKS_TEST_NUM_LINKS;
+
+            if (H5Literate2(group_id, H5_INDEX_NAME, H5_ITER_DEC, NULL, link_iter_external_links_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Literate_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_ITER_EXT_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Literate_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Literate_link_name_decreasing);
+
+        PART_BEGIN(H5Literate_link_creation_increasing)
+        {
+            TESTING_2("H5Literate2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_ITER_EXT_LINKS_TEST_NUM_LINKS;
+
+            if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, link_iter_external_links_cb,
+                            &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Literate_link_creation_increasing);
+            }
+
+            if (i != 3 * LINK_ITER_EXT_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_link_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_link_creation_increasing);
+
+        PART_BEGIN(H5Literate_link_creation_decreasing)
+        {
+            TESTING_2("H5Literate2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_ITER_EXT_LINKS_TEST_NUM_LINKS;
+
+            if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, link_iter_external_links_cb,
+                            &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Literate_link_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_ITER_EXT_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_link_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_link_creation_decreasing);
+
+        PART_BEGIN(H5Literate_by_name_link_name_increasing)
+        {
+            TESTING_2("H5Literate_by_name2 by link name in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 0;
+
+            if (H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_EXT_LINKS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_INC, NULL, link_iter_external_links_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Literate_by_name_link_name_increasing);
+            }
+
+            if (i != LINK_ITER_EXT_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_by_name_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_link_name_increasing);
+
+        PART_BEGIN(H5Literate_by_name_link_name_decreasing)
+        {
+            TESTING_2("H5Literate_by_name2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_ITER_EXT_LINKS_TEST_NUM_LINKS;
+
+            if (H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_EXT_LINKS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_DEC, NULL, link_iter_external_links_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Literate_by_name_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_ITER_EXT_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_by_name_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Literate_by_name_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Literate_by_name_link_name_decreasing);
+
+        PART_BEGIN(H5Literate_by_name_creation_increasing)
+        {
+            TESTING_2("H5Literate_by_name2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_ITER_EXT_LINKS_TEST_NUM_LINKS;
+
+            if (H5Literate_by_name2(file_id,
+                                    "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_EXT_LINKS_TEST_SUBGROUP_NAME,
+                                    H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, link_iter_external_links_cb, &i,
+                                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Literate_by_name_creation_increasing);
+            }
+
+            if (i != 3 * LINK_ITER_EXT_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_by_name_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_creation_increasing);
+
+        PART_BEGIN(H5Literate_by_name_creation_decreasing)
+        {
+            TESTING_2("H5Literate_by_name2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_ITER_EXT_LINKS_TEST_NUM_LINKS;
+
+            if (H5Literate_by_name2(file_id,
+                                    "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_EXT_LINKS_TEST_SUBGROUP_NAME,
+                                    H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, link_iter_external_links_cb, &i,
+                                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Literate_by_name_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_ITER_EXT_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_by_name_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_creation_decreasing);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; error stack suppressed since IDs may be invalid */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(gcpl_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+#else
+    SKIPPED();
+    return 0;
+#endif
+}
+
+/*
+ * A test to check the functionality of link
+ * iteration using H5Literate(_by_name)2 with
+ * only user-defined links. Iteration is done
+ * in increasing and decreasing order of both
+ * link name and link creation order.
+ *
+ * TODO refactor test so that creation order tests
+ * actually test the order that objects were created in.
+ */
+static int
+test_link_iterate_ud_links(void)
+{
+    TESTING("link iteration (only user-defined links)");
+
+    /* Not implemented yet (see TODO above); skip without counting a failure.
+     * Returning 0 matches the convention used elsewhere in this file after
+     * SKIPPED() (e.g. the NO_EXTERNAL_LINKS skip paths), so an unimplemented
+     * test is not tallied as an error by the test runner.
+     */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to check the functionality of link
+ * iteration using H5Literate(_by_name)2 with
+ * mixed link types. Iteration is done in
+ * increasing and decreasing order of both link
+ * name and link creation order.
+ *
+ * TODO refactor test so that creation order tests
+ * actually test the order that objects were created in.
+ *
+ * TODO add UD links
+ *
+ * TODO refactor link saving portion into its own test
+ */
+static int
+test_link_iterate_mixed_links(void)
+{
+/* NOTE(review): guard also checks NO_USER_DEFINED_LINKS even though no UD
+ * links are created below yet — presumably in anticipation of the TODO in
+ * the header comment; confirm when UD links are added.
+ */
+#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
+    hsize_t saved_idx;
+    size_t  i;
+    htri_t  link_exists;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   gcpl_id     = H5I_INVALID_HID;
+    hid_t   dset_id     = H5I_INVALID_HID;
+    hid_t   dset_dtype  = H5I_INVALID_HID;
+    hid_t   dset_dspace = H5I_INVALID_HID;
+    int     halted;
+    char    ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+#endif
+
+    TESTING_MULTIPART("link iteration (mixed link types)");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, link, soft or external link, iterate, or creation "
+                 "order aren't supported with this connector\n");
+        return 0;
+    }
+
+#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
+    TESTING_2("test setup");
+
+    HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s", EXTERNAL_LINK_TEST_FILE_NAME);
+
+    /* Create (and immediately close) the separate file that the external
+     * link created below will point into.
+     */
+    if ((file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
+        goto error;
+    }
+
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Creation order must be tracked on the subgroup so iteration by
+     * H5_INDEX_CRT_ORDER below is meaningful.
+     */
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create GCPL for link creation order tracking\n");
+        goto error;
+    }
+
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set link creation order tracking\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, LINK_ITER_MIXED_LINKS_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n", LINK_ITER_MIXED_LINKS_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* A dataset with a random datatype/dataspace serves as the target of the
+     * hard link; creating it also creates the hard link itself.
+     */
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_dspace =
+             generate_random_dataspace(LINK_ITER_MIXED_LINKS_TEST_DSET_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, LINK_ITER_MIXED_LINKS_TEST_HARD_LINK_NAME, dset_dtype, dset_dspace,
+                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create hard link '%s'\n", LINK_ITER_MIXED_LINKS_TEST_HARD_LINK_NAME);
+        goto error;
+    }
+
+    /* Soft link pointing at the dataset created above */
+    if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_ITER_MIXED_LINKS_TEST_SUBGROUP_NAME
+                       "/" LINK_ITER_MIXED_LINKS_TEST_HARD_LINK_NAME,
+                       group_id, LINK_ITER_MIXED_LINKS_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create soft link '%s'\n", LINK_ITER_MIXED_LINKS_TEST_SOFT_LINK_NAME);
+        goto error;
+    }
+
+    /* External link pointing at the root group of the file created earlier */
+    if (H5Lcreate_external(ext_link_filename, "/", group_id, LINK_ITER_MIXED_LINKS_TEST_EXT_LINK_NAME,
+                           H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create external link '%s'\n", LINK_ITER_MIXED_LINKS_TEST_EXT_LINK_NAME);
+        goto error;
+    }
+
+    /* Verify the links have been created */
+    if ((link_exists = H5Lexists(group_id, LINK_ITER_MIXED_LINKS_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n", LINK_ITER_MIXED_LINKS_TEST_HARD_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    first link did not exist\n");
+        goto error;
+    }
+
+    if ((link_exists = H5Lexists(group_id, LINK_ITER_MIXED_LINKS_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n", LINK_ITER_MIXED_LINKS_TEST_SOFT_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    second link did not exist\n");
+        goto error;
+    }
+
+    if ((link_exists = H5Lexists(group_id, LINK_ITER_MIXED_LINKS_TEST_EXT_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n", LINK_ITER_MIXED_LINKS_TEST_EXT_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    third link did not exist\n");
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /*
+         * NOTE: A counter is passed to the iteration callback to try to match up the
+         *       expected links with a given step throughout all of the following
+         *       iterations. This is to try and check that the links are indeed being
+         *       returned in the correct order.
+         */
+
+        PART_BEGIN(H5Literate_link_name_increasing)
+        {
+            TESTING_2("H5Literate2 by link name in increasing order");
+
+            i = 0;
+
+            /* Test basic link iteration capability using both index types and both index orders */
+            if (H5Literate2(group_id, H5_INDEX_NAME, H5_ITER_INC, NULL, link_iter_mixed_links_cb, &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Literate_link_name_increasing);
+            }
+
+            if (i != LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_link_name_increasing);
+
+        PART_BEGIN(H5Literate_link_name_decreasing)
+        {
+            TESTING_2("H5Literate2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS;
+
+            if (H5Literate2(group_id, H5_INDEX_NAME, H5_ITER_DEC, NULL, link_iter_mixed_links_cb, &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Literate_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Literate_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Literate_link_name_decreasing);
+
+        PART_BEGIN(H5Literate_link_creation_increasing)
+        {
+            TESTING_2("H5Literate2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS;
+
+            if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, link_iter_mixed_links_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Literate_link_creation_increasing);
+            }
+
+            if (i != 3 * LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_link_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_link_creation_increasing);
+
+        PART_BEGIN(H5Literate_link_creation_decreasing)
+        {
+            TESTING_2("H5Literate2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS;
+
+            if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, link_iter_mixed_links_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Literate_link_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_link_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_link_creation_decreasing);
+
+        PART_BEGIN(H5Literate_by_name_link_name_increasing)
+        {
+            TESTING_2("H5Literate_by_name2 by link name in increasing order");
+
+            i = 0;
+
+            if (H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_MIXED_LINKS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_INC, NULL, link_iter_mixed_links_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Literate_by_name_link_name_increasing);
+            }
+
+            if (i != LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_by_name_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_link_name_increasing);
+
+        PART_BEGIN(H5Literate_by_name_link_name_decreasing)
+        {
+            TESTING_2("H5Literate_by_name2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS;
+
+            if (H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_MIXED_LINKS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_DEC, NULL, link_iter_mixed_links_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Literate_by_name_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_by_name_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Literate_by_name_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Literate_by_name_link_name_decreasing);
+
+        PART_BEGIN(H5Literate_by_name_creation_increasing)
+        {
+            TESTING_2("H5Literate_by_name2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS;
+
+            if (H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_MIXED_LINKS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, link_iter_mixed_links_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Literate_by_name_creation_increasing);
+            }
+
+            if (i != 3 * LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_by_name_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_creation_increasing);
+
+        PART_BEGIN(H5Literate_by_name_creation_decreasing)
+        {
+            TESTING_2("H5Literate_by_name2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS;
+
+            if (H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_MIXED_LINKS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, link_iter_mixed_links_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Literate_by_name_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS) {
+                H5_FAILED();
+                HDprintf("    some links were not iterated over!\n");
+                PART_ERROR(H5Literate_by_name_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_creation_decreasing);
+
+        PART_BEGIN(H5Literate_index_saving_increasing)
+        {
+            TESTING_2("H5Literate2 index-saving capabilities in increasing order");
+
+            /* Test the H5Literate2 index-saving capabilities */
+            saved_idx = 0;
+            halted    = 0;
+
+            /* NOTE(review): link_iter_idx_saving_cb is defined elsewhere;
+             * given the saved_idx != 2 check below it presumably halts
+             * iteration early so that saved_idx points at index 2 — confirm
+             * against the callback's implementation.
+             */
+            if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, &saved_idx, link_iter_idx_saving_cb,
+                            &halted) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 index-saving capability test failed\n");
+                PART_ERROR(H5Literate_index_saving_increasing);
+            }
+
+            if (saved_idx != 2) {
+                H5_FAILED();
+                HDprintf("    saved index after iteration was wrong\n");
+                PART_ERROR(H5Literate_index_saving_increasing);
+            }
+
+            /* Resume iteration from the saved index and let it run to completion */
+            if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, &saved_idx, link_iter_idx_saving_cb,
+                            &halted) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't finish iterating when beginning from saved index\n");
+                PART_ERROR(H5Literate_index_saving_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_index_saving_increasing);
+
+        PART_BEGIN(H5Literate_index_saving_decreasing)
+        {
+            TESTING_2("H5Literate2 index-saving capabilities in decreasing order");
+
+            /* Start from the last link when iterating in decreasing order */
+            saved_idx = LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS - 1;
+            halted    = 0;
+
+            if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, &saved_idx, link_iter_idx_saving_cb,
+                            &halted) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 index-saving capability test failed\n");
+                PART_ERROR(H5Literate_index_saving_decreasing);
+            }
+
+            if (saved_idx != 2) {
+                H5_FAILED();
+                HDprintf("    saved index after iteration was wrong\n");
+                PART_ERROR(H5Literate_index_saving_decreasing);
+            }
+
+            /* Resume iteration from the saved index and let it run to completion */
+            if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, &saved_idx, link_iter_idx_saving_cb,
+                            &halted) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't finish iterating when beginning from saved index\n");
+                PART_ERROR(H5Literate_index_saving_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_index_saving_decreasing);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(dset_dspace) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; error stack suppressed since IDs may be invalid */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(dset_dspace);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Pclose(gcpl_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+#else
+    SKIPPED();
+    return 0;
+#endif
+}
+
+/*
+ * A test to check that H5Literate(_by_name)2 fails
+ * when given invalid parameters.
+ */
+static int
+test_link_iterate_invalid_params(void)
+{
+    herr_t err_ret = -1;
+    htri_t link_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  gcpl_id         = H5I_INVALID_HID;
+    hid_t  dset_id         = H5I_INVALID_HID;
+    hid_t  dset_dtype      = H5I_INVALID_HID;
+    hid_t  dset_dspace     = H5I_INVALID_HID;
+    char   ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+
+    TESTING_MULTIPART("link iteration with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, link, soft or external link, iterate, or "
+                 "creation order aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s", EXTERNAL_LINK_TEST_FILE_NAME);
+
+    /* Create the separate file that the external link below will reference */
+    if ((file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
+        goto error;
+    }
+
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create GCPL for link creation order tracking\n");
+        goto error;
+    }
+
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set link creation order tracking\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, LINK_ITER_INVALID_PARAMS_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 LINK_ITER_INVALID_PARAMS_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_dspace =
+             generate_random_dataspace(LINK_ITER_INVALID_PARAMS_TEST_DSET_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    /* Create one hard link (via dataset creation), one soft link and, when
+     * supported, one external link so there is something to iterate over.
+     */
+    if ((dset_id = H5Dcreate2(group_id, LINK_ITER_INVALID_PARAMS_TEST_HARD_LINK_NAME, dset_dtype, dset_dspace,
+                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create hard link '%s'\n", LINK_ITER_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+        goto error;
+    }
+
+    if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_ITER_INVALID_PARAMS_TEST_SUBGROUP_NAME
+                       "/" LINK_ITER_INVALID_PARAMS_TEST_HARD_LINK_NAME,
+                       group_id, LINK_ITER_INVALID_PARAMS_TEST_SOFT_LINK_NAME, H5P_DEFAULT,
+                       H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create soft link '%s'\n", LINK_ITER_INVALID_PARAMS_TEST_SOFT_LINK_NAME);
+        goto error;
+    }
+#ifndef NO_EXTERNAL_LINKS
+    if (H5Lcreate_external(ext_link_filename, "/", group_id, LINK_ITER_INVALID_PARAMS_TEST_EXT_LINK_NAME,
+                           H5P_DEFAULT, H5P_DEFAULT) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create external link '%s'\n", LINK_ITER_INVALID_PARAMS_TEST_EXT_LINK_NAME);
+        goto error;
+    }
+#endif
+    /* Verify the links have been created */
+    if ((link_exists = H5Lexists(group_id, LINK_ITER_INVALID_PARAMS_TEST_HARD_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n",
+                 LINK_ITER_INVALID_PARAMS_TEST_HARD_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    first link did not exist\n");
+        goto error;
+    }
+
+    if ((link_exists = H5Lexists(group_id, LINK_ITER_INVALID_PARAMS_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n",
+                 LINK_ITER_INVALID_PARAMS_TEST_SOFT_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    second link did not exist\n");
+        goto error;
+    }
+#ifndef NO_EXTERNAL_LINKS
+    if ((link_exists = H5Lexists(group_id, LINK_ITER_INVALID_PARAMS_TEST_EXT_LINK_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't determine if link '%s' exists\n", LINK_ITER_INVALID_PARAMS_TEST_EXT_LINK_NAME);
+        goto error;
+    }
+
+    if (!link_exists) {
+        H5_FAILED();
+        HDprintf("    third link did not exist\n");
+        goto error;
+    }
+#endif
+
+    PASSED();
+
+    /* Each part below passes one intentionally-invalid argument to
+     * H5Literate2/H5Literate_by_name2 and expects the call to fail.
+     */
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Literate_invalid_grp_id)
+        {
+            TESTING_2("H5Literate2 with an invalid group ID");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Literate2(H5I_INVALID_HID, H5_INDEX_NAME, H5_ITER_INC, NULL,
+                                      link_iter_invalid_params_cb, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 succeeded with an invalid group ID!\n");
+                PART_ERROR(H5Literate_invalid_grp_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_invalid_grp_id);
+
+        PART_BEGIN(H5Literate_invalid_index_type)
+        {
+            TESTING_2("H5Literate2 with an invalid index type");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Literate2(group_id, H5_INDEX_UNKNOWN, H5_ITER_INC, NULL,
+                                      link_iter_invalid_params_cb, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 succeeded with invalid index type H5_INDEX_UNKNOWN!\n");
+                PART_ERROR(H5Literate_invalid_index_type);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Literate2(group_id, H5_INDEX_N, H5_ITER_INC, NULL, link_iter_invalid_params_cb, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 succeeded with invalid index type H5_INDEX_N!\n");
+                PART_ERROR(H5Literate_invalid_index_type);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_invalid_index_type);
+
+        PART_BEGIN(H5Literate_invalid_iter_order)
+        {
+            TESTING_2("H5Literate2 with an invalid iteration ordering");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Literate2(group_id, H5_INDEX_NAME, H5_ITER_UNKNOWN, NULL,
+                                      link_iter_invalid_params_cb, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 succeeded with invalid iteration ordering H5_ITER_UNKNOWN!\n");
+                PART_ERROR(H5Literate_invalid_iter_order);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Literate2(group_id, H5_INDEX_NAME, H5_ITER_N, NULL, link_iter_invalid_params_cb, NULL);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 succeeded with invalid iteration ordering H5_ITER_N!\n");
+                PART_ERROR(H5Literate_invalid_iter_order);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_invalid_iter_order);
+
+        PART_BEGIN(H5Literate_by_name_invalid_loc_id)
+        {
+            TESTING_2("H5Literate_by_name2 with an invalid location ID");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Literate_by_name2(
+                    H5I_INVALID_HID, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_INVALID_PARAMS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_INC, NULL, link_iter_invalid_params_cb, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 succeeded with an invalid location ID!\n");
+                PART_ERROR(H5Literate_by_name_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_invalid_loc_id);
+
+        PART_BEGIN(H5Literate_by_name_invalid_grp_name)
+        {
+            TESTING_2("H5Literate_by_name2 with an invalid group name");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Literate_by_name2(file_id, NULL, H5_INDEX_NAME, H5_ITER_INC, NULL,
+                                              link_iter_invalid_params_cb, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 succeeded with a NULL group name!\n");
+                PART_ERROR(H5Literate_by_name_invalid_grp_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Literate_by_name2(file_id, "", H5_INDEX_NAME, H5_ITER_INC, NULL,
+                                              link_iter_invalid_params_cb, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 succeeded with an invalid group name of ''!\n");
+                PART_ERROR(H5Literate_by_name_invalid_grp_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_invalid_grp_name);
+
+        PART_BEGIN(H5Literate_by_name_invalid_index_type)
+        {
+            TESTING_2("H5Literate_by_name2 with an invalid index type");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_INVALID_PARAMS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_UNKNOWN, H5_ITER_INC, NULL, link_iter_invalid_params_cb, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 succeeded with invalid index type H5_INDEX_UNKNOWN!\n");
+                PART_ERROR(H5Literate_by_name_invalid_index_type);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_INVALID_PARAMS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_N, H5_ITER_INC, NULL, link_iter_invalid_params_cb, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 succeeded with invalid index type H5_INDEX_N!\n");
+                PART_ERROR(H5Literate_by_name_invalid_index_type);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_invalid_index_type);
+
+        PART_BEGIN(H5Literate_by_name_invalid_iter_order)
+        {
+            TESTING_2("H5Literate_by_name2 with an invalid iteration ordering");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_INVALID_PARAMS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_UNKNOWN, NULL, link_iter_invalid_params_cb, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(
+                    "    H5Literate_by_name2 succeeded with invalid iteration ordering H5_ITER_UNKNOWN!\n");
+                PART_ERROR(H5Literate_by_name_invalid_iter_order);
+            }
+
+            /* Pass NULL for op_data, consistent with every other invalid-parameter
+             * call in this test (the call is expected to fail before the callback
+             * would ever be invoked).
+             */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_INVALID_PARAMS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_N, NULL, link_iter_invalid_params_cb, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 succeeded with invalid iteration ordering H5_ITER_N!\n");
+                PART_ERROR(H5Literate_by_name_invalid_iter_order);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_invalid_iter_order);
+
+        PART_BEGIN(H5Literate_by_name_invalid_lapl)
+        {
+            TESTING_2("H5Literate_by_name2 with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_INVALID_PARAMS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_INC, NULL, link_iter_invalid_params_cb, NULL, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 succeeded with an invalid LAPL!\n");
+                PART_ERROR(H5Literate_by_name_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_invalid_lapl);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(dset_dspace) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort release of every ID; errors are suppressed since we are
+     * already on the failure path.
+     */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(dset_dspace);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Pclose(gcpl_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that link iteration performed on a
+ * group with no links in it is not problematic.
+ */
+static int
+test_link_iterate_0_links(void)
+{
+    hid_t file_id         = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t gcpl_id         = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("link iteration on group with 0 links");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, link iterate, or creation order aren't supported "
+                 "with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    /* Open the main test file and the shared container group */
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Creation order tracking must be enabled so the H5_INDEX_CRT_ORDER
+     * iterations below are valid on the (empty) subgroup.
+     */
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create GCPL for link creation order tracking\n");
+        goto error;
+    }
+
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set link creation order tracking\n");
+        goto error;
+    }
+
+    /* The subgroup is intentionally left with no links in it */
+    if ((group_id = H5Gcreate2(container_group, LINK_ITER_0_LINKS_TEST_SUBGROUP_NAME, H5P_DEFAULT, gcpl_id,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n", LINK_ITER_0_LINKS_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    PASSED();
+
+    /* Each part iterates over the empty group with a different combination of
+     * index type (name / creation order) and order (increasing / decreasing);
+     * the callback should never be invoked and each call should succeed.
+     */
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Literate_0_links_name_increasing)
+        {
+            TESTING_2("H5Literate2 by link name in increasing order");
+
+            /* Test basic link iteration capability using both index types and both index orders */
+            if (H5Literate2(group_id, H5_INDEX_NAME, H5_ITER_INC, NULL, link_iter_0_links_cb, NULL) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Literate_0_links_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_0_links_name_increasing);
+
+        PART_BEGIN(H5Literate_0_links_name_decreasing)
+        {
+            TESTING_2("H5Literate2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            if (H5Literate2(group_id, H5_INDEX_NAME, H5_ITER_DEC, NULL, link_iter_0_links_cb, NULL) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Literate_0_links_name_decreasing);
+            }
+
+            PASSED();
+#else
+            /* Decreasing name-order iteration is not supported by this connector */
+            SKIPPED();
+            PART_EMPTY(H5Literate_0_links_name_decreasing);
+#endif
+        }
+        PART_END(H5Literate_0_links_name_decreasing);
+
+        PART_BEGIN(H5Literate_0_links_creation_increasing)
+        {
+            TESTING_2("H5Literate2 by creation order in increasing order");
+
+            if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, link_iter_0_links_cb, NULL) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Literate_0_links_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_0_links_creation_increasing);
+
+        PART_BEGIN(H5Literate_0_links_creation_decreasing)
+        {
+            TESTING_2("H5Literate2 by creation order in decreasing order");
+
+            if (H5Literate2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, link_iter_0_links_cb, NULL) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Literate2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Literate_0_links_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_0_links_creation_decreasing);
+
+        PART_BEGIN(H5Literate_by_name_0_links_name_increasing)
+        {
+            TESTING_2("H5Literate_by_name2 by link name in increasing order");
+
+            if (H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_0_LINKS_TEST_SUBGROUP_NAME, H5_INDEX_NAME,
+                    H5_ITER_INC, NULL, link_iter_0_links_cb, NULL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Literate_by_name_0_links_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_0_links_name_increasing);
+
+        PART_BEGIN(H5Literate_by_name_0_links_name_decreasing)
+        {
+            TESTING_2("H5Literate_by_name2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            if (H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_0_LINKS_TEST_SUBGROUP_NAME, H5_INDEX_NAME,
+                    H5_ITER_DEC, NULL, link_iter_0_links_cb, NULL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Literate_by_name_0_links_name_decreasing);
+            }
+
+            PASSED();
+#else
+            /* Decreasing name-order iteration is not supported by this connector */
+            SKIPPED();
+            PART_EMPTY(H5Literate_by_name_0_links_name_decreasing);
+#endif
+        }
+        PART_END(H5Literate_by_name_0_links_name_decreasing);
+
+        PART_BEGIN(H5Literate_by_name_0_links_creation_increasing)
+        {
+            TESTING_2("H5Literate_by_name2 by creation order in increasing order");
+
+            if (H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_0_LINKS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_INC, NULL, link_iter_0_links_cb, NULL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Literate_by_name_0_links_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_0_links_creation_increasing);
+
+        PART_BEGIN(H5Literate_by_name_0_links_creation_decreasing)
+        {
+            TESTING_2("H5Literate_by_name2 by creation order in decreasing order");
+
+            if (H5Literate_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_ITER_0_LINKS_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_DEC, NULL, link_iter_0_links_cb, NULL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Literate_by_name2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Literate_by_name_0_links_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Literate_by_name_0_links_creation_decreasing);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort release of every ID; errors are suppressed on the failure path */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(gcpl_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check the functionality of recursive
+ * link iteration using H5Lvisit(_by_name)2 with
+ * only hard links and where there are no cyclic
+ * links. Iteration is done in increasing and
+ * decreasing order of both link name and link
+ * creation order.
+ */
+static int
+test_link_visit_hard_links_no_cycles(void)
+{
+    size_t i;
+    htri_t link_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  subgroup_id     = H5I_INVALID_HID;
+    hid_t  gcpl_id         = H5I_INVALID_HID;
+    hid_t  dset_id         = H5I_INVALID_HID;
+    hid_t  dset_dtype      = H5I_INVALID_HID;
+    hid_t  dset_dspace     = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("link visiting without cycles (only hard links)");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, link iterate, or creation order aren't "
+                 "supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create a GCPL\n");
+        goto error;
+    }
+
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't enable link creation order tracking and indexing on GCPL\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                               H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_dspace = generate_random_dataspace(LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_DSET_SPACE_RANK, NULL,
+                                                 NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    /* Build a small hierarchy of subgroups, each containing several datasets
+     * (hard links), for H5Lvisit(_by_name)2 to recurse through.
+     */
+    for (i = 0; i < LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS; i++) {
+        size_t j;
+        char   grp_name[LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_BUF_SIZE];
+
+        /* Create the groups with a reverse-ordering naming scheme to test creation order later */
+        HDsnprintf(grp_name, LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+                   LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME "%d",
+                   (int)(LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS - i - 1));
+
+        if ((subgroup_id = H5Gcreate2(group_id, grp_name, H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create subgroup '%s'\n", grp_name);
+            goto error;
+        }
+
+        for (j = 0; j < LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP; j++) {
+            char dset_name[LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_BUF_SIZE];
+
+            /* Create the datasets with a reverse-ordering naming scheme to test creation order later */
+            HDsnprintf(dset_name, LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_LINK_NAME "%d",
+                       (int)(LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP - j - 1));
+
+            if ((dset_id = H5Dcreate2(subgroup_id, dset_name, dset_dtype, dset_dspace, H5P_DEFAULT,
+                                      H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create hard link '%s'\n", dset_name);
+                goto error;
+            }
+
+            /* Verify the link has been created */
+            if ((link_exists = H5Lexists(subgroup_id, dset_name, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", dset_name);
+                goto error;
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", dset_name);
+                goto error;
+            }
+
+            if (H5Dclose(dset_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close dataset '%s'\n", dset_name);
+                goto error;
+            }
+        }
+
+        if (H5Gclose(subgroup_id) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't close subgroup '%s'\n", grp_name);
+            goto error;
+        }
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /*
+         * NOTE: A counter is passed to the iteration callback to try to match up the
+         *       expected links with a given step throughout all of the following
+         *       iterations. This is to try and check that the links are indeed being
+         *       returned in the correct order.
+         */
+
+        PART_BEGIN(H5Lvisit_no_cycles_link_name_increasing)
+        {
+            TESTING_2("H5Lvisit2 by link name in increasing order");
+
+            i = 0;
+
+            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_INC, link_visit_hard_links_no_cycles_cb, &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_name_increasing);
+            }
+
+            if (i != LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_no_cycles_link_name_increasing);
+
+        PART_BEGIN(H5Lvisit_no_cycles_link_name_decreasing)
+        {
+            TESTING_2("H5Lvisit2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_DEC, link_visit_hard_links_no_cycles_cb, &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            /* Decreasing name-order iteration is not supported by this connector */
+            SKIPPED();
+            PART_EMPTY(H5Lvisit_no_cycles_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Lvisit_no_cycles_link_name_decreasing);
+
+        PART_BEGIN(H5Lvisit_no_cycles_link_creation_increasing)
+        {
+            TESTING_2("H5Lvisit2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_hard_links_no_cycles_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_creation_increasing);
+            }
+
+            if (i != 3 * LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_no_cycles_link_creation_increasing);
+
+        PART_BEGIN(H5Lvisit_no_cycles_link_creation_decreasing)
+        {
+            TESTING_2("H5Lvisit2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_hard_links_no_cycles_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_no_cycles_link_creation_decreasing);
+
+        PART_BEGIN(H5Lvisit_by_name_no_cycles_link_name_increasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by link name in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 0;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_INC, link_visit_hard_links_no_cycles_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_increasing);
+            }
+
+            if (i != LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_no_cycles_link_name_increasing);
+
+        PART_BEGIN(H5Lvisit_by_name_no_cycles_link_name_decreasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_DEC, link_visit_hard_links_no_cycles_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            /* Decreasing name-order iteration is not supported by this connector */
+            SKIPPED();
+            PART_EMPTY(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+
+        PART_BEGIN(H5Lvisit_by_name_no_cycles_link_creation_increasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_hard_links_no_cycles_cb, &i,
+                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_increasing);
+            }
+
+            if (i != 3 * LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_no_cycles_link_creation_increasing);
+
+        PART_BEGIN(H5Lvisit_by_name_no_cycles_link_creation_decreasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_hard_links_no_cycles_cb, &i,
+                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_no_cycles_link_creation_decreasing);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    /* dset_id and subgroup_id were already closed inside the setup loop */
+    if (H5Sclose(dset_dspace) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort release of every ID; errors are suppressed on the failure path */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(dset_dspace);
+        H5Tclose(dset_dtype);
+        H5Dclose(dset_id);
+        H5Pclose(gcpl_id);
+        H5Gclose(subgroup_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check the functionality of recursive
+ * link iteration using H5Lvisit(_by_name)2 with
+ * only soft links and where there are no cyclic
+ * links. Iteration is done in increasing and
+ * decreasing order of both link name and link
+ * creation order.
+ *
+ * Returns 0 on success (or when the connector lacks the
+ * required capabilities and the test is skipped); returns
+ * 1 on failure.
+ */
+static int
+test_link_visit_soft_links_no_cycles(void)
+{
+    size_t i;
+    htri_t link_exists;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t subgroup_id = H5I_INVALID_HID;
+    hid_t gcpl_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("link visiting without cycles (only soft links)");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, link, soft link, iterate, or creation order "
+                 "aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    /* Open the main API test file and the shared link test container group */
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Create a GCPL that tracks and indexes link creation order so that the
+     * H5_INDEX_CRT_ORDER iteration parts below are valid for these groups */
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create a GCPL\n");
+        goto error;
+    }
+
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't enable link creation order tracking and indexing on GCPL\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                               H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Build several nested subgroups, each holding soft links whose targets
+     * ("target<N>") don't need to resolve for iteration; names are generated
+     * in reverse so that name order and creation order differ */
+    for (i = 0; i < LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS; i++) {
+        size_t j;
+        char grp_name[LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_BUF_SIZE];
+
+        /* Create the groups with a reverse-ordering naming scheme to test creation order later */
+        HDsnprintf(grp_name, LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+                   LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME "%d",
+                   (int)(LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS - i - 1));
+
+        if ((subgroup_id = H5Gcreate2(group_id, grp_name, H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create subgroup '%s'\n", grp_name);
+            goto error;
+        }
+
+        for (j = 0; j < LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP; j++) {
+            char link_name[LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_BUF_SIZE];
+            char link_target[LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_BUF_SIZE];
+
+            /* Create the links with a reverse-ordering naming scheme to test creation order later */
+            HDsnprintf(link_name, LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_LINK_NAME "%d",
+                       (int)(LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP - j - 1));
+
+            HDsnprintf(link_target, LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_BUF_SIZE, "target%d",
+                       (int)(LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP - j - 1));
+
+            if (H5Lcreate_soft(link_target, subgroup_id, link_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create soft link '%s'\n", link_name);
+                goto error;
+            }
+
+            /* Verify the link has been created */
+            if ((link_exists = H5Lexists(subgroup_id, link_name, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", link_name);
+                goto error;
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", link_name);
+                goto error;
+            }
+        }
+
+        if (H5Gclose(subgroup_id) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't close subgroup '%s'\n", grp_name);
+            goto error;
+        }
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /*
+         * NOTE: A counter is passed to the iteration callback to try to match up the
+         *       expected links with a given step throughout all of the following
+         *       iterations. This is to try and check that the links are indeed being
+         *       returned in the correct order. Each part starts the counter at its
+         *       own multiple of NUM_LINKS_PER_TEST so the callback can tell the
+         *       parts apart.
+         */
+
+        PART_BEGIN(H5Lvisit_no_cycles_link_name_increasing)
+        {
+            TESTING_2("H5Lvisit2 by link name in increasing order");
+
+            i = 0;
+
+            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_INC, link_visit_soft_links_no_cycles_cb, &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_name_increasing);
+            }
+
+            if (i != LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_no_cycles_link_name_increasing);
+
+        PART_BEGIN(H5Lvisit_no_cycles_link_name_decreasing)
+        {
+            TESTING_2("H5Lvisit2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_DEC, link_visit_soft_links_no_cycles_cb, &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            /* Connectors that cannot iterate alphabetically in reverse skip this part */
+            SKIPPED();
+            PART_EMPTY(H5Lvisit_no_cycles_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Lvisit_no_cycles_link_name_decreasing);
+
+        PART_BEGIN(H5Lvisit_no_cycles_link_creation_increasing)
+        {
+            TESTING_2("H5Lvisit2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_soft_links_no_cycles_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_creation_increasing);
+            }
+
+            if (i != 3 * LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_no_cycles_link_creation_increasing);
+
+        PART_BEGIN(H5Lvisit_no_cycles_link_creation_decreasing)
+        {
+            TESTING_2("H5Lvisit2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_soft_links_no_cycles_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_no_cycles_link_creation_decreasing);
+
+        PART_BEGIN(H5Lvisit_by_name_no_cycles_link_name_increasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by link name in increasing order");
+
+            i = 0;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_INC, link_visit_soft_links_no_cycles_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_increasing);
+            }
+
+            if (i != LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_no_cycles_link_name_increasing);
+
+        PART_BEGIN(H5Lvisit_by_name_no_cycles_link_name_decreasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_DEC, link_visit_soft_links_no_cycles_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            /* Connectors that cannot iterate alphabetically in reverse skip this part */
+            SKIPPED();
+            PART_EMPTY(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+
+        PART_BEGIN(H5Lvisit_by_name_no_cycles_link_creation_increasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_soft_links_no_cycles_cb, &i,
+                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_increasing);
+            }
+
+            if (i != 3 * LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_no_cycles_link_creation_increasing);
+
+        PART_BEGIN(H5Lvisit_by_name_no_cycles_link_creation_decreasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_soft_links_no_cycles_cb, &i,
+                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_no_cycles_link_creation_decreasing);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Release any IDs that may still be open; error output is suppressed
+     * since some of these may legitimately be H5I_INVALID_HID */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(gcpl_id);
+        H5Gclose(subgroup_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check the functionality of recursive
+ * link iteration using H5Lvisit(_by_name)2 with
+ * only external links and where there are no cyclic
+ * links. Iteration is done in increasing and
+ * decreasing order of both link name and link
+ * creation order.
+ *
+ * Returns 0 on success (or when skipped, either because the
+ * connector lacks the required capabilities or because the
+ * build defines NO_EXTERNAL_LINKS); returns 1 on failure.
+ */
+static int
+test_link_visit_external_links_no_cycles(void)
+{
+#ifndef NO_EXTERNAL_LINKS
+    size_t i;
+    htri_t link_exists;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t subgroup_id = H5I_INVALID_HID;
+    hid_t gcpl_id = H5I_INVALID_HID;
+    char ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+#endif
+
+    TESTING_MULTIPART("link visiting without cycles (only external links)");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, link, external link, iterate, or creation order "
+                 "aren't supported with this connector\n");
+        return 0;
+    }
+
+#ifndef NO_EXTERNAL_LINKS
+    TESTING_2("test setup");
+
+    /* Create (and immediately close) the separate file that all of the
+     * external links created below will point to */
+    HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s", EXTERNAL_LINK_TEST_FILE_NAME);
+
+    if ((file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
+        goto error;
+    }
+
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    /* Open the main API test file and the shared link test container group */
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Create a GCPL that tracks and indexes link creation order so that the
+     * H5_INDEX_CRT_ORDER iteration parts below are valid for these groups */
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create a GCPL\n");
+        goto error;
+    }
+
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't enable link creation order tracking and indexing on GCPL\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Build several nested subgroups, each holding external links pointing at
+     * the root group of the target file; names are generated in reverse so
+     * that name order and creation order differ */
+    for (i = 0; i < LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS; i++) {
+        size_t j;
+        char grp_name[LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_BUF_SIZE];
+
+        /* Create the groups with a reverse-ordering naming scheme to test creation order later */
+        HDsnprintf(grp_name, LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+                   LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME "%d",
+                   (int)(LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS - i - 1));
+
+        if ((subgroup_id = H5Gcreate2(group_id, grp_name, H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create subgroup '%s'\n", grp_name);
+            goto error;
+        }
+
+        for (j = 0; j < LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP; j++) {
+            char link_name[LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_BUF_SIZE];
+
+            /* Create the links with a reverse-ordering naming scheme to test creation order later */
+            HDsnprintf(link_name, LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_LINK_NAME "%d",
+                       (int)(LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP - j - 1));
+
+            if (H5Lcreate_external(ext_link_filename, "/", subgroup_id, link_name, H5P_DEFAULT, H5P_DEFAULT) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't create external link '%s'\n", link_name);
+                goto error;
+            }
+
+            /* Verify the link has been created */
+            if ((link_exists = H5Lexists(subgroup_id, link_name, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", link_name);
+                goto error;
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", link_name);
+                goto error;
+            }
+        }
+
+        if (H5Gclose(subgroup_id) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't close subgroup '%s'\n", grp_name);
+            goto error;
+        }
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /*
+         * NOTE: A counter is passed to the iteration callback to try to match up the
+         *       expected links with a given step throughout all of the following
+         *       iterations. This is to try and check that the links are indeed being
+         *       returned in the correct order. Each part starts the counter at its
+         *       own multiple of NUM_LINKS_PER_TEST so the callback can tell the
+         *       parts apart.
+         */
+
+        PART_BEGIN(H5Lvisit_no_cycles_link_name_increasing)
+        {
+            TESTING_2("H5Lvisit2 by link name in increasing order");
+
+            i = 0;
+
+            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_INC, link_visit_external_links_no_cycles_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_name_increasing);
+            }
+
+            if (i != LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_no_cycles_link_name_increasing);
+
+        PART_BEGIN(H5Lvisit_no_cycles_link_name_decreasing)
+        {
+            TESTING_2("H5Lvisit2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_DEC, link_visit_external_links_no_cycles_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            /* Connectors that cannot iterate alphabetically in reverse skip this part */
+            SKIPPED();
+            PART_EMPTY(H5Lvisit_no_cycles_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Lvisit_no_cycles_link_name_decreasing);
+
+        PART_BEGIN(H5Lvisit_no_cycles_link_creation_increasing)
+        {
+            TESTING_2("H5Lvisit2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_external_links_no_cycles_cb,
+                          &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_creation_increasing);
+            }
+
+            if (i != 3 * LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_no_cycles_link_creation_increasing);
+
+        PART_BEGIN(H5Lvisit_no_cycles_link_creation_decreasing)
+        {
+            TESTING_2("H5Lvisit2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_external_links_no_cycles_cb,
+                          &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_no_cycles_link_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_no_cycles_link_creation_decreasing);
+
+        PART_BEGIN(H5Lvisit_by_name_no_cycles_link_name_increasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by link name in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 0;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_INC, link_visit_external_links_no_cycles_cb, &i,
+                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_increasing);
+            }
+
+            if (i != LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_no_cycles_link_name_increasing);
+
+        PART_BEGIN(H5Lvisit_by_name_no_cycles_link_name_decreasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_DEC, link_visit_external_links_no_cycles_cb, &i,
+                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            /* Connectors that cannot iterate alphabetically in reverse skip this part */
+            SKIPPED();
+            PART_EMPTY(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+
+        PART_BEGIN(H5Lvisit_by_name_no_cycles_link_creation_increasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_external_links_no_cycles_cb, &i,
+                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_increasing);
+            }
+
+            if (i != 3 * LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_no_cycles_link_creation_increasing);
+
+        PART_BEGIN(H5Lvisit_by_name_no_cycles_link_creation_decreasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_external_links_no_cycles_cb, &i,
+                    H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_no_cycles_link_creation_decreasing);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Release any IDs that may still be open; error output is suppressed
+     * since some of these may legitimately be H5I_INVALID_HID */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(gcpl_id);
+        H5Gclose(subgroup_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+#else
+    /* External links disabled at compile time for this connector */
+    SKIPPED();
+    return 0;
+#endif
+}
+
+/*
+ * A test to check the functionality of recursive
+ * link iteration using H5Lvisit(_by_name)2 with
+ * only user-defined links and where there are no
+ * cyclic links. Iteration is done in increasing
+ * and decreasing order of both link name and link
+ * creation order.
+ *
+ * Currently an unimplemented stub that is always skipped.
+ *
+ * TODO refactor test so that creation order tests
+ * actually test the order that objects were created in.
+ */
+static int
+test_link_visit_ud_links_no_cycles(void)
+{
+    TESTING("link visiting without cycles (only user-defined links)");
+
+    SKIPPED();
+
+    /* NOTE(review): unlike the capability-based skips elsewhere in this file
+     * (which return 0), this unimplemented stub returns 1 — presumably to keep
+     * the TODO visible in the test totals; confirm against the harness's
+     * skip/failure accounting before changing it. */
+    return 1;
+}
+
+/*
+ * A test to check the functionality of recursive
+ * link iteration using H5Lvisit(_by_name)2 with
+ * mixed link types and where there are no cyclic
+ * links. Iteration is done in increasing and
+ * decreasing order of both link name and link
+ * creation order.
+ *
+ * TODO refactor test so that creation order tests
+ * actually test the order that objects were created in.
+ *
+ * TODO add UD links
+ *
+ * TODO refactor test to create a macroed number of subgroups
+ */
+static int
+test_link_visit_mixed_links_no_cycles(void)
+{
+#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
+ size_t i;
+ htri_t link_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t subgroup1 = H5I_INVALID_HID, subgroup2 = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dset_dtype = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ char ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+#endif
+
+ TESTING_MULTIPART("link visiting without cycles (mixed link types)");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, link, hard, soft, external link, iterate, or "
+ "creation order aren't supported with this connector\n");
+ return 0;
+ }
+
+#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
+ TESTING_2("test setup");
+
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s", EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ goto error;
+ }
+
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create a GCPL\n");
+ goto error;
+ }
+
+ if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't enable link creation order tracking and indexing on GCPL\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+ H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ if ((subgroup1 = H5Gcreate2(group_id, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create first subgroup '%s'\n",
+ LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2);
+ goto error;
+ }
+
+ if ((subgroup2 = H5Gcreate2(group_id, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create second subgroup '%s'\n",
+ LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3);
+ goto error;
+ }
+
+ if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((fspace_id = generate_random_dataspace(LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_SPACE_RANK, NULL,
+ NULL, FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(subgroup1, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME, dset_dtype,
+ fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create first dataset '%s'\n", LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME);
+ }
+
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(subgroup2, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME2, dset_dtype,
+ fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create second dataset '%s'\n", LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME);
+ }
+
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+
+ if (H5Lcreate_hard(subgroup1, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME, subgroup1,
+ LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME1, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create first hard link '%s'\n",
+ LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME1);
+ goto error;
+ }
+
+ if (H5Lcreate_soft(LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME, subgroup1,
+ LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME2);
+ goto error;
+ }
+
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup2, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME3,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME3);
+ goto error;
+ }
+
+ if (H5Lcreate_hard(subgroup2, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME2, subgroup2,
+ LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME4, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create second hard link '%s'\n",
+ LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME4);
+ goto error;
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup1, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME1, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if first link '%s' exists\n",
+ LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME1);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link 1 did not exist\n");
+ goto error;
+ }
+
+ if ((link_exists = H5Lexists(subgroup1, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME2, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if second link '%s' exists\n",
+ LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME2);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link 2 did not exist\n");
+ goto error;
+ }
+
+ if ((link_exists = H5Lexists(subgroup2, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME3, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if third link '%s' exists\n",
+ LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME3);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link 3 did not exist\n");
+ goto error;
+ }
+
+ if ((link_exists = H5Lexists(subgroup2, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME4, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if fourth link '%s' exists\n",
+ LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME4);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link 4 did not exist\n");
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ /*
+ * NOTE: A counter is passed to the iteration callback to try to match up the
+ * expected links with a given step throughout all of the following
+ * iterations. This is to try and check that the links are indeed being
+ * returned in the correct order.
+ */
+
+ PART_BEGIN(H5Lvisit_no_cycles_link_name_increasing)
+ {
+ TESTING_2("H5Lvisit2 by link name in increasing order");
+
+ i = 0;
+
+ if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_INC, link_visit_mixed_links_no_cycles_cb, &i) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit2 by index type name in increasing order failed\n");
+ PART_ERROR(H5Lvisit_no_cycles_link_name_increasing);
+ }
+
+ if (i != LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_no_cycles_link_name_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_no_cycles_link_name_increasing);
+
+ PART_BEGIN(H5Lvisit_no_cycles_link_name_decreasing)
+ {
+ TESTING_2("H5Lvisit2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Reset the counter to the appropriate value for the next test */
+ i = LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS;
+
+ if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_DEC, link_visit_mixed_links_no_cycles_cb, &i) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit2 by index type name in decreasing order failed\n");
+ PART_ERROR(H5Lvisit_no_cycles_link_name_decreasing);
+ }
+
+ if (i != 2 * LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_no_cycles_link_name_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lvisit_no_cycles_link_name_decreasing);
+#endif
+ }
+ PART_END(H5Lvisit_no_cycles_link_name_decreasing);
+
+ PART_BEGIN(H5Lvisit_no_cycles_link_creation_increasing)
+ {
+ TESTING_2("H5Lvisit2 by creation order in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 2 * LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS;
+
+ if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_mixed_links_no_cycles_cb,
+ &i) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit2 by index type creation order in increasing order failed\n");
+ PART_ERROR(H5Lvisit_no_cycles_link_creation_increasing);
+ }
+
+ if (i != 3 * LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_no_cycles_link_creation_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_no_cycles_link_creation_increasing);
+
+ PART_BEGIN(H5Lvisit_no_cycles_link_creation_decreasing)
+ {
+ TESTING_2("H5Lvisit2 by creation order in decreasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 3 * LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS;
+
+ if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_mixed_links_no_cycles_cb,
+ &i) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit2 by index type creation order in decreasing order failed\n");
+ PART_ERROR(H5Lvisit_no_cycles_link_creation_decreasing);
+ }
+
+ if (i != 4 * LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_no_cycles_link_creation_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_no_cycles_link_creation_decreasing);
+
+ PART_BEGIN(H5Lvisit_by_name_no_cycles_link_name_increasing)
+ {
+ TESTING_2("H5Lvisit_by_name2 by link name in increasing order");
+
+ i = 0;
+
+ if (H5Lvisit_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_INC, link_visit_mixed_links_no_cycles_cb, &i, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 by index type name in increasing order failed\n");
+ PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_increasing);
+ }
+
+ if (i != LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_by_name_no_cycles_link_name_increasing);
+
+ PART_BEGIN(H5Lvisit_by_name_no_cycles_link_name_decreasing)
+ {
+ TESTING_2("H5Lvisit_by_name2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Reset the counter to the appropriate value for the next test */
+ i = LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS;
+
+ if (H5Lvisit_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_DEC, link_visit_mixed_links_no_cycles_cb, &i, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 by index type name in decreasing order failed\n");
+ PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+ }
+
+ if (i != 2 * LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+#endif
+ }
+ PART_END(H5Lvisit_by_name_no_cycles_link_name_decreasing);
+
+ PART_BEGIN(H5Lvisit_by_name_no_cycles_link_creation_increasing)
+ {
+ TESTING_2("H5Lvisit_by_name2 by creation order in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 2 * LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS;
+
+ if (H5Lvisit_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+ H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_mixed_links_no_cycles_cb, &i,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 by index type creation order in increasing order failed\n");
+ PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_increasing);
+ }
+
+ if (i != 3 * LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_by_name_no_cycles_link_creation_increasing);
+
+ PART_BEGIN(H5Lvisit_by_name_no_cycles_link_creation_decreasing)
+ {
+ TESTING_2("H5Lvisit_by_name2 by creation order in decreasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 3 * LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS;
+
+ if (H5Lvisit_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME,
+ H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_mixed_links_no_cycles_cb, &i,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 by index type creation order in decreasing order failed\n");
+ PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_decreasing);
+ }
+
+ if (i != 4 * LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_by_name_no_cycles_link_creation_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_by_name_no_cycles_link_creation_decreasing);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dset_dtype) < 0)
+ TEST_ERROR;
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(subgroup1) < 0)
+ TEST_ERROR;
+ if (H5Gclose(subgroup2) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ H5Tclose(dset_dtype);
+ H5Dclose(dset_id);
+ H5Pclose(gcpl_id);
+ H5Gclose(subgroup1);
+ H5Gclose(subgroup2);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+#else
+ SKIPPED();
+ return 0;
+#endif
+}
+
+/*
+ * A test to check the functionality of recursive
+ * link iteration using H5Lvisit(_by_name)2 with
+ * only hard links and where there are cyclic links.
+ * Iteration is done in increasing and decreasing
+ * order of both link name and link creation order.
+ */
+static int
+test_link_visit_hard_links_cycles(void)
+{
+    /* Running counter passed through every H5Lvisit2/H5Lvisit_by_name2 call below;
+     * it accumulates across parts so the callback can verify visitation order. */
+    size_t i;
+    htri_t link_exists;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t subgroup_id = H5I_INVALID_HID;
+    hid_t gcpl_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("link visiting with cycles (only hard links)");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, link, hard link, iterate, or creation order "
+                 "aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create a GCPL\n");
+        goto error;
+    }
+
+    /* Creation-order tracking must be enabled for the H5_INDEX_CRT_ORDER iterations below */
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't enable link creation order tracking and indexing on GCPL\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, LINK_VISIT_HARD_LINKS_CYCLE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 LINK_VISIT_HARD_LINKS_CYCLE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    for (i = 0; i < LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_SUBGROUPS; i++) {
+        size_t j;
+        char grp_name[LINK_VISIT_HARD_LINKS_CYCLE_TEST_BUF_SIZE];
+
+        /* Create the groups with a reverse-ordering naming scheme to test creation order later */
+        HDsnprintf(grp_name, LINK_VISIT_HARD_LINKS_CYCLE_TEST_BUF_SIZE,
+                   LINK_VISIT_HARD_LINKS_CYCLE_TEST_NESTED_GRP_NAME "%d",
+                   (int)(LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_SUBGROUPS - i - 1));
+
+        if ((subgroup_id = H5Gcreate2(group_id, grp_name, H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create subgroup '%s'\n", grp_name);
+            goto error;
+        }
+
+        for (j = 0; j < LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP; j++) {
+            char link_name[LINK_VISIT_HARD_LINKS_CYCLE_TEST_BUF_SIZE];
+
+            /* Create the links with a reverse-ordering naming scheme to test creation order later */
+            HDsnprintf(link_name, LINK_VISIT_HARD_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_HARD_LINKS_CYCLE_TEST_LINK_NAME "%d",
+                       (int)(LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP - j - 1));
+
+            /* Hard link targets "." (the subgroup itself), so each link forms a cycle
+             * back to its containing group -- the condition under test */
+            if (H5Lcreate_hard(subgroup_id, ".", subgroup_id, link_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create hard link '%s'\n", link_name);
+                goto error;
+            }
+
+            /* Verify the link has been created */
+            if ((link_exists = H5Lexists(subgroup_id, link_name, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", link_name);
+                goto error;
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", link_name);
+                goto error;
+            }
+        }
+
+        if (H5Gclose(subgroup_id) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't close subgroup '%s'\n", grp_name);
+            goto error;
+        }
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /*
+         * NOTE: A counter is passed to the iteration callback to try to match up the
+         *       expected links with a given step throughout all of the following
+         *       iterations. This is to try and check that the links are indeed being
+         *       returned in the correct order.
+         */
+
+        PART_BEGIN(H5Lvisit_cycles_link_name_increasing)
+        {
+            TESTING_2("H5Lvisit2 by link name in increasing order");
+
+            i = 0;
+
+            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_INC, link_visit_hard_links_cycles_cb, &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Lvisit_cycles_link_name_increasing);
+            }
+
+            /* Each part advances the counter by exactly NUM_LINKS_PER_TEST when all links are seen */
+            if (i != LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_cycles_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_cycles_link_name_increasing);
+
+        PART_BEGIN(H5Lvisit_cycles_link_name_decreasing)
+        {
+            TESTING_2("H5Lvisit2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_DEC, link_visit_hard_links_cycles_cb, &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_cycles_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_cycles_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Lvisit_cycles_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Lvisit_cycles_link_name_decreasing);
+
+        PART_BEGIN(H5Lvisit_cycles_link_creation_increasing)
+        {
+            TESTING_2("H5Lvisit2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_hard_links_cycles_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Lvisit_cycles_link_creation_increasing);
+            }
+
+            if (i != 3 * LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_cycles_link_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_cycles_link_creation_increasing);
+
+        PART_BEGIN(H5Lvisit_cycles_link_creation_decreasing)
+        {
+            TESTING_2("H5Lvisit2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_hard_links_cycles_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_cycles_link_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_cycles_link_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_cycles_link_creation_decreasing);
+
+        PART_BEGIN(H5Lvisit_by_name_cycles_link_name_increasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by link name in increasing order");
+
+            i = 0;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_HARD_LINKS_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_INC, link_visit_hard_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_name_increasing);
+            }
+
+            if (i != LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_cycles_link_name_increasing);
+
+        PART_BEGIN(H5Lvisit_by_name_cycles_link_name_decreasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_HARD_LINKS_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_DEC, link_visit_hard_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Lvisit_by_name_cycles_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Lvisit_by_name_cycles_link_name_decreasing);
+
+        PART_BEGIN(H5Lvisit_by_name_cycles_link_creation_increasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_HARD_LINKS_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_hard_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_creation_increasing);
+            }
+
+            if (i != 3 * LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_cycles_link_creation_increasing);
+
+        PART_BEGIN(H5Lvisit_by_name_cycles_link_creation_decreasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_HARD_LINKS_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_hard_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_cycles_link_creation_decreasing);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort release of every ID that may be open; errors are suppressed
+     * since we are already on the failure path (subgroup_id may still be open
+     * if the setup loop failed mid-iteration) */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(gcpl_id);
+        H5Gclose(subgroup_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check the functionality of recursive
+ * link iteration using H5Lvisit(_by_name)2 with
+ * only soft links and where there are cyclic links.
+ * Iteration is done in increasing and decreasing
+ * order of both link name and link creation order.
+ */
+static int
+test_link_visit_soft_links_cycles(void)
+{
+    /* Running counter passed through every H5Lvisit2/H5Lvisit_by_name2 call below;
+     * it accumulates across parts so the callback can verify visitation order. */
+    size_t i;
+    htri_t link_exists;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t subgroup_id = H5I_INVALID_HID;
+    hid_t gcpl_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("link visiting with cycles (only soft links)");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, link, soft link, iterate, or creation order "
+                 "aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create a GCPL\n");
+        goto error;
+    }
+
+    /* Creation-order tracking must be enabled for the H5_INDEX_CRT_ORDER iterations below */
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't enable link creation order tracking and indexing on GCPL\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, LINK_VISIT_SOFT_LINKS_CYCLE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 LINK_VISIT_SOFT_LINKS_CYCLE_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    for (i = 0; i < LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_SUBGROUPS; i++) {
+        size_t j;
+        char grp_name[LINK_VISIT_SOFT_LINKS_CYCLE_TEST_BUF_SIZE];
+
+        /* Create the groups with a reverse-ordering naming scheme to test creation order later */
+        HDsnprintf(grp_name, LINK_VISIT_SOFT_LINKS_CYCLE_TEST_BUF_SIZE,
+                   LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NESTED_GRP_NAME "%d",
+                   (int)(LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_SUBGROUPS - i - 1));
+
+        if ((subgroup_id = H5Gcreate2(group_id, grp_name, H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create subgroup '%s'\n", grp_name);
+            goto error;
+        }
+
+        for (j = 0; j < LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP; j++) {
+            char link_name[LINK_VISIT_SOFT_LINKS_CYCLE_TEST_BUF_SIZE];
+            /* Target path buffer is double-sized: it holds the fixed prefix plus grp_name */
+            char link_target[2 * LINK_VISIT_SOFT_LINKS_CYCLE_TEST_BUF_SIZE];
+
+            /* Create the links with a reverse-ordering naming scheme to test creation order later */
+            HDsnprintf(link_name, LINK_VISIT_SOFT_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_SOFT_LINKS_CYCLE_TEST_LINK_NAME "%d",
+                       (int)(LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP - j - 1));
+
+            /* Soft link target is the absolute path of the group that contains the
+             * link itself, so each link forms a cycle -- the condition under test */
+            HDsnprintf(link_target, 2 * LINK_VISIT_SOFT_LINKS_CYCLE_TEST_BUF_SIZE,
+                       "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_SOFT_LINKS_CYCLE_TEST_SUBGROUP_NAME "/%s",
+                       grp_name);
+
+            if (H5Lcreate_soft(link_target, subgroup_id, link_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create soft link '%s'\n", link_name);
+                goto error;
+            }
+
+            /* Verify the link has been created */
+            if ((link_exists = H5Lexists(subgroup_id, link_name, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if link '%s' exists\n", link_name);
+                goto error;
+            }
+
+            if (!link_exists) {
+                H5_FAILED();
+                HDprintf("    link '%s' did not exist\n", link_name);
+                goto error;
+            }
+        }
+
+        if (H5Gclose(subgroup_id) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't close subgroup '%s'\n", grp_name);
+            goto error;
+        }
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /*
+         * NOTE: A counter is passed to the iteration callback to try to match up the
+         *       expected links with a given step throughout all of the following
+         *       iterations. This is to try and check that the links are indeed being
+         *       returned in the correct order.
+         */
+
+        PART_BEGIN(H5Lvisit_cycles_link_name_increasing)
+        {
+            TESTING_2("H5Lvisit2 by link name in increasing order");
+
+            i = 0;
+
+            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_INC, link_visit_soft_links_cycles_cb, &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Lvisit_cycles_link_name_increasing);
+            }
+
+            /* Each part advances the counter by exactly NUM_LINKS_PER_TEST when all links are seen */
+            if (i != LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_cycles_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_cycles_link_name_increasing);
+
+        PART_BEGIN(H5Lvisit_cycles_link_name_decreasing)
+        {
+            TESTING_2("H5Lvisit2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_DEC, link_visit_soft_links_cycles_cb, &i) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_cycles_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_cycles_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Lvisit_cycles_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Lvisit_cycles_link_name_decreasing);
+
+        PART_BEGIN(H5Lvisit_cycles_link_creation_increasing)
+        {
+            TESTING_2("H5Lvisit2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_soft_links_cycles_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Lvisit_cycles_link_creation_increasing);
+            }
+
+            if (i != 3 * LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_cycles_link_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_cycles_link_creation_increasing);
+
+        PART_BEGIN(H5Lvisit_cycles_link_creation_decreasing)
+        {
+            TESTING_2("H5Lvisit2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_soft_links_cycles_cb, &i) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_cycles_link_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_cycles_link_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_cycles_link_creation_decreasing);
+
+        PART_BEGIN(H5Lvisit_by_name_cycles_link_name_increasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by link name in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 0;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_SOFT_LINKS_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_INC, link_visit_soft_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type name in increasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_name_increasing);
+            }
+
+            if (i != LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_cycles_link_name_increasing);
+
+        PART_BEGIN(H5Lvisit_by_name_cycles_link_name_decreasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_SOFT_LINKS_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_NAME, H5_ITER_DEC, link_visit_soft_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type name in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_name_decreasing);
+            }
+
+            if (i != 2 * LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_name_decreasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Lvisit_by_name_cycles_link_name_decreasing);
+#endif
+        }
+        PART_END(H5Lvisit_by_name_cycles_link_name_decreasing);
+
+        PART_BEGIN(H5Lvisit_by_name_cycles_link_creation_increasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_SOFT_LINKS_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_soft_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type creation order in increasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_creation_increasing);
+            }
+
+            if (i != 3 * LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_creation_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_cycles_link_creation_increasing);
+
+        PART_BEGIN(H5Lvisit_by_name_cycles_link_creation_decreasing)
+        {
+            TESTING_2("H5Lvisit_by_name2 by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+            if (H5Lvisit_by_name2(
+                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_SOFT_LINKS_CYCLE_TEST_SUBGROUP_NAME,
+                    H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_soft_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Lvisit_by_name2 by index type creation order in decreasing order failed\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_creation_decreasing);
+            }
+
+            if (i != 4 * LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+                H5_FAILED();
+                HDprintf("    some links were not visited!\n");
+                PART_ERROR(H5Lvisit_by_name_cycles_link_creation_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Lvisit_by_name_cycles_link_creation_decreasing);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort release of every ID that may be open; errors are suppressed
+     * since we are already on the failure path (subgroup_id may still be open
+     * if the setup loop failed mid-iteration) */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(gcpl_id);
+        H5Gclose(subgroup_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check the functionality of recursive
+ * link iteration using H5Lvisit(_by_name)2 with
+ * only external links and where there are cyclic
+ * links. Iteration is done in increasing and
+ * decreasing order of both link name and link
+ * creation order.
+ */
+static int
+test_link_visit_external_links_cycles(void)
+{
+#ifndef NO_EXTERNAL_LINKS
+ size_t i;
+ htri_t link_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t subgroup_id = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+#endif
+
+ TESTING_MULTIPART("link visiting with cycles (only external links)");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, link, external link, iterate, or creation order "
+ "aren't supported with this connector\n");
+ return 0;
+ }
+
+#ifndef NO_EXTERNAL_LINKS
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create a GCPL\n");
+ goto error;
+ }
+
+ if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't enable link creation order tracking and indexing on GCPL\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, LINK_VISIT_EXT_LINKS_CYCLE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ LINK_VISIT_EXT_LINKS_CYCLE_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_SUBGROUPS; i++) {
+ size_t j;
+ char grp_name[LINK_VISIT_EXT_LINKS_CYCLE_TEST_BUF_SIZE];
+
+ /* Create the groups with a reverse-ordering naming scheme to test creation order later */
+ HDsnprintf(grp_name, LINK_VISIT_EXT_LINKS_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_EXT_LINKS_CYCLE_TEST_NESTED_GRP_NAME "%d",
+ (int)(LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_SUBGROUPS - i - 1));
+
+ if ((subgroup_id = H5Gcreate2(group_id, grp_name, H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create subgroup '%s'\n", grp_name);
+ goto error;
+ }
+
+ for (j = 0; j < LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP; j++) {
+ char link_name[LINK_VISIT_EXT_LINKS_CYCLE_TEST_BUF_SIZE];
+ char link_target_obj[2 * LINK_VISIT_EXT_LINKS_CYCLE_TEST_BUF_SIZE];
+
+ /* Create the links with a reverse-ordering naming scheme to test creation order later */
+ HDsnprintf(link_name, LINK_VISIT_EXT_LINKS_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_EXT_LINKS_CYCLE_TEST_LINK_NAME "%d",
+ (int)(LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP - j - 1));
+
+ HDsnprintf(link_target_obj, 2 * LINK_VISIT_EXT_LINKS_CYCLE_TEST_BUF_SIZE,
+ "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_EXT_LINKS_CYCLE_TEST_SUBGROUP_NAME "/%s",
+ grp_name);
+
+ if (H5Lcreate_external(H5_api_test_filename, link_target_obj, subgroup_id, link_name, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", link_name);
+ goto error;
+ }
+
+ /* Verify the link has been created */
+ if ((link_exists = H5Lexists(subgroup_id, link_name, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' exists\n", link_name);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' did not exist\n", link_name);
+ goto error;
+ }
+ }
+
+ if (H5Gclose(subgroup_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close subgroup '%s'\n", grp_name);
+ goto error;
+ }
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ /*
+ * NOTE: A counter is passed to the iteration callback to try to match up the
+ * expected links with a given step throughout all of the following
+ * iterations. This is to try and check that the links are indeed being
+ * returned in the correct order.
+ */
+
+ PART_BEGIN(H5Lvisit_cycles_link_name_increasing)
+ {
+ TESTING_2("H5Lvisit2 by link name in increasing order");
+
+ i = 0;
+
+ if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_INC, link_visit_external_links_cycles_cb, &i) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit2 by index type name in increasing order failed\n");
+ PART_ERROR(H5Lvisit_cycles_link_name_increasing);
+ }
+
+ if (i != LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_cycles_link_name_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_cycles_link_name_increasing);
+
+ PART_BEGIN(H5Lvisit_cycles_link_name_decreasing)
+ {
+ TESTING_2("H5Lvisit2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Reset the counter to the appropriate value for the next test */
+ i = LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+ if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_DEC, link_visit_external_links_cycles_cb, &i) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit2 by index type name in decreasing order failed\n");
+ PART_ERROR(H5Lvisit_cycles_link_name_decreasing);
+ }
+
+ if (i != 2 * LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_cycles_link_name_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lvisit_cycles_link_name_decreasing);
+#endif
+ }
+ PART_END(H5Lvisit_cycles_link_name_decreasing);
+
+ PART_BEGIN(H5Lvisit_cycles_link_creation_increasing)
+ {
+ TESTING_2("H5Lvisit2 by creation order in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 2 * LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+ if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_external_links_cycles_cb,
+ &i) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit2 by index type creation order in increasing order failed\n");
+ PART_ERROR(H5Lvisit_cycles_link_creation_increasing);
+ }
+
+ if (i != 3 * LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_cycles_link_creation_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_cycles_link_creation_increasing);
+
+ PART_BEGIN(H5Lvisit_cycles_link_creation_decreasing)
+ {
+ TESTING_2("H5Lvisit2 by creation order in decreasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 3 * LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+ if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_external_links_cycles_cb,
+ &i) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit2 by index type creation order in decreasing order failed\n");
+ PART_ERROR(H5Lvisit_cycles_link_creation_decreasing);
+ }
+
+ if (i != 4 * LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_cycles_link_creation_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_cycles_link_creation_decreasing);
+
+ PART_BEGIN(H5Lvisit_by_name_cycles_link_name_increasing)
+ {
+ TESTING_2("H5Lvisit_by_name2 by link name in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 0;
+
+ if (H5Lvisit_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_EXT_LINKS_CYCLE_TEST_SUBGROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_INC, link_visit_external_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 by index type name in increasing order failed\n");
+ PART_ERROR(H5Lvisit_by_name_cycles_link_name_increasing);
+ }
+
+ if (i != LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_by_name_cycles_link_name_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_by_name_cycles_link_name_increasing);
+
+ PART_BEGIN(H5Lvisit_by_name_cycles_link_name_decreasing)
+ {
+ TESTING_2("H5Lvisit_by_name2 by link name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Reset the counter to the appropriate value for the next test */
+ i = LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+ if (H5Lvisit_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_EXT_LINKS_CYCLE_TEST_SUBGROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_DEC, link_visit_external_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 by index type name in decreasing order failed\n");
+ PART_ERROR(H5Lvisit_by_name_cycles_link_name_decreasing);
+ }
+
+ if (i != 2 * LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_by_name_cycles_link_name_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Lvisit_by_name_cycles_link_name_decreasing);
+#endif
+ }
+ PART_END(H5Lvisit_by_name_cycles_link_name_decreasing);
+
+ PART_BEGIN(H5Lvisit_by_name_cycles_link_creation_increasing)
+ {
+ TESTING_2("H5Lvisit_by_name2 by creation order in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 2 * LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+ if (H5Lvisit_by_name2(file_id,
+ "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_EXT_LINKS_CYCLE_TEST_SUBGROUP_NAME,
+ H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_external_links_cycles_cb, &i,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 by index type creation order in increasing order failed\n");
+ PART_ERROR(H5Lvisit_by_name_cycles_link_creation_increasing);
+ }
+
+ if (i != 3 * LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_by_name_cycles_link_creation_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_by_name_cycles_link_creation_increasing);
+
+ PART_BEGIN(H5Lvisit_by_name_cycles_link_creation_decreasing)
+ {
+ TESTING_2("H5Lvisit_by_name2 by creation order in decreasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 3 * LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+ if (H5Lvisit_by_name2(file_id,
+ "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_EXT_LINKS_CYCLE_TEST_SUBGROUP_NAME,
+ H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_external_links_cycles_cb, &i,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 by index type creation order in decreasing order failed\n");
+ PART_ERROR(H5Lvisit_by_name_cycles_link_creation_decreasing);
+ }
+
+ if (i != 4 * LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST) {
+ H5_FAILED();
+ HDprintf(" some links were not visited!\n");
+ PART_ERROR(H5Lvisit_by_name_cycles_link_creation_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_by_name_cycles_link_creation_decreasing);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(gcpl_id);
+ H5Gclose(subgroup_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+#else
+ SKIPPED();
+ return 0;
+#endif
+}
+
+/*
+ * A test to check the functionality of recursive
+ * link iteration using H5Lvisit(_by_name)2 with
+ * only user-defined links and where there are
+ * cyclic links. Iteration is done in increasing
+ * and decreasing order of both link name and link
+ * creation order.
+ *
+ * TODO refactor test so that creation order tests
+ * actually test the order that objects were created in.
+ */
/*
 * Stub: the user-defined-link cycle-visiting test has not been implemented
 * yet (see the TODO in the comment block above this function).  It only
 * announces itself and reports SKIPPED.
 */
static int
test_link_visit_ud_links_cycles(void)
{
    TESTING("link visiting with cycles (only user-defined links)");

    SKIPPED();

    /* NOTE(review): this skipped stub returns 1, which the test runner counts
     * as a failure, whereas the other SKIPPED paths in this file return 0.
     * Confirm whether the non-zero return is intentional (i.e. the stub is
     * deliberately flagged until the test is implemented). */
    return 1;
}
+
+/*
+ * A test to check the functionality of recursive
+ * link iteration using H5Lvisit(_by_name)2 with
+ * mixed link types and where there are cyclic links.
+ * Iteration is done in increasing and decreasing
+ * order of both link name and link creation order.
+ *
+ * TODO refactor test so that creation order tests
+ * actually test the order that objects were created in.
+ */
/*
 * Exercises H5Lvisit2/H5Lvisit_by_name2 on a group hierarchy that contains
 * cycles built from a mix of hard, soft and external links, checking that
 * recursive visiting terminates and that every link is reported exactly once
 * in each of the eight iteration passes (by name / by creation order, each
 * increasing and decreasing, via both API entry points).  The whole test body
 * is compiled out when the connector supports neither external nor
 * user-defined links.
 *
 * Returns 0 on success, 1 on failure.
 */
static int
test_link_visit_mixed_links_cycles(void)
{
#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
    htri_t link_exists;
    /* Iteration-callback counter shared across all eight passes; its running
     * value tells link_visit_mixed_links_cycles_cb which pass is active and
     * which link it should expect next. */
    size_t i;
    hid_t  file_id         = H5I_INVALID_HID;
    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
    hid_t  subgroup1 = H5I_INVALID_HID, subgroup2 = H5I_INVALID_HID;
    hid_t  gcpl_id = H5I_INVALID_HID;
    char   ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
#endif

    TESTING_MULTIPART("link visiting with cycles (mixed link types)");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
        SKIPPED();
        HDprintf("    API functions for basic file, group, link, hard, soft, external link, iterate, or "
                 "creation order aren't supported with this connector\n");
        return 0;
    }

#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
    TESTING_2("test setup");

    /* Create (and immediately close) a separate file for the external link
     * created below to point at. */
    HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s", EXTERNAL_LINK_TEST_FILE_NAME);

    if ((file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create file '%s' for external link to reference\n", ext_link_filename);
        goto error;
    }

    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
        goto error;
    }

    /* Creation-order tracking must be enabled on every group so that the
     * H5_INDEX_CRT_ORDER passes below are valid. */
    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create a GCPL\n");
        goto error;
    }

    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
        H5_FAILED();
        HDprintf("    couldn't enable link creation order tracking and indexing on GCPL\n");
        goto error;
    }

    if ((group_id = H5Gcreate2(container_group, LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME, H5P_DEFAULT,
                               gcpl_id, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create container subgroup '%s'\n",
                 LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME);
        goto error;
    }

    if ((subgroup1 = H5Gcreate2(group_id, LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2, H5P_DEFAULT,
                                gcpl_id, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create first subgroup '%s'\n",
                 LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2);
        goto error;
    }

    if ((subgroup2 = H5Gcreate2(group_id, LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME3, H5P_DEFAULT,
                                gcpl_id, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create second subgroup '%s'\n",
                 LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME3);
        goto error;
    }

    /* Hard link inside subgroup1 that points back at subgroup1 itself,
     * forming the first cycle. */
    if (H5Lcreate_hard(group_id, LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2, subgroup1,
                       LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME1, H5P_DEFAULT, H5P_DEFAULT) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create first hard link '%s'\n", LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME1);
        goto error;
    }

    /* Soft link inside subgroup1 whose target path is subgroup1 itself —
     * a second, path-based cycle. */
    if (H5Lcreate_soft("/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME
                       "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2,
                       subgroup1, LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME2, H5P_DEFAULT,
                       H5P_DEFAULT) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create soft link '%s'\n", LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME2);
        goto error;
    }

    /* External link into the root of the scratch file created above. */
    if (H5Lcreate_external(ext_link_filename, "/", subgroup2, LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME3,
                           H5P_DEFAULT, H5P_DEFAULT) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create external link '%s'\n", LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME3);
        goto error;
    }

    /* Hard link inside subgroup2 that points back at subgroup2 itself,
     * forming another cycle. */
    if (H5Lcreate_hard(group_id, LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME3, subgroup2,
                       LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME4, H5P_DEFAULT, H5P_DEFAULT) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create second hard link '%s'\n", LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME4);
        goto error;
    }

    /* Verify the links have been created */
    if ((link_exists = H5Lexists(subgroup1, LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME1, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't determine if first link '%s' exists\n",
                 LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME1);
        goto error;
    }

    if (!link_exists) {
        H5_FAILED();
        HDprintf("    first link '%s' did not exist\n", LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME1);
        goto error;
    }

    if ((link_exists = H5Lexists(subgroup1, LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME2, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't determine if second link '%s' exists\n",
                 LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME2);
        goto error;
    }

    if (!link_exists) {
        H5_FAILED();
        HDprintf("    second link '%s' did not exist\n", LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME2);
        goto error;
    }

    if ((link_exists = H5Lexists(subgroup2, LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME3, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't determine if third link '%s' exists\n",
                 LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME3);
        goto error;
    }

    if (!link_exists) {
        H5_FAILED();
        HDprintf("    third link '%s' did not exist\n", LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME3);
        goto error;
    }

    if ((link_exists = H5Lexists(subgroup2, LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME4, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't determine if fourth link '%s' exists\n",
                 LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME4);
        goto error;
    }

    if (!link_exists) {
        H5_FAILED();
        HDprintf("    fourth link '%s' did not exist\n", LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME4);
        goto error;
    }

    PASSED();

    BEGIN_MULTIPART
    {
        /*
         * NOTE: A counter is passed to the iteration callback to try to match up the
         *       expected links with a given step throughout all of the following
         *       iterations. This is to try and check that the links are indeed being
         *       returned in the correct order.
         */

        PART_BEGIN(H5Lvisit_cycles_link_name_increasing)
        {
            TESTING_2("H5Lvisit2 by link name in increasing order");

            i = 0;

            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_INC, link_visit_mixed_links_cycles_cb, &i) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit2 by index type name in increasing order failed\n");
                PART_ERROR(H5Lvisit_cycles_link_name_increasing);
            }

            /* The callback bumps the counter once per visited link; any other
             * final value means a link was skipped or visited twice. */
            if (i != LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS) {
                H5_FAILED();
                HDprintf("    some links were not visited!\n");
                PART_ERROR(H5Lvisit_cycles_link_name_increasing);
            }

            PASSED();
        }
        PART_END(H5Lvisit_cycles_link_name_increasing);

        PART_BEGIN(H5Lvisit_cycles_link_name_decreasing)
        {
            TESTING_2("H5Lvisit2 by link name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
            /* Reset the counter to the appropriate value for the next test */
            i = LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS;

            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_DEC, link_visit_mixed_links_cycles_cb, &i) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit2 by index type name in decreasing order failed\n");
                PART_ERROR(H5Lvisit_cycles_link_name_decreasing);
            }

            if (i != 2 * LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS) {
                H5_FAILED();
                HDprintf("    some links were not visited!\n");
                PART_ERROR(H5Lvisit_cycles_link_name_decreasing);
            }

            PASSED();
#else
            SKIPPED();
            PART_EMPTY(H5Lvisit_cycles_link_name_decreasing);
#endif
        }
        PART_END(H5Lvisit_cycles_link_name_decreasing);

        PART_BEGIN(H5Lvisit_cycles_link_creation_increasing)
        {
            TESTING_2("H5Lvisit2 by creation order in increasing order");

            /* Reset the counter to the appropriate value for the next test */
            i = 2 * LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS;

            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_mixed_links_cycles_cb, &i) <
                0) {
                H5_FAILED();
                HDprintf("    H5Lvisit2 by index type creation order in increasing order failed\n");
                PART_ERROR(H5Lvisit_cycles_link_creation_increasing);
            }

            if (i != 3 * LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS) {
                H5_FAILED();
                HDprintf("    some links were not visited!\n");
                PART_ERROR(H5Lvisit_cycles_link_creation_increasing);
            }

            PASSED();
        }
        PART_END(H5Lvisit_cycles_link_creation_increasing);

        PART_BEGIN(H5Lvisit_cycles_link_creation_decreasing)
        {
            TESTING_2("H5Lvisit2 by creation order in decreasing order");

            /* Reset the counter to the appropriate value for the next test */
            i = 3 * LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS;

            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_mixed_links_cycles_cb, &i) <
                0) {
                H5_FAILED();
                HDprintf("    H5Lvisit2 by index type creation order in decreasing order failed\n");
                PART_ERROR(H5Lvisit_cycles_link_creation_decreasing);
            }

            if (i != 4 * LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS) {
                H5_FAILED();
                HDprintf("    some links were not visited!\n");
                PART_ERROR(H5Lvisit_cycles_link_creation_decreasing);
            }

            PASSED();
        }
        PART_END(H5Lvisit_cycles_link_creation_decreasing);

        PART_BEGIN(H5Lvisit_by_name_cycles_link_name_increasing)
        {
            TESTING_2("H5Lvisit_by_name2 by link name in increasing order");

            /* Reset the counter to the appropriate value for the next test */
            i = 0;

            if (H5Lvisit_by_name2(
                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME,
                    H5_INDEX_NAME, H5_ITER_INC, link_visit_mixed_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit_by_name2 by index type name in increasing order failed\n");
                PART_ERROR(H5Lvisit_by_name_cycles_link_name_increasing);
            }

            if (i != LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS) {
                H5_FAILED();
                HDprintf("    some links were not visited!\n");
                PART_ERROR(H5Lvisit_by_name_cycles_link_name_increasing);
            }

            PASSED();
        }
        PART_END(H5Lvisit_by_name_cycles_link_name_increasing);

        PART_BEGIN(H5Lvisit_by_name_cycles_link_name_decreasing)
        {
            TESTING_2("H5Lvisit_by_name2 by link name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
            /* Reset the counter to the appropriate value for the next test */
            i = LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS;

            if (H5Lvisit_by_name2(
                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME,
                    H5_INDEX_NAME, H5_ITER_DEC, link_visit_mixed_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit_by_name2 by index type name in decreasing order failed\n");
                PART_ERROR(H5Lvisit_by_name_cycles_link_name_decreasing);
            }

            if (i != 2 * LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS) {
                H5_FAILED();
                HDprintf("    some links were not visited!\n");
                PART_ERROR(H5Lvisit_by_name_cycles_link_name_decreasing);
            }

            PASSED();
#else
            SKIPPED();
            PART_EMPTY(H5Lvisit_by_name_cycles_link_name_decreasing);
#endif
        }
        PART_END(H5Lvisit_by_name_cycles_link_name_decreasing);

        PART_BEGIN(H5Lvisit_by_name_cycles_link_creation_increasing)
        {
            TESTING_2("H5Lvisit_by_name2 by creation order in increasing order");

            /* Reset the counter to the appropriate value for the next test */
            i = 2 * LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS;

            if (H5Lvisit_by_name2(
                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME,
                    H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_mixed_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit_by_name2 by index type creation order in increasing order failed\n");
                PART_ERROR(H5Lvisit_by_name_cycles_link_creation_increasing);
            }

            if (i != 3 * LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS) {
                H5_FAILED();
                HDprintf("    some links were not visited!\n");
                PART_ERROR(H5Lvisit_by_name_cycles_link_creation_increasing);
            }

            PASSED();
        }
        PART_END(H5Lvisit_by_name_cycles_link_creation_increasing);

        PART_BEGIN(H5Lvisit_by_name_cycles_link_creation_decreasing)
        {
            TESTING_2("H5Lvisit_by_name2 by creation order in decreasing order");

            /* Reset the counter to the appropriate value for the next test */
            i = 3 * LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS;

            if (H5Lvisit_by_name2(
                    file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME,
                    H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_mixed_links_cycles_cb, &i, H5P_DEFAULT) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit_by_name2 by index type creation order in decreasing order failed\n");
                PART_ERROR(H5Lvisit_by_name_cycles_link_creation_decreasing);
            }

            if (i != 4 * LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS) {
                H5_FAILED();
                HDprintf("    some links were not visited!\n");
                PART_ERROR(H5Lvisit_by_name_cycles_link_creation_decreasing);
            }

            PASSED();
        }
        PART_END(H5Lvisit_by_name_cycles_link_creation_decreasing);
    }
    END_MULTIPART;

    TESTING_2("test cleanup");

    if (H5Pclose(gcpl_id) < 0)
        TEST_ERROR;
    if (H5Gclose(subgroup1) < 0)
        TEST_ERROR;
    if (H5Gclose(subgroup2) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup: some IDs may still be H5I_INVALID_HID, so the
     * close calls are wrapped in H5E_BEGIN_TRY to suppress error output. */
    H5E_BEGIN_TRY
    {
        H5Pclose(gcpl_id);
        H5Gclose(subgroup1);
        H5Gclose(subgroup2);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
#else
    SKIPPED();
    return 0;
#endif
}
+
+/*
+ * A test to check that H5Lvisit(_by_name)2 fails when
+ * it is given invalid parameters.
+ */
+static int
+test_link_visit_invalid_params(void)
+{
+ herr_t err_ret = -1;
+ htri_t link_exists;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t subgroup1 = H5I_INVALID_HID, subgroup2 = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dset_dtype = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ char ext_link_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+
+ TESTING_MULTIPART("link visiting with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_EXTERNAL_LINKS) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, dataset, link, external link, iterate, or "
+ "creation order aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ HDsnprintf(ext_link_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s", EXTERNAL_LINK_TEST_FILE_NAME);
+
+ if ((file_id = H5Fcreate(ext_link_filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s' for external link to reference\n", ext_link_filename);
+ goto error;
+ }
+
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ if ((subgroup1 = H5Gcreate2(group_id, LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create first subgroup '%s'\n", LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME2);
+ goto error;
+ }
+
+ if ((subgroup2 = H5Gcreate2(group_id, LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME3, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create second subgroup '%s'\n", LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME3);
+ goto error;
+ }
+
+ if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((fspace_id = generate_random_dataspace(LINK_VISIT_INVALID_PARAMS_TEST_DSET_SPACE_RANK, NULL, NULL,
+ FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(subgroup1, LINK_VISIT_INVALID_PARAMS_TEST_DSET_NAME, dset_dtype, fspace_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create first dataset '%s'\n", LINK_VISIT_INVALID_PARAMS_TEST_DSET_NAME);
+ }
+
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(subgroup2, LINK_VISIT_INVALID_PARAMS_TEST_DSET_NAME, dset_dtype, fspace_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create second dataset '%s'\n", LINK_VISIT_INVALID_PARAMS_TEST_DSET_NAME);
+ }
+
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+
+ if (H5Lcreate_hard(subgroup1, LINK_VISIT_INVALID_PARAMS_TEST_DSET_NAME, subgroup1,
+ LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME1, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create first hard link '%s'\n", LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME1);
+ goto error;
+ }
+
+ if (H5Lcreate_soft(LINK_VISIT_INVALID_PARAMS_TEST_DSET_NAME, subgroup1,
+ LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME2);
+ goto error;
+ }
+#ifndef NO_EXTERNAL_LINKS
+ if (H5Lcreate_external(ext_link_filename, "/", subgroup2, LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME3,
+ H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create external link '%s'\n", LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME3);
+ goto error;
+ }
+#endif
+ if (H5Lcreate_hard(subgroup2, LINK_VISIT_INVALID_PARAMS_TEST_DSET_NAME, subgroup2,
+ LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME4, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create second hard link '%s'\n", LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME4);
+ goto error;
+ }
+
+ /* Verify the links have been created */
+ if ((link_exists = H5Lexists(subgroup1, LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME1, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if first link '%s' exists\n",
+ LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME1);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link 1 did not exist\n");
+ goto error;
+ }
+
+ if ((link_exists = H5Lexists(subgroup1, LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if second link '%s' exists\n",
+ LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME2);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link 2 did not exist\n");
+ goto error;
+ }
+#ifndef NO_EXTERNAL_LINKS
+ if ((link_exists = H5Lexists(subgroup2, LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if third link '%s' exists\n",
+ LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME3);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link 3 did not exist\n");
+ goto error;
+ }
+#endif
+ if ((link_exists = H5Lexists(subgroup2, LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME4, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if fourth link '%s' exists\n",
+ LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME4);
+ goto error;
+ }
+
+ if (!link_exists) {
+ H5_FAILED();
+ HDprintf(" link 4 did not exist\n");
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Lvisit_invalid_grp_id)
+ {
+ TESTING_2("H5Lvisit2 with an invalid group ID");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lvisit2(H5I_INVALID_HID, H5_INDEX_NAME, H5_ITER_INC, link_visit_invalid_params_cb,
+ NULL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit2 succeeded with an invalid group ID!\n");
+ PART_ERROR(H5Lvisit_invalid_grp_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_invalid_grp_id);
+
+ PART_BEGIN(H5Lvisit_invalid_index_type)
+ {
+ TESTING_2("H5Lvisit2 with an invalid index type");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret =
+ H5Lvisit2(group_id, H5_INDEX_UNKNOWN, H5_ITER_INC, link_visit_invalid_params_cb, NULL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit2 succeeded with invalid index type H5_INDEX_UNKNOWN!\n");
+ PART_ERROR(H5Lvisit_invalid_index_type);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lvisit2(group_id, H5_INDEX_N, H5_ITER_INC, link_visit_invalid_params_cb, NULL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit2 succeeded with invalid index type H5_INDEX_N!\n");
+ PART_ERROR(H5Lvisit_invalid_index_type);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_invalid_index_type);
+
+ PART_BEGIN(H5Lvisit_invalid_iter_order)
+ {
+ TESTING_2("H5Lvisit2 with an invalid iteration ordering");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret =
+ H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_UNKNOWN, link_visit_invalid_params_cb, NULL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit2 succeeded with invalid iteration ordering H5_ITER_UNKNOWN!\n");
+ PART_ERROR(H5Lvisit_invalid_iter_order);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_N, link_visit_invalid_params_cb, NULL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit2 succeeded with invalid iteration ordering H5_ITER_N!\n");
+ PART_ERROR(H5Lvisit_invalid_iter_order);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_invalid_iter_order);
+
+ PART_BEGIN(H5Lvisit_by_name_invalid_loc_id)
+ {
+ TESTING_2("H5Lvisit_by_name2 with an invalid location ID");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lvisit_by_name2(
+ H5I_INVALID_HID,
+ "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME, H5_INDEX_NAME,
+ H5_ITER_INC, link_visit_invalid_params_cb, NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 succeeded with an invalid location ID!\n");
+ PART_ERROR(H5Lvisit_by_name_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_by_name_invalid_loc_id);
+
+ PART_BEGIN(H5Lvisit_by_name_invalid_grp_name)
+ {
+ TESTING_2("H5Lvisit_by_name2 with an invalid group name");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lvisit_by_name2(file_id, NULL, H5_INDEX_NAME, H5_ITER_INC,
+ link_visit_invalid_params_cb, NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 succeeded with a NULL group name!\n");
+ PART_ERROR(H5Lvisit_by_name_invalid_grp_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lvisit_by_name2(file_id, "", H5_INDEX_NAME, H5_ITER_INC,
+ link_visit_invalid_params_cb, NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 succeeded with an invalid group name of ''!\n");
+ PART_ERROR(H5Lvisit_by_name_invalid_grp_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_by_name_invalid_grp_name);
+
+ PART_BEGIN(H5Lvisit_by_name_invalid_index_type)
+ {
+ TESTING_2("H5Lvisit_by_name2 with an invalid index type");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lvisit_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME,
+ H5_INDEX_UNKNOWN, H5_ITER_INC, link_visit_invalid_params_cb, NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 succeeded with invalid index type H5_INDEX_UNKNOWN!\n");
+ PART_ERROR(H5Lvisit_by_name_invalid_index_type);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lvisit_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME,
+ H5_INDEX_N, H5_ITER_INC, link_visit_invalid_params_cb, NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 succeeded with invalid index type H5_INDEX_N!\n");
+ PART_ERROR(H5Lvisit_by_name_invalid_index_type);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_by_name_invalid_index_type);
+
+ PART_BEGIN(H5Lvisit_by_name_invalid_iter_order)
+ {
+ TESTING_2("H5Lvisit_by_name2 with an invalid iteration ordering");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lvisit_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_UNKNOWN, link_visit_invalid_params_cb, NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(
+ " H5Lvisit_by_name2 succeeded with invalid iteration ordering H5_ITER_UNKNOWN!\n");
+ PART_ERROR(H5Lvisit_by_name_invalid_iter_order);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lvisit_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_N, link_visit_invalid_params_cb, NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 succeeded with invalid iteration ordering H5_ITER_N!\n");
+ PART_ERROR(H5Lvisit_by_name_invalid_iter_order);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_by_name_invalid_iter_order);
+
+ PART_BEGIN(H5Lvisit_by_name_invalid_lapl)
+ {
+ TESTING_2("H5Lvisit_by_name2 with an invalid LAPL");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Lvisit_by_name2(
+ file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME,
+ H5_INDEX_NAME, H5_ITER_INC, link_visit_invalid_params_cb, NULL, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Lvisit_by_name2 succeeded with an invalid LAPL!\n");
+ PART_ERROR(H5Lvisit_by_name_invalid_lapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Lvisit_by_name_invalid_lapl);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dset_dtype) < 0)
+ TEST_ERROR;
+ if (H5Gclose(subgroup1) < 0)
+ TEST_ERROR;
+ if (H5Gclose(subgroup2) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ H5Tclose(dset_dtype);
+ H5Dclose(dset_id);
+ H5Gclose(subgroup1);
+ H5Gclose(subgroup2);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that recursive link iteration
+ * performed on a group with no links in it is
+ * not problematic.
+ */
static int
test_link_visit_0_links(void)
{
    hid_t file_id         = H5I_INVALID_HID;
    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
    hid_t gcpl_id         = H5I_INVALID_HID;

    TESTING_MULTIPART("link visiting on group with subgroups containing 0 links");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
        SKIPPED();
        HDprintf("    API functions for basic file, group, link iterate, or creation order aren't supported "
                 "with this connector\n");
        return 0;
    }

    TESTING_2("test setup");

    /* Open the shared API test file created by the test framework */
    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, LINK_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open container group '%s'\n", LINK_TEST_GROUP_NAME);
        goto error;
    }

    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create a GCPL\n");
        goto error;
    }

    /* Creation-order tracking must be enabled for the H5_INDEX_CRT_ORDER sub-tests below */
    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
        H5_FAILED();
        HDprintf("    couldn't enable link creation order tracking and indexing on GCPL\n");
        goto error;
    }

    /* The subgroup is intentionally left empty: each visit call below must
     * succeed while invoking the callback zero times.
     */
    if ((group_id = H5Gcreate2(container_group, LINK_VISIT_0_LINKS_TEST_SUBGROUP_NAME, H5P_DEFAULT, gcpl_id,
                               H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create container subgroup '%s'\n", LINK_VISIT_0_LINKS_TEST_SUBGROUP_NAME);
        goto error;
    }

    PASSED();

    BEGIN_MULTIPART
    {
        PART_BEGIN(H5Lvisit_0_links_name_increasing)
        {
            TESTING_2("H5Lvisit2 by link name in increasing order");

            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_INC, link_visit_0_links_cb, NULL) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit2 by index type name in increasing order failed\n");
                PART_ERROR(H5Lvisit_0_links_name_increasing);
            }

            PASSED();
        }
        PART_END(H5Lvisit_0_links_name_increasing);

        PART_BEGIN(H5Lvisit_0_links_name_decreasing)
        {
            TESTING_2("H5Lvisit2 by link name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
            if (H5Lvisit2(group_id, H5_INDEX_NAME, H5_ITER_DEC, link_visit_0_links_cb, NULL) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit2 by index type name in decreasing order failed\n");
                PART_ERROR(H5Lvisit_0_links_name_decreasing);
            }

            PASSED();
#else
            SKIPPED();
            PART_EMPTY(H5Lvisit_0_links_name_decreasing);
#endif
        }
        PART_END(H5Lvisit_0_links_name_decreasing);

        PART_BEGIN(H5Lvisit_0_links_creation_increasing)
        {
            TESTING_2("H5Lvisit2 by creation order in increasing order");

            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_0_links_cb, NULL) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit2 by index type creation order in increasing order failed\n");
                PART_ERROR(H5Lvisit_0_links_creation_increasing);
            }

            PASSED();
        }
        PART_END(H5Lvisit_0_links_creation_increasing);

        PART_BEGIN(H5Lvisit_0_links_creation_decreasing)
        {
            TESTING_2("H5Lvisit2 by creation order in decreasing order");

            if (H5Lvisit2(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_0_links_cb, NULL) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit2 by index type creation order in decreasing order failed\n");
                PART_ERROR(H5Lvisit_0_links_creation_decreasing);
            }

            PASSED();
        }
        PART_END(H5Lvisit_0_links_creation_decreasing);

        PART_BEGIN(H5Lvisit_by_name_0_links_name_increasing)
        {
            TESTING_2("H5Lvisit_by_name2 by link name in increasing order");

            if (H5Lvisit_by_name2(file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_0_LINKS_TEST_SUBGROUP_NAME,
                                  H5_INDEX_NAME, H5_ITER_INC, link_visit_0_links_cb, NULL, H5P_DEFAULT) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit_by_name2 by index type name in increasing order failed\n");
                PART_ERROR(H5Lvisit_by_name_0_links_name_increasing);
            }

            PASSED();
        }
        PART_END(H5Lvisit_by_name_0_links_name_increasing);

        PART_BEGIN(H5Lvisit_by_name_0_links_name_decreasing)
        {
            TESTING_2("H5Lvisit_by_name2 by link name in decreasing order");
#ifndef NO_DECREASING_ALPHA_ITER_ORDER
            if (H5Lvisit_by_name2(file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_0_LINKS_TEST_SUBGROUP_NAME,
                                  H5_INDEX_NAME, H5_ITER_DEC, link_visit_0_links_cb, NULL, H5P_DEFAULT) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit_by_name2 by index type name in decreasing order failed\n");
                PART_ERROR(H5Lvisit_by_name_0_links_name_decreasing);
            }

            PASSED();
#else
            SKIPPED();
            PART_EMPTY(H5Lvisit_by_name_0_links_name_decreasing);
#endif
        }
        PART_END(H5Lvisit_by_name_0_links_name_decreasing);

        PART_BEGIN(H5Lvisit_by_name_0_links_creation_increasing)
        {
            TESTING_2("H5Lvisit_by_name2 by creation order in increasing order");

            if (H5Lvisit_by_name2(file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_0_LINKS_TEST_SUBGROUP_NAME,
                                  H5_INDEX_CRT_ORDER, H5_ITER_INC, link_visit_0_links_cb, NULL,
                                  H5P_DEFAULT) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit_by_name2 by index type creation order in increasing order failed\n");
                PART_ERROR(H5Lvisit_by_name_0_links_creation_increasing);
            }

            PASSED();
        }
        PART_END(H5Lvisit_by_name_0_links_creation_increasing);

        PART_BEGIN(H5Lvisit_by_name_0_links_creation_decreasing)
        {
            TESTING_2("H5Lvisit_by_name2 by creation order in decreasing order");

            if (H5Lvisit_by_name2(file_id, "/" LINK_TEST_GROUP_NAME "/" LINK_VISIT_0_LINKS_TEST_SUBGROUP_NAME,
                                  H5_INDEX_CRT_ORDER, H5_ITER_DEC, link_visit_0_links_cb, NULL,
                                  H5P_DEFAULT) < 0) {
                H5_FAILED();
                HDprintf("    H5Lvisit_by_name2 by index type creation order in decreasing order failed\n");
                PART_ERROR(H5Lvisit_by_name_0_links_creation_decreasing);
            }

            PASSED();
        }
        PART_END(H5Lvisit_by_name_0_links_creation_decreasing);
    }
    END_MULTIPART;

    TESTING_2("test cleanup");

    if (H5Pclose(gcpl_id) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup; errors are suppressed since we are already failing */
    H5E_BEGIN_TRY
    {
        H5Pclose(gcpl_id);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
+/*
+ * Link iteration callback for the hard links test which iterates
+ * through all of the links in the test group and checks to make sure
+ * their names and link classes match what is expected.
+ */
+static herr_t
+link_iter_hard_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+ size_t *i = (size_t *)op_data;
+ size_t counter_val = *((size_t *)op_data);
+ size_t test_iteration;
+ char expected_link_name[LINK_ITER_HARD_LINKS_TEST_BUF_SIZE];
+ herr_t ret_val = H5_ITER_CONT;
+
+ UNUSED(group_id);
+ UNUSED(op_data);
+
+ if (H5L_TYPE_HARD != info->type) {
+ ret_val = H5_ITER_ERROR;
+ HDprintf(" link type for link '%s' was not H5L_TYPE_HARD!\n", name);
+ goto done;
+ }
+
+ /*
+ * Four tests are run in the following order per link iteration API call:
+ *
+ * - iteration by link name in increasing order
+ * - iteration by link name in decreasing order
+ * - iteration by link creation order in increasing order
+ * - iteration by link creation order in decreasing order
+ *
+ * Based on how the test is written, this will mean that the dataset names
+ * will run in increasing order on the first and fourth tests and decreasing
+ * order on the second and third tests.
+ */
+ test_iteration = (counter_val / LINK_ITER_HARD_LINKS_TEST_NUM_LINKS);
+ if (test_iteration == 0 || test_iteration == 3) {
+ HDsnprintf(expected_link_name, LINK_ITER_HARD_LINKS_TEST_BUF_SIZE,
+ LINK_ITER_HARD_LINKS_TEST_LINK_NAME "%d",
+ (int)(counter_val % LINK_ITER_HARD_LINKS_TEST_NUM_LINKS));
+ }
+ else {
+ HDsnprintf(expected_link_name, LINK_ITER_HARD_LINKS_TEST_BUF_SIZE,
+ LINK_ITER_HARD_LINKS_TEST_LINK_NAME "%d",
+ (int)(LINK_ITER_HARD_LINKS_TEST_NUM_LINKS -
+ (counter_val % LINK_ITER_HARD_LINKS_TEST_NUM_LINKS) - 1));
+ }
+
+ if (HDstrncmp(name, expected_link_name, LINK_ITER_HARD_LINKS_TEST_BUF_SIZE)) {
+ HDprintf(" link name '%s' didn't match expected name '%s'\n", name, expected_link_name);
+ ret_val = H5_ITER_ERROR;
+ goto done;
+ }
+
+done:
+ (*i)++;
+
+ return ret_val;
+}
+
+/*
+ * Link iteration callback for the soft links test which iterates
+ * through all of the links in the test group and checks to make sure
+ * their names and link classes match what is expected.
+ */
+static herr_t
+link_iter_soft_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+ size_t *i = (size_t *)op_data;
+ size_t counter_val = *((size_t *)op_data);
+ size_t test_iteration;
+ char expected_link_name[LINK_ITER_SOFT_LINKS_TEST_BUF_SIZE];
+ herr_t ret_val = H5_ITER_CONT;
+
+ UNUSED(group_id);
+ UNUSED(op_data);
+
+ if (H5L_TYPE_SOFT != info->type) {
+ ret_val = H5_ITER_ERROR;
+ HDprintf(" link type for link '%s' was not H5L_TYPE_SOFT!\n", name);
+ goto done;
+ }
+
+ /*
+ * Four tests are run in the following order per link iteration API call:
+ *
+ * - iteration by link name in increasing order
+ * - iteration by link name in decreasing order
+ * - iteration by link creation order in increasing order
+ * - iteration by link creation order in decreasing order
+ *
+ * Based on how the test is written, this will mean that the link names
+ * will run in increasing order on the first and fourth tests and decreasing
+ * order on the second and third tests.
+ */
+ test_iteration = (counter_val / LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS);
+ if (test_iteration == 0 || test_iteration == 3) {
+ HDsnprintf(expected_link_name, LINK_ITER_SOFT_LINKS_TEST_BUF_SIZE,
+ LINK_ITER_SOFT_LINKS_TEST_LINK_NAME "%d",
+ (int)(counter_val % LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS));
+ }
+ else {
+ HDsnprintf(expected_link_name, LINK_ITER_SOFT_LINKS_TEST_BUF_SIZE,
+ LINK_ITER_SOFT_LINKS_TEST_LINK_NAME "%d",
+ (int)(LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS -
+ (counter_val % LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS) - 1));
+ }
+
+ if (HDstrncmp(name, expected_link_name, LINK_ITER_SOFT_LINKS_TEST_BUF_SIZE)) {
+ HDprintf(" link name '%s' didn't match expected name '%s'\n", name, expected_link_name);
+ ret_val = H5_ITER_ERROR;
+ goto done;
+ }
+
+done:
+ (*i)++;
+
+ return ret_val;
+}
+
+/*
+ * Link iteration callback for the external links test which iterates
+ * through all of the links in the test group and checks to make sure
+ * their names and link classes match what is expected.
+ */
+#ifndef NO_EXTERNAL_LINKS
+static herr_t
+link_iter_external_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+ size_t *i = (size_t *)op_data;
+ size_t counter_val = *((size_t *)op_data);
+ size_t test_iteration;
+ char expected_link_name[LINK_ITER_EXT_LINKS_TEST_BUF_SIZE];
+ herr_t ret_val = H5_ITER_CONT;
+
+ UNUSED(group_id);
+ UNUSED(op_data);
+
+ if (H5L_TYPE_EXTERNAL != info->type) {
+ ret_val = H5_ITER_ERROR;
+ HDprintf(" link type for link '%s' was not H5L_TYPE_EXTERNAL!\n", name);
+ goto done;
+ }
+
+ /*
+ * Four tests are run in the following order per link iteration API call:
+ *
+ * - iteration by link name in increasing order
+ * - iteration by link name in decreasing order
+ * - iteration by link creation order in increasing order
+ * - iteration by link creation order in decreasing order
+ *
+ * Based on how the test is written, this will mean that the link names
+ * will run in increasing order on the first and fourth tests and decreasing
+ * order on the second and third tests.
+ */
+ test_iteration = (counter_val / LINK_ITER_EXT_LINKS_TEST_NUM_LINKS);
+ if (test_iteration == 0 || test_iteration == 3) {
+ HDsnprintf(expected_link_name, LINK_ITER_EXT_LINKS_TEST_BUF_SIZE,
+ LINK_ITER_EXT_LINKS_TEST_LINK_NAME "%d",
+ (int)(counter_val % LINK_ITER_EXT_LINKS_TEST_NUM_LINKS));
+ }
+ else {
+ HDsnprintf(expected_link_name, LINK_ITER_EXT_LINKS_TEST_BUF_SIZE,
+ LINK_ITER_EXT_LINKS_TEST_LINK_NAME "%d",
+ (int)(LINK_ITER_EXT_LINKS_TEST_NUM_LINKS -
+ (counter_val % LINK_ITER_EXT_LINKS_TEST_NUM_LINKS) - 1));
+ }
+
+ if (HDstrncmp(name, expected_link_name, LINK_ITER_EXT_LINKS_TEST_BUF_SIZE)) {
+ HDprintf(" link name '%s' didn't match expected name '%s'\n", name, expected_link_name);
+ ret_val = H5_ITER_ERROR;
+ goto done;
+ }
+
+done:
+ (*i)++;
+
+ return ret_val;
+}
+#endif
+#ifndef NO_USER_DEFINED_LINKS
+static herr_t link_iter_ud_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data);
+#endif
+/*
+ * Link iteration callback for the mixed link types test which iterates
+ * through all of the links in the test group and checks to make sure
+ * their names and link classes match what is expected.
+ */
+#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
+static herr_t
+link_iter_mixed_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+ size_t *i = (size_t *)op_data;
+ size_t counter_val = *((size_t *)op_data);
+ herr_t ret_val = 0;
+
+ UNUSED(group_id);
+
+ if (!HDstrncmp(name, LINK_ITER_MIXED_LINKS_TEST_HARD_LINK_NAME,
+ strlen(LINK_ITER_MIXED_LINKS_TEST_HARD_LINK_NAME) + 1) &&
+ (counter_val == 1 || counter_val == 4 || counter_val == 6 || counter_val == 11)) {
+ if (H5L_TYPE_HARD != info->type) {
+ ret_val = -1;
+ HDprintf(" link type for link '%s' was not H5L_TYPE_HARD!\n", name);
+ }
+
+ goto done;
+ }
+ else if (!HDstrncmp(name, LINK_ITER_MIXED_LINKS_TEST_SOFT_LINK_NAME,
+ strlen(LINK_ITER_MIXED_LINKS_TEST_SOFT_LINK_NAME) + 1) &&
+ (counter_val == 2 || counter_val == 3 || counter_val == 7 || counter_val == 10)) {
+ if (H5L_TYPE_SOFT != info->type) {
+ ret_val = -1;
+ HDprintf(" link type for link '%s' was not H5L_TYPE_SOFT!\n", name);
+ }
+
+ goto done;
+ }
+ else if (!HDstrncmp(name, LINK_ITER_MIXED_LINKS_TEST_EXT_LINK_NAME,
+ strlen(LINK_ITER_MIXED_LINKS_TEST_EXT_LINK_NAME) + 1) &&
+ (counter_val == 0 || counter_val == 5 || counter_val == 8 || counter_val == 9)) {
+ if (H5L_TYPE_EXTERNAL != info->type) {
+ ret_val = -1;
+ HDprintf(" link type for link '%s' was not H5L_TYPE_EXTERNAL!\n", name);
+ }
+
+ goto done;
+ }
+
+ HDprintf(" link name '%s' didn't match known names or came in an incorrect order\n", name);
+
+ ret_val = -1;
+
+done:
+ (*i)++;
+
+ return ret_val;
+}
+#endif
+
+/*
+ * Link iteration callback for the H5Literate(_by_name)2 invalid
+ * parameters test which simply does nothing.
+ */
+static herr_t
+link_iter_invalid_params_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+ UNUSED(group_id);
+ UNUSED(name);
+ UNUSED(info);
+ UNUSED(op_data);
+
+ return 0;
+}
+
+/*
+ * Link iteration callback for the 0 links iteration test which
+ * simply does nothing.
+ */
+static herr_t
+link_iter_0_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+ UNUSED(group_id);
+ UNUSED(name);
+ UNUSED(info);
+ UNUSED(op_data);
+
+ return 0;
+}
+
+/*
+ * Link iteration callback to test that the index-saving behavior of H5Literate2
+ * works correctly.
+ */
+#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
+static herr_t
+link_iter_idx_saving_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+ int *broken = (int *)op_data;
+
+ UNUSED(group_id);
+
+ if (broken && !*broken &&
+ !HDstrncmp(name, LINK_ITER_MIXED_LINKS_TEST_SOFT_LINK_NAME,
+ strlen(LINK_ITER_MIXED_LINKS_TEST_SOFT_LINK_NAME) + 1)) {
+ return (*broken = 1);
+ }
+
+ if (!HDstrncmp(name, LINK_ITER_MIXED_LINKS_TEST_HARD_LINK_NAME,
+ strlen(LINK_ITER_MIXED_LINKS_TEST_HARD_LINK_NAME) + 1)) {
+ if (H5L_TYPE_HARD != info->type) {
+ H5_FAILED();
+ HDprintf(" link type did not match\n");
+ goto error;
+ }
+ }
+ else if (!HDstrncmp(name, LINK_ITER_MIXED_LINKS_TEST_SOFT_LINK_NAME,
+ strlen(LINK_ITER_MIXED_LINKS_TEST_SOFT_LINK_NAME) + 1)) {
+ if (H5L_TYPE_SOFT != info->type) {
+ H5_FAILED();
+ HDprintf(" link type did not match\n");
+ goto error;
+ }
+ }
+ else if (!HDstrncmp(name, LINK_ITER_MIXED_LINKS_TEST_EXT_LINK_NAME,
+ strlen(LINK_ITER_MIXED_LINKS_TEST_EXT_LINK_NAME) + 1)) {
+ if (H5L_TYPE_EXTERNAL != info->type) {
+ H5_FAILED();
+ HDprintf(" link type did not match\n");
+ goto error;
+ }
+ }
+ else {
+ H5_FAILED();
+ HDprintf(" link name didn't match known names\n");
+ goto error;
+ }
+
+ return 0;
+
+error:
+ return -1;
+}
+#endif
+
+/*
+ * Link visiting callback for the hard links + no cycles test which
+ * iterates recursively through all of the links in the test group and
+ * checks to make sure their names and link classes match what is expected.
+ */
+static herr_t
+link_visit_hard_links_no_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+ hbool_t is_subgroup_link;
+ size_t *i = (size_t *)op_data;
+ size_t counter_val = *((size_t *)op_data);
+ size_t test_iteration;
+ size_t subgroup_number;
+ size_t link_idx_val;
+ char expected_link_name[LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_BUF_SIZE];
+ herr_t ret_val = H5_ITER_CONT;
+
+ UNUSED(group_id);
+ UNUSED(op_data);
+
+ if (H5L_TYPE_HARD != info->type) {
+ ret_val = H5_ITER_ERROR;
+ HDprintf(" link type for link '%s' was not H5L_TYPE_HARD!\n", name);
+ goto done;
+ }
+
+ /*
+ * Four tests are run in the following order per link visiting API call:
+ *
+ * - visitation by link name in increasing order
+ * - visitation by link name in decreasing order
+ * - visitation by link creation order in increasing order
+ * - visitation by link creation order in decreasing order
+ *
+ * Based on how the test is written, this will mean that the dataset and group
+ * names will run in increasing order on the first and fourth tests and decreasing
+ * order on the second and third tests.
+ */
+ test_iteration = counter_val / LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+ /* Determine which subgroup is currently being processed */
+ subgroup_number =
+ /* Take the current counter value modulo the total number of links per test iteration (datasets +
+ subgroups) */
+ (counter_val % LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST)
+ /* and divide it by the number of links per subgroup + 1 to get the subgroup's index number. */
+ / (LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1);
+
+ /* Determine whether the current link points to the current subgroup itself */
+ is_subgroup_link = (counter_val % (LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1) == 0);
+ if (!is_subgroup_link) {
+ /* Determine the index number of this link within its containing subgroup */
+ link_idx_val =
+ /* Take the current counter value modulo the total number of links per test iteration (datasets +
+ subgroups) */
+ (counter_val % LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST)
+ /* and take it modulo the number of links per subgroup + 1, finally subtracting 1 to get the
+ link's index number. */
+ % (LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1) -
+ 1;
+ }
+
+ if (test_iteration == 0 || test_iteration == 3) {
+ if (is_subgroup_link) {
+ HDsnprintf(expected_link_name, LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME "%d", (int)subgroup_number);
+ }
+ else {
+ HDsnprintf(expected_link_name, LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME
+ "%d"
+ "/" LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_LINK_NAME "%d",
+ (int)subgroup_number, (int)link_idx_val);
+ }
+ }
+ else {
+ if (is_subgroup_link) {
+ HDsnprintf(expected_link_name, LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME "%d",
+ (int)(LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS - subgroup_number - 1));
+ }
+ else {
+ HDsnprintf(expected_link_name, LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME
+ "%d"
+ "/" LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_LINK_NAME "%d",
+ (int)(LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS - subgroup_number - 1),
+ (int)(LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP - link_idx_val - 1));
+ }
+ }
+
+ if (HDstrncmp(name, expected_link_name, LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_BUF_SIZE)) {
+ HDprintf(" link name '%s' didn't match expected name '%s'\n", name, expected_link_name);
+ ret_val = H5_ITER_ERROR;
+ goto done;
+ }
+
+done:
+ (*i)++;
+
+ return ret_val;
+}
+
+/*
+ * Link visiting callback for the soft links + no cycles test which
+ * iterates recursively through all of the links in the test group and
+ * checks to make sure their names and link classes match what is expected.
+ */
+static herr_t
+link_visit_soft_links_no_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+ hbool_t is_subgroup_link;
+ size_t *i = (size_t *)op_data;
+ size_t counter_val = *((size_t *)op_data);
+ size_t test_iteration;
+ size_t subgroup_number;
+ size_t link_idx_val;
+ char expected_link_name[LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_BUF_SIZE];
+ herr_t ret_val = H5_ITER_CONT;
+
+ UNUSED(group_id);
+ UNUSED(op_data);
+
+ /* Determine whether the current link points to the current subgroup itself */
+ is_subgroup_link = (counter_val % (LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1) == 0);
+
+ if (is_subgroup_link) {
+ if (H5L_TYPE_HARD != info->type) {
+ ret_val = H5_ITER_ERROR;
+ HDprintf(" link type for link '%s' was not H5L_TYPE_HARD!\n", name);
+ goto done;
+ }
+ }
+ else {
+ if (H5L_TYPE_SOFT != info->type) {
+ ret_val = H5_ITER_ERROR;
+ HDprintf(" link type for link '%s' was not H5L_TYPE_SOFT!\n", name);
+ goto done;
+ }
+ }
+
+ /*
+ * Four tests are run in the following order per link visiting API call:
+ *
+ * - visitation by link name in increasing order
+ * - visitation by link name in decreasing order
+ * - visitation by link creation order in increasing order
+ * - visitation by link creation order in decreasing order
+ *
+ * Based on how the test is written, this will mean that the link names will
+ * run in increasing order on the first and fourth tests and decreasing
+ * order on the second and third tests.
+ */
+ test_iteration = counter_val / LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+ /* Determine which subgroup is currently being processed */
+ subgroup_number =
+ /* Take the current counter value modulo the total number of links per test iteration (links +
+ subgroups) */
+ (counter_val % LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST)
+ /* and divide it by the number of links per subgroup + 1 to get the subgroup's index number. */
+ / (LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1);
+
+ if (!is_subgroup_link) {
+ /* Determine the index number of this link within its containing subgroup */
+ link_idx_val =
+ /* Take the current counter value modulo the total number of links per test iteration (links +
+ subgroups) */
+ (counter_val % LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST)
+ /* and take it modulo the number of links per subgroup + 1, finally subtracting 1 to get the
+ link's index number. */
+ % (LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1) -
+ 1;
+ }
+
+ if (test_iteration == 0 || test_iteration == 3) {
+ if (is_subgroup_link) {
+ HDsnprintf(expected_link_name, LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME "%d", (int)subgroup_number);
+ }
+ else {
+ HDsnprintf(expected_link_name, LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME
+ "%d"
+ "/" LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_LINK_NAME "%d",
+ (int)subgroup_number, (int)link_idx_val);
+ }
+ }
+ else {
+ if (is_subgroup_link) {
+ HDsnprintf(expected_link_name, LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME "%d",
+ (int)(LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS - subgroup_number - 1));
+ }
+ else {
+ HDsnprintf(expected_link_name, LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME
+ "%d"
+ "/" LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_LINK_NAME "%d",
+ (int)(LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS - subgroup_number - 1),
+ (int)(LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP - link_idx_val - 1));
+ }
+ }
+
+ if (HDstrncmp(name, expected_link_name, LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_BUF_SIZE)) {
+ HDprintf(" link name '%s' didn't match expected name '%s'\n", name, expected_link_name);
+ ret_val = H5_ITER_ERROR;
+ goto done;
+ }
+
+done:
+ (*i)++;
+
+ return ret_val;
+}
+
+/*
+ * Link visiting callback for the external links + no cycles test which
+ * iterates recursively through all of the links in the test group and
+ * checks to make sure their names and link classes match what is expected.
+ */
+#ifndef NO_EXTERNAL_LINKS
+static herr_t
+link_visit_external_links_no_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data)
+{
+ hbool_t is_subgroup_link;
+ size_t *i = (size_t *)op_data;
+ size_t counter_val = *((size_t *)op_data);
+ size_t test_iteration;
+ size_t subgroup_number;
+ size_t link_idx_val;
+ char expected_link_name[LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_BUF_SIZE];
+ herr_t ret_val = H5_ITER_CONT;
+
+ UNUSED(group_id);
+ UNUSED(op_data);
+
+ /* Determine whether the current link points to the current subgroup itself */
+ is_subgroup_link = (counter_val % (LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1) == 0);
+
+ if (is_subgroup_link) {
+ if (H5L_TYPE_HARD != info->type) {
+ ret_val = H5_ITER_ERROR;
+ HDprintf(" link type for link '%s' was not H5L_TYPE_HARD!\n", name);
+ goto done;
+ }
+ }
+ else {
+ if (H5L_TYPE_EXTERNAL != info->type) {
+ ret_val = H5_ITER_ERROR;
+ HDprintf(" link type for link '%s' was not H5L_TYPE_EXTERNAL!\n", name);
+ goto done;
+ }
+ }
+
+ /*
+ * Four tests are run in the following order per link visiting API call:
+ *
+ * - visitation by link name in increasing order
+ * - visitation by link name in decreasing order
+ * - visitation by link creation order in increasing order
+ * - visitation by link creation order in decreasing order
+ *
+ * Based on how the test is written, this will mean that the link names will
+ * run in increasing order on the first and fourth tests and decreasing
+ * order on the second and third tests.
+ */
+ test_iteration = counter_val / LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+ /* Determine which subgroup is currently being processed */
+ subgroup_number =
+ /* Take the current counter value modulo the total number of links per test iteration (links +
+ subgroups) */
+ (counter_val % LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST)
+ /* and divide it by the number of links per subgroup + 1 to get the subgroup's index number. */
+ / (LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1);
+
+ if (!is_subgroup_link) {
+ /* Determine the index number of this link within its containing subgroup */
+ link_idx_val =
+ /* Take the current counter value modulo the total number of links per test iteration (links +
+ subgroups) */
+ (counter_val % LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST)
+ /* and take it modulo the number of links per subgroup + 1, finally subtracting 1 to get the
+ link's index number. */
+ % (LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1) -
+ 1;
+ }
+
+ if (test_iteration == 0 || test_iteration == 3) {
+ if (is_subgroup_link) {
+ HDsnprintf(expected_link_name, LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME "%d", (int)subgroup_number);
+ }
+ else {
+ HDsnprintf(expected_link_name, LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME
+ "%d"
+ "/" LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_LINK_NAME "%d",
+ (int)subgroup_number, (int)link_idx_val);
+ }
+ }
+ else {
+ if (is_subgroup_link) {
+ HDsnprintf(expected_link_name, LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME "%d",
+ (int)(LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS - subgroup_number - 1));
+ }
+ else {
+ HDsnprintf(expected_link_name, LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_BUF_SIZE,
+ LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME
+ "%d"
+ "/" LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_LINK_NAME "%d",
+ (int)(LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS - subgroup_number - 1),
+ (int)(LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP - link_idx_val - 1));
+ }
+ }
+
+ if (HDstrncmp(name, expected_link_name, LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_BUF_SIZE)) {
+ HDprintf(" link name '%s' didn't match expected name '%s'\n", name, expected_link_name);
+ ret_val = H5_ITER_ERROR;
+ goto done;
+ }
+
+done:
+ (*i)++;
+
+ return ret_val;
+}
+#endif
+#ifndef NO_USER_DEFINED_LINKS
+static herr_t link_visit_ud_links_no_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+#endif
+/*
+ * Link visiting callback for the mixed link types + no cycles test which
+ * iterates recursively through all of the links in the test group and
+ * checks to make sure their names and link classes match what is expected.
+ */
+#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
+static herr_t
+link_visit_mixed_links_no_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+    size_t *i = (size_t *)op_data;
+    size_t counter_val = *((size_t *)op_data);
+    herr_t ret_val = 0;
+
+    UNUSED(group_id);
+    UNUSED(op_data);
+    /* NOTE(review): op_data is dereferenced above for the visit counter, so the
+     * UNUSED(op_data) marking appears redundant -- confirm before removing. */
+
+    /* Match the visited link against each known "<subgroup>/<link>" path and
+     * verify both its link type and that it is seen at one of the counter
+     * values expected across the four visiting orders (by name increasing and
+     * decreasing, by creation order increasing and decreasing). */
+    if (!HDstrncmp(name,
+                   LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2
+                   "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME1,
+                   strlen(LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2
+                          "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME1) +
+                       1) &&
+        (counter_val == 2 || counter_val == 14 || counter_val == 18 || counter_val == 30)) {
+        if (H5L_TYPE_HARD != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_HARD!\n",
+                     LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2
+                     "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME1);
+        }
+
+        goto done;
+    }
+    else if (!HDstrncmp(name,
+                        LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2
+                        "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME2,
+                        strlen(LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2
+                               "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME2) +
+                            1) &&
+             (counter_val == 3 || counter_val == 13 || counter_val == 19 || counter_val == 29)) {
+        if (H5L_TYPE_SOFT != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_SOFT!\n",
+                     LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2
+                     "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME2);
+        }
+
+        goto done;
+    }
+    else if (!HDstrncmp(name,
+                        LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3
+                        "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME3,
+                        strlen(LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3
+                               "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME3) +
+                            1) &&
+             (counter_val == 6 || counter_val == 10 || counter_val == 22 || counter_val == 26)) {
+        if (H5L_TYPE_EXTERNAL != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_EXTERNAL!\n",
+                     LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3
+                     "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME3);
+        }
+
+        goto done;
+    }
+    else if (!HDstrncmp(name,
+                        LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3
+                        "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME4,
+                        strlen(LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3
+                               "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME4) +
+                            1) &&
+             (counter_val == 7 || counter_val == 9 || counter_val == 23 || counter_val == 25)) {
+        if (H5L_TYPE_HARD != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_HARD!\n",
+                     LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3
+                     "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME4);
+        }
+
+        goto done;
+    }
+    else if (!HDstrncmp(name,
+                        LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2
+                        "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME,
+                        strlen(LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2
+                               "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME) +
+                            1) &&
+             (counter_val == 1 || counter_val == 15 || counter_val == 17 || counter_val == 31)) {
+        if (H5L_TYPE_HARD != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_HARD!\n",
+                     LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2
+                     "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME);
+        }
+
+        goto done;
+    }
+    else if (!HDstrncmp(name,
+                        LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3
+                        "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME2,
+                        strlen(LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3
+                               "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME2) +
+                            1) &&
+             (counter_val == 5 || counter_val == 11 || counter_val == 21 || counter_val == 27)) {
+        if (H5L_TYPE_HARD != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_HARD!\n",
+                     LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3
+                     "/" LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME2);
+        }
+
+        goto done;
+    }
+    else if (!HDstrncmp(name, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2,
+                        strlen(LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2) + 1) &&
+             (counter_val == 0 || counter_val == 12 || counter_val == 16 || counter_val == 28)) {
+        if (H5L_TYPE_HARD != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_HARD!\n",
+                     LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2);
+        }
+
+        goto done;
+    }
+    else if (!HDstrncmp(name, LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3,
+                        strlen(LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3) + 1) &&
+             (counter_val == 4 || counter_val == 8 || counter_val == 20 || counter_val == 24)) {
+        if (H5L_TYPE_HARD != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_HARD!\n",
+                     LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3);
+        }
+
+        goto done;
+    }
+
+    /* No branch matched: either an unknown link name or a known name seen at
+     * an unexpected counter value (wrong visiting order). */
+    HDprintf("    link name '%s' didn't match known names or came in an incorrect order\n", name);
+
+    ret_val = -1;
+
+done:
+    /* Always advance the visit counter, even on error */
+    (*i)++;
+
+    return ret_val;
+}
+#endif
+
+/*
+ * Link visiting callback for the hard links + cycles test which
+ * iterates recursively through all of the links in the test group and
+ * checks to make sure their names and link classes match what is expected.
+ */
+static herr_t
+link_visit_hard_links_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+    hbool_t is_subgroup_link;
+    size_t *i = (size_t *)op_data;
+    size_t counter_val = *((size_t *)op_data);
+    size_t test_iteration;
+    size_t subgroup_number;
+    size_t link_idx_val;
+    char expected_link_name[LINK_VISIT_HARD_LINKS_CYCLE_TEST_BUF_SIZE];
+    herr_t ret_val = H5_ITER_CONT;
+
+    UNUSED(group_id);
+    UNUSED(op_data);
+    /* NOTE(review): op_data is dereferenced above for the visit counter, so the
+     * UNUSED(op_data) marking appears redundant -- confirm before removing. */
+
+    /* Every link in this test (subgroup links included) must be a hard link */
+    if (H5L_TYPE_HARD != info->type) {
+        ret_val = H5_ITER_ERROR;
+        HDprintf("    link type for link '%s' was not H5L_TYPE_HARD!\n", name);
+        goto done;
+    }
+
+    /*
+     * Four tests are run in the following order per link visiting API call:
+     *
+     *  - visitation by link name in increasing order
+     *  - visitation by link name in decreasing order
+     *  - visitation by link creation order in increasing order
+     *  - visitation by link creation order in decreasing order
+     *
+     * Based on how the test is written, this will mean that the link and group
+     * names will run in increasing order on the first and fourth tests and decreasing
+     * order on the second and third tests.
+     */
+    test_iteration = counter_val / LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+    /* Determine which subgroup is currently being processed */
+    subgroup_number =
+        /* Take the current counter value modulo the total number of links per test iteration (links +
+           subgroups) */
+        (counter_val % LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST)
+        /* and divide it by the number of links per subgroup + 1 to get the subgroup's index number. */
+        / (LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1);
+
+    /* Determine whether the current link points to the current subgroup itself */
+    is_subgroup_link = (counter_val % (LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1) == 0);
+    /* link_idx_val is intentionally left unset for subgroup links; it is only
+     * read in the !is_subgroup_link branches below. */
+    if (!is_subgroup_link) {
+        /* Determine the index number of this link within its containing subgroup */
+        link_idx_val =
+            /* Take the current counter value modulo the total number of links per test iteration (links +
+               subgroups) */
+            (counter_val % LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST)
+                /* and take it modulo the number of links per subgroup + 1, finally subtracting 1 to get the
+                   link's index number. */
+                % (LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1) -
+            1;
+    }
+
+    /* Build the expected name: ascending numbering for tests 0 and 3,
+     * descending (mirrored) numbering for tests 1 and 2. */
+    if (test_iteration == 0 || test_iteration == 3) {
+        if (is_subgroup_link) {
+            HDsnprintf(expected_link_name, LINK_VISIT_HARD_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_HARD_LINKS_CYCLE_TEST_NESTED_GRP_NAME "%d", (int)subgroup_number);
+        }
+        else {
+            HDsnprintf(expected_link_name, LINK_VISIT_HARD_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_HARD_LINKS_CYCLE_TEST_NESTED_GRP_NAME
+                       "%d"
+                       "/" LINK_VISIT_HARD_LINKS_CYCLE_TEST_LINK_NAME "%d",
+                       (int)subgroup_number, (int)link_idx_val);
+        }
+    }
+    else {
+        if (is_subgroup_link) {
+            HDsnprintf(expected_link_name, LINK_VISIT_HARD_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_HARD_LINKS_CYCLE_TEST_NESTED_GRP_NAME "%d",
+                       (int)(LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_SUBGROUPS - subgroup_number - 1));
+        }
+        else {
+            HDsnprintf(expected_link_name, LINK_VISIT_HARD_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_HARD_LINKS_CYCLE_TEST_NESTED_GRP_NAME
+                       "%d"
+                       "/" LINK_VISIT_HARD_LINKS_CYCLE_TEST_LINK_NAME "%d",
+                       (int)(LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_SUBGROUPS - subgroup_number - 1),
+                       (int)(LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP - link_idx_val - 1));
+        }
+    }
+
+    if (HDstrncmp(name, expected_link_name, LINK_VISIT_HARD_LINKS_CYCLE_TEST_BUF_SIZE)) {
+        HDprintf("    link name '%s' didn't match expected name '%s'\n", name, expected_link_name);
+        ret_val = H5_ITER_ERROR;
+        goto done;
+    }
+
+done:
+    /* Always advance the visit counter, even on error */
+    (*i)++;
+
+    return ret_val;
+}
+
+/*
+ * Link visiting callback for the soft links + cycles test which
+ * iterates recursively through all of the links in the test group and
+ * checks to make sure their names and link classes match what is expected.
+ */
+static herr_t
+link_visit_soft_links_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+    hbool_t is_subgroup_link;
+    size_t *i = (size_t *)op_data;
+    size_t counter_val = *((size_t *)op_data);
+    size_t test_iteration;
+    size_t subgroup_number;
+    size_t link_idx_val;
+    char expected_link_name[LINK_VISIT_SOFT_LINKS_CYCLE_TEST_BUF_SIZE];
+    herr_t ret_val = H5_ITER_CONT;
+
+    UNUSED(group_id);
+    UNUSED(op_data);
+    /* NOTE(review): op_data is dereferenced above for the visit counter, so the
+     * UNUSED(op_data) marking appears redundant -- confirm before removing. */
+
+    /* Determine whether the current link points to the current subgroup itself */
+    is_subgroup_link = (counter_val % (LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1) == 0);
+
+    /* Subgroup links must be hard links; all other links must be soft links */
+    if (is_subgroup_link) {
+        if (H5L_TYPE_HARD != info->type) {
+            ret_val = H5_ITER_ERROR;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_HARD!\n", name);
+            goto done;
+        }
+    }
+    else {
+        if (H5L_TYPE_SOFT != info->type) {
+            ret_val = H5_ITER_ERROR;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_SOFT!\n", name);
+            goto done;
+        }
+    }
+
+    /*
+     * Four tests are run in the following order per link visiting API call:
+     *
+     *  - visitation by link name in increasing order
+     *  - visitation by link name in decreasing order
+     *  - visitation by link creation order in increasing order
+     *  - visitation by link creation order in decreasing order
+     *
+     * Based on how the test is written, this will mean that the link and group
+     * names will run in increasing order on the first and fourth tests and decreasing
+     * order on the second and third tests.
+     */
+    test_iteration = counter_val / LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+    /* Determine which subgroup is currently being processed */
+    subgroup_number =
+        /* Take the current counter value modulo the total number of links per test iteration (links +
+           subgroups) */
+        (counter_val % LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST)
+        /* and divide it by the number of links per subgroup + 1 to get the subgroup's index number. */
+        / (LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1);
+
+    /* link_idx_val stays unset for subgroup links; it is only read in the
+     * !is_subgroup_link branches below. */
+    if (!is_subgroup_link) {
+        /* Determine the index number of this link within its containing subgroup */
+        link_idx_val =
+            /* Take the current counter value modulo the total number of links per test iteration (links +
+               subgroups) */
+            (counter_val % LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST)
+                /* and take it modulo the number of links per subgroup + 1, finally subtracting 1 to get the
+                   link's index number. */
+                % (LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1) -
+            1;
+    }
+
+    /* Build the expected name: ascending numbering for tests 0 and 3,
+     * descending (mirrored) numbering for tests 1 and 2. */
+    if (test_iteration == 0 || test_iteration == 3) {
+        if (is_subgroup_link) {
+            HDsnprintf(expected_link_name, LINK_VISIT_SOFT_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NESTED_GRP_NAME "%d", (int)subgroup_number);
+        }
+        else {
+            HDsnprintf(expected_link_name, LINK_VISIT_SOFT_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NESTED_GRP_NAME
+                       "%d"
+                       "/" LINK_VISIT_SOFT_LINKS_CYCLE_TEST_LINK_NAME "%d",
+                       (int)subgroup_number, (int)link_idx_val);
+        }
+    }
+    else {
+        if (is_subgroup_link) {
+            HDsnprintf(expected_link_name, LINK_VISIT_SOFT_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NESTED_GRP_NAME "%d",
+                       (int)(LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_SUBGROUPS - subgroup_number - 1));
+        }
+        else {
+            HDsnprintf(expected_link_name, LINK_VISIT_SOFT_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NESTED_GRP_NAME
+                       "%d"
+                       "/" LINK_VISIT_SOFT_LINKS_CYCLE_TEST_LINK_NAME "%d",
+                       (int)(LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_SUBGROUPS - subgroup_number - 1),
+                       (int)(LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP - link_idx_val - 1));
+        }
+    }
+
+    if (HDstrncmp(name, expected_link_name, LINK_VISIT_SOFT_LINKS_CYCLE_TEST_BUF_SIZE)) {
+        HDprintf("    link name '%s' didn't match expected name '%s'\n", name, expected_link_name);
+        ret_val = H5_ITER_ERROR;
+        goto done;
+    }
+
+done:
+    /* Always advance the visit counter, even on error */
+    (*i)++;
+
+    return ret_val;
+}
+
+/*
+ * Link visiting callback for the external links + cycles test which
+ * iterates recursively through all of the links in the test group and
+ * checks to make sure their names and link classes match what is expected.
+ */
+#ifndef NO_EXTERNAL_LINKS
+static herr_t
+link_visit_external_links_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+    hbool_t is_subgroup_link;
+    size_t *i = (size_t *)op_data;
+    size_t counter_val = *((size_t *)op_data);
+    size_t test_iteration;
+    size_t subgroup_number;
+    size_t link_idx_val;
+    char expected_link_name[LINK_VISIT_EXT_LINKS_CYCLE_TEST_BUF_SIZE];
+    herr_t ret_val = H5_ITER_CONT;
+
+    UNUSED(group_id);
+    UNUSED(op_data);
+    /* NOTE(review): op_data is dereferenced above for the visit counter, so the
+     * UNUSED(op_data) marking appears redundant -- confirm before removing. */
+
+    /* Determine whether the current link points to the current subgroup itself */
+    is_subgroup_link = (counter_val % (LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1) == 0);
+
+    /* Subgroup links must be hard links; all other links must be external links */
+    if (is_subgroup_link) {
+        if (H5L_TYPE_HARD != info->type) {
+            ret_val = H5_ITER_ERROR;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_HARD!\n", name);
+            goto done;
+        }
+    }
+    else {
+        if (H5L_TYPE_EXTERNAL != info->type) {
+            ret_val = H5_ITER_ERROR;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_EXTERNAL!\n", name);
+            goto done;
+        }
+    }
+
+    /*
+     * Four tests are run in the following order per link visiting API call:
+     *
+     *  - visitation by link name in increasing order
+     *  - visitation by link name in decreasing order
+     *  - visitation by link creation order in increasing order
+     *  - visitation by link creation order in decreasing order
+     *
+     * Based on how the test is written, this will mean that the link and group
+     * names will run in increasing order on the first and fourth tests and decreasing
+     * order on the second and third tests.
+     */
+    test_iteration = counter_val / LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST;
+
+    /* Determine which subgroup is currently being processed */
+    subgroup_number =
+        /* Take the current counter value modulo the total number of links per test iteration (links +
+           subgroups) */
+        (counter_val % LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST)
+        /* and divide it by the number of links per subgroup + 1 to get the subgroup's index number. */
+        / (LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1);
+
+    /* link_idx_val stays unset for subgroup links; it is only read in the
+     * !is_subgroup_link branches below. */
+    if (!is_subgroup_link) {
+        /* Determine the index number of this link within its containing subgroup */
+        link_idx_val =
+            /* Take the current counter value modulo the total number of links per test iteration (links +
+               subgroups) */
+            (counter_val % LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST)
+                /* and take it modulo the number of links per subgroup + 1, finally subtracting 1 to get the
+                   link's index number. */
+                % (LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP + 1) -
+            1;
+    }
+
+    /* Build the expected name: ascending numbering for tests 0 and 3,
+     * descending (mirrored) numbering for tests 1 and 2. */
+    if (test_iteration == 0 || test_iteration == 3) {
+        if (is_subgroup_link) {
+            HDsnprintf(expected_link_name, LINK_VISIT_EXT_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_EXT_LINKS_CYCLE_TEST_NESTED_GRP_NAME "%d", (int)subgroup_number);
+        }
+        else {
+            HDsnprintf(expected_link_name, LINK_VISIT_EXT_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_EXT_LINKS_CYCLE_TEST_NESTED_GRP_NAME
+                       "%d"
+                       "/" LINK_VISIT_EXT_LINKS_CYCLE_TEST_LINK_NAME "%d",
+                       (int)subgroup_number, (int)link_idx_val);
+        }
+    }
+    else {
+        if (is_subgroup_link) {
+            HDsnprintf(expected_link_name, LINK_VISIT_EXT_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_EXT_LINKS_CYCLE_TEST_NESTED_GRP_NAME "%d",
+                       (int)(LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_SUBGROUPS - subgroup_number - 1));
+        }
+        else {
+            HDsnprintf(expected_link_name, LINK_VISIT_EXT_LINKS_CYCLE_TEST_BUF_SIZE,
+                       LINK_VISIT_EXT_LINKS_CYCLE_TEST_NESTED_GRP_NAME
+                       "%d"
+                       "/" LINK_VISIT_EXT_LINKS_CYCLE_TEST_LINK_NAME "%d",
+                       (int)(LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_SUBGROUPS - subgroup_number - 1),
+                       (int)(LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP - link_idx_val - 1));
+        }
+    }
+
+    if (HDstrncmp(name, expected_link_name, LINK_VISIT_EXT_LINKS_CYCLE_TEST_BUF_SIZE)) {
+        HDprintf("    link name '%s' didn't match expected name '%s'\n", name, expected_link_name);
+        ret_val = H5_ITER_ERROR;
+        goto done;
+    }
+
+done:
+    /* Always advance the visit counter, even on error */
+    (*i)++;
+
+    return ret_val;
+}
+#endif
+#ifndef NO_USER_DEFINED_LINKS
+static herr_t link_visit_ud_links_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info,
+ void *op_data);
+#endif
+/*
+ * Link visiting callback for the mixed link types + cycles test which
+ * iterates recursively through all of the links in the test group and
+ * checks to make sure their names and link classes match what is expected.
+ */
+#if !defined(NO_EXTERNAL_LINKS) && !defined(NO_USER_DEFINED_LINKS)
+static herr_t
+link_visit_mixed_links_cycles_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+    size_t *i = (size_t *)op_data;
+    size_t counter_val = *((size_t *)op_data);
+    herr_t ret_val = 0;
+
+    UNUSED(group_id);
+    UNUSED(op_data);
+    /* NOTE(review): op_data is dereferenced above for the visit counter, so the
+     * UNUSED(op_data) marking appears redundant -- confirm before removing. */
+
+    /* Match the visited link against each known "<subgroup>/<link>" path and
+     * verify both its link type and that it is seen at one of the counter
+     * values expected across the four visiting orders (by name increasing and
+     * decreasing, by creation order increasing and decreasing). */
+    if (!HDstrncmp(name,
+                   LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2
+                   "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME1,
+                   strlen(LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2
+                          "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME1) +
+                       1) &&
+        (counter_val == 1 || counter_val == 11 || counter_val == 13 || counter_val == 23)) {
+        if (H5L_TYPE_HARD != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_HARD!\n",
+                     LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2
+                     "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME1);
+        }
+
+        goto done;
+    }
+    else if (!HDstrncmp(name,
+                        LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2
+                        "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME2,
+                        strlen(LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2
+                               "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME2) +
+                            1) &&
+             (counter_val == 2 || counter_val == 10 || counter_val == 14 || counter_val == 22)) {
+        if (H5L_TYPE_SOFT != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_SOFT!\n",
+                     LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2
+                     "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME2);
+        }
+
+        goto done;
+    }
+    else if (!HDstrncmp(name,
+                        LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME3
+                        "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME3,
+                        strlen(LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME3
+                               "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME3) +
+                            1) &&
+             (counter_val == 4 || counter_val == 8 || counter_val == 16 || counter_val == 20)) {
+        if (H5L_TYPE_EXTERNAL != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_EXTERNAL!\n",
+                     LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME3
+                     "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME3);
+        }
+
+        goto done;
+    }
+    else if (!HDstrncmp(name,
+                        LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME3
+                        "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME4,
+                        strlen(LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME3
+                               "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME4) +
+                            1) &&
+             (counter_val == 5 || counter_val == 7 || counter_val == 17 || counter_val == 19)) {
+        if (H5L_TYPE_HARD != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_HARD!\n",
+                     LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME3
+                     "/" LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME4);
+        }
+
+        goto done;
+    }
+    else if (!HDstrncmp(name, LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2,
+                        strlen(LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2) + 1) &&
+             (counter_val == 0 || counter_val == 9 || counter_val == 12 || counter_val == 21)) {
+        if (H5L_TYPE_HARD != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_HARD!\n",
+                     LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2);
+        }
+
+        goto done;
+    }
+    else if (!HDstrncmp(name, LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME3,
+                        strlen(LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME3) + 1) &&
+             (counter_val == 3 || counter_val == 6 || counter_val == 15 || counter_val == 18)) {
+        if (H5L_TYPE_HARD != info->type) {
+            ret_val = -1;
+            HDprintf("    link type for link '%s' was not H5L_TYPE_HARD!\n",
+                     LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME3);
+        }
+
+        goto done;
+    }
+
+    /* No branch matched: either an unknown link name or a known name seen at
+     * an unexpected counter value (wrong visiting order). */
+    HDprintf("    link name '%s' didn't match known names or came in an incorrect order\n", name);
+
+    ret_val = -1;
+
+done:
+    /* Always advance the visit counter, even on error */
+    (*i)++;
+
+    return ret_val;
+}
+#endif
+
+/*
+ * Link visiting callback for the H5Lvisit(_by_name)2 invalid
+ * parameters test which simply does nothing.
+ */
+static herr_t
+link_visit_invalid_params_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+    /* Intentionally a no-op; all parameters are unused. */
+    UNUSED(group_id);
+    UNUSED(name);
+    UNUSED(info);
+    UNUSED(op_data);
+
+    return 0;
+}
+
+/*
+ * Link visiting callback for the 0 links visiting test which
+ * simply does nothing.
+ */
+static herr_t
+link_visit_0_links_cb(hid_t group_id, const char *name, const H5L_info2_t *info, void *op_data)
+{
+    /* Intentionally a no-op; all parameters are unused. */
+    UNUSED(group_id);
+    UNUSED(name);
+    UNUSED(info);
+    UNUSED(op_data);
+
+    return 0;
+}
+
+/*
+ * Cleanup temporary test files
+ */
+static void
+cleanup_files(void)
+{
+    /* Return values are intentionally ignored -- cleanup is best-effort */
+    H5Fdelete(EXTERNAL_LINK_TEST_FILE_NAME, H5P_DEFAULT);
+    H5Fdelete(EXTERNAL_LINK_INVALID_PARAMS_TEST_FILE_NAME, H5P_DEFAULT);
+}
+
+/*
+ * Entry point for the API link tests: runs every test in the link_tests
+ * table, cleans up temporary files, and returns the number of failed tests.
+ */
+int
+H5_api_link_test(void)
+{
+    size_t i;
+    int nerrors;
+
+    HDprintf("**********************************************\n");
+    HDprintf("*                                            *\n");
+    HDprintf("*              API Link Tests                *\n");
+    HDprintf("*                                            *\n");
+    HDprintf("**********************************************\n\n");
+
+    /* Each test function's non-zero return counts as exactly one error,
+     * regardless of its magnitude. */
+    for (i = 0, nerrors = 0; i < ARRAY_LENGTH(link_tests); i++) {
+        nerrors += (*link_tests[i])() ? 1 : 0;
+    }
+
+    HDprintf("\n");
+
+    HDprintf("Cleaning up testing files\n");
+    cleanup_files();
+
+    return nerrors;
+}
diff --git a/test/API/H5_api_link_test.h b/test/API/H5_api_link_test.h
new file mode 100644
index 0000000..e161517
--- /dev/null
+++ b/test/API/H5_api_link_test.h
@@ -0,0 +1,437 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_LINK_TEST_H
+#define H5_API_LINK_TEST_H
+
+#include "H5_api_test.h"
+
+int H5_api_link_test(void);
+
+/*********************************************
+ * *
+ * API Link test defines *
+ * *
+ *********************************************/
+
+#define HARD_LINK_TEST_GROUP_NAME "hard_link_creation_test"
+#define HARD_LINK_TEST_LINK_NAME "hard_link"
+
+#define HARD_LINK_TEST_GROUP_LONG_NAME "hard_link_long_name"
+#define MAX_NAME_LEN ((64 * 1024) + 1024)
+
+#define HARD_LINK_TEST_GROUP_MANY_NAME "hard_link_many_name"
+#define HARD_LINK_TEST_GROUP_MANY_FINAL_NAME "hard_link_final"
+#define HARD_LINK_TEST_GROUP_MANY_NAME_BUF_SIZE 1024
+
+#define H5L_SAME_LOC_TEST_GROUP_NAME "h5l_same_loc_test_group"
+#define H5L_SAME_LOC_TEST_LINK_NAME1 "h5l_same_loc_test_link1"
+#define H5L_SAME_LOC_TEST_LINK_NAME2 "h5l_same_loc_test_link2"
+
+#define HARD_LINK_INVALID_PARAMS_TEST_GROUP_NAME "hard_link_creation_invalid_params_test"
+#define HARD_LINK_INVALID_PARAMS_TEST_LINK_NAME "hard_link"
+
+#define SOFT_LINK_EXISTING_RELATIVE_TEST_SUBGROUP_NAME "soft_link_to_existing_relative_path_test"
+#define SOFT_LINK_EXISTING_RELATIVE_TEST_OBJECT_NAME "group"
+#define SOFT_LINK_EXISTING_RELATIVE_TEST_LINK_NAME "soft_link_to_existing_relative_path"
+
+#define SOFT_LINK_EXISTING_ABSOLUTE_TEST_SUBGROUP_NAME "soft_link_to_existing_absolute_path_test"
+#define SOFT_LINK_EXISTING_ABSOLUTE_TEST_LINK_NAME "soft_link_to_existing_absolute_path"
+
+#define SOFT_LINK_DANGLING_RELATIVE_TEST_SUBGROUP_NAME "soft_link_dangling_relative_path_test"
+#define SOFT_LINK_DANGLING_RELATIVE_TEST_OBJECT_NAME "group"
+#define SOFT_LINK_DANGLING_RELATIVE_TEST_LINK_NAME "soft_link_dangling_relative_path"
+
+#define SOFT_LINK_DANGLING_ABSOLUTE_TEST_SUBGROUP_NAME "soft_link_dangling_absolute_path_test"
+#define SOFT_LINK_DANGLING_ABSOLUTE_TEST_OBJECT_NAME "group"
+#define SOFT_LINK_DANGLING_ABSOLUTE_TEST_LINK_NAME "soft_link_dangling_absolute_path"
+
+#define SOFT_LINK_TEST_GROUP_LONG_NAME "soft_link_long_name"
+#define SOFT_LINK_TEST_LONG_OBJECT_NAME "soft_link_object_name"
+
+#define SOFT_LINK_TEST_GROUP_MANY_NAME "soft_link_many_name"
+#define SOFT_LINK_TEST_GROUP_MANY_FINAL_NAME "soft_link_final"
+#define SOFT_LINK_TEST_GROUP_MANY_NAME_BUF_SIZE 1024
+
+#define SOFT_LINK_INVALID_PARAMS_TEST_GROUP_NAME "soft_link_creation_invalid_params_test"
+#define SOFT_LINK_INVALID_PARAMS_TEST_LINK_NAME "soft_link_to_root"
+
+#define EXTERNAL_LINK_TEST_SUBGROUP_NAME "external_link_test"
+#define EXTERNAL_LINK_TEST_FILE_NAME "ext_link_file.h5"
+#define EXTERNAL_LINK_TEST_LINK_NAME "ext_link"
+
+#define EXTERNAL_LINK_TEST_DANGLING_SUBGROUP_NAME "external_link_dangling_test"
+#define EXTERNAL_LINK_TEST_DANGLING_LINK_NAME "dangling_ext_link"
+#define EXTERNAL_LINK_TEST_DANGLING_OBJECT_NAME "external_group"
+
+#define EXTERNAL_LINK_TEST_MULTI_NAME "external_link_multi_test"
+#define EXTERNAL_LINK_TEST_MULTI_NAME_BUF_SIZE 1024
+#define EXTERNAL_LINK_TEST_FILE_NAME2 "ext_link_file_2.h5"
+#define EXTERNAL_LINK_TEST_FILE_NAME3 "ext_link_file_3.h5"
+#define EXTERNAL_LINK_TEST_FILE_NAME4 "ext_link_file_4.h5"
+
+#define EXTERNAL_LINK_TEST_PING_PONG_NAME1 "ext_link_file_ping_pong_1.h5"
+#define EXTERNAL_LINK_TEST_PING_PONG_NAME2 "ext_link_file_ping_pong_2.h5"
+#define EXTERNAL_LINK_TEST_PING_PONG_NAME_BUF_SIZE 1024
+
+#define EXTERNAL_LINK_INVALID_PARAMS_TEST_GROUP_NAME "external_link_creation_invalid_params_test"
+#define EXTERNAL_LINK_INVALID_PARAMS_TEST_FILE_NAME "ext_link_invalid_params_file.h5"
+#define EXTERNAL_LINK_INVALID_PARAMS_TEST_LINK_NAME "external_link"
+
+#define UD_LINK_TEST_UDATA_MAX_SIZE 256
+#define UD_LINK_TEST_GROUP_NAME "ud_link_creation_test"
+#define UD_LINK_TEST_LINK_NAME "ud_link"
+
+#define UD_LINK_INVALID_PARAMS_TEST_UDATA_MAX_SIZE 256
+#define UD_LINK_INVALID_PARAMS_TEST_GROUP_NAME "ud_link_creation_invalid_params_test"
+#define UD_LINK_INVALID_PARAMS_TEST_LINK_NAME "ud_link"
+
+#define LINK_DELETE_TEST_NESTED_GRP_NAME "nested_grp"
+#define LINK_DELETE_TEST_HARD_LINK_NAME "hard_link"
+#define LINK_DELETE_TEST_NESTED_HARD_LINK_NAME \
+ LINK_DELETE_TEST_NESTED_GRP_NAME "/" LINK_DELETE_TEST_HARD_LINK_NAME
+#define LINK_DELETE_TEST_HARD_LINK_NAME2 LINK_DELETE_TEST_HARD_LINK_NAME "2"
+#define LINK_DELETE_TEST_HARD_LINK_NAME3 LINK_DELETE_TEST_HARD_LINK_NAME "3"
+#define LINK_DELETE_TEST_SOFT_LINK_NAME "soft_link"
+#define LINK_DELETE_TEST_SOFT_LINK_NAME2 LINK_DELETE_TEST_SOFT_LINK_NAME "2"
+#define LINK_DELETE_TEST_SOFT_LINK_NAME3 LINK_DELETE_TEST_SOFT_LINK_NAME "3"
+#define LINK_DELETE_TEST_EXTERNAL_LINK_NAME "external_link"
+#define LINK_DELETE_TEST_EXTERNAL_LINK_NAME2 LINK_DELETE_TEST_EXTERNAL_LINK_NAME "2"
+#define LINK_DELETE_TEST_EXTERNAL_LINK_NAME3 LINK_DELETE_TEST_EXTERNAL_LINK_NAME "3"
+#define LINK_DELETE_TEST_SUBGROUP_NAME "link_delete_test"
+#define LINK_DELETE_TEST_SUBGROUP1_NAME "H5Ldelete_hard_link"
+#define LINK_DELETE_TEST_NESTED_SUBGROUP_NAME1 "H5Ldelete_nested_hard_link"
+#define LINK_DELETE_TEST_SUBGROUP2_NAME "H5Ldelete_soft_link"
+#define LINK_DELETE_TEST_SUBGROUP3_NAME "H5Ldelete_external_link"
+#define LINK_DELETE_TEST_SUBGROUP4_NAME "H5Ldelete_ud_link"
+#define LINK_DELETE_TEST_SUBGROUP5_NAME "H5Ldelete_by_idx_hard_link_crt_order_increasing"
+#define LINK_DELETE_TEST_SUBGROUP6_NAME "H5Ldelete_by_idx_hard_link_crt_order_decreasing"
+#define LINK_DELETE_TEST_SUBGROUP7_NAME "H5Ldelete_by_idx_hard_link_name_order_increasing"
+#define LINK_DELETE_TEST_SUBGROUP8_NAME "H5Ldelete_by_idx_hard_link_name_order_decreasing"
+#define LINK_DELETE_TEST_SUBGROUP9_NAME "H5Ldelete_by_idx_soft_link_crt_order_increasing"
+#define LINK_DELETE_TEST_SUBGROUP10_NAME "H5Ldelete_by_idx_soft_link_crt_order_decreasing"
+#define LINK_DELETE_TEST_SUBGROUP11_NAME "H5Ldelete_by_idx_soft_link_name_order_increasing"
+#define LINK_DELETE_TEST_SUBGROUP12_NAME "H5Ldelete_by_idx_soft_link_name_order_decreasing"
+#define LINK_DELETE_TEST_SUBGROUP13_NAME "H5Ldelete_by_idx_external_link_crt_order_increasing"
+#define LINK_DELETE_TEST_SUBGROUP14_NAME "H5Ldelete_by_idx_external_link_crt_order_decreasing"
+#define LINK_DELETE_TEST_SUBGROUP15_NAME "H5Ldelete_by_idx_external_link_name_order_increasing"
+#define LINK_DELETE_TEST_SUBGROUP16_NAME "H5Ldelete_by_idx_external_link_name_order_decreasing"
+#define LINK_DELETE_TEST_SUBGROUP17_NAME "H5Ldelete_by_idx_ud_link_crt_order_increasing"
+#define LINK_DELETE_TEST_SUBGROUP18_NAME "H5Ldelete_by_idx_ud_link_crt_order_decreasing"
+#define LINK_DELETE_TEST_SUBGROUP19_NAME "H5Ldelete_by_idx_ud_link_name_order_increasing"
+#define LINK_DELETE_TEST_SUBGROUP20_NAME "H5Ldelete_by_idx_ud_link_name_order_decreasing"
+
+#define LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_SUBGROUP_NAME "H5Ldelete_reset_grp_max_crt_order_test"
+#define LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_SUBGROUP1_NAME "H5Ldelete_bottom_up"
+#define LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_SUBGROUP2_NAME "H5Ldelete_top_down"
+#define LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS 5
+#define LINK_DELETE_RESET_MAX_CRT_ORDER_TEST_BUF_SIZE 1024
+
+#define LINK_DELETE_INVALID_PARAMS_TEST_HARD_LINK_NAME "hard_link"
+#define LINK_DELETE_INVALID_PARAMS_TEST_GROUP_NAME "link_deletion_invalid_params_test"
+
+#define COPY_LINK_TEST_LINK_VAL_BUF_SIZE 1024
+#define COPY_LINK_TEST_EXTERNAL_LINK_NAME "external_link"
+#define COPY_LINK_TEST_EXTERNAL_LINK_NAME2 COPY_LINK_TEST_EXTERNAL_LINK_NAME "2"
+#define COPY_LINK_TEST_EXTERNAL_LINK_NAME3 COPY_LINK_TEST_EXTERNAL_LINK_NAME "3"
+#define COPY_LINK_TEST_EXTERNAL_LINK_COPY_NAME "external_link_copy"
+#define COPY_LINK_TEST_EXTERNAL_LINK_COPY_NAME2 COPY_LINK_TEST_EXTERNAL_LINK_COPY_NAME "2"
+#define COPY_LINK_TEST_EXTERNAL_LINK_SAME_LOC_NAME "external_link_same_loc"
+#define COPY_LINK_TEST_EXTERNAL_LINK_SAME_LOC_NAME2 COPY_LINK_TEST_EXTERNAL_LINK_SAME_LOC_NAME "2"
+#define COPY_LINK_TEST_HARD_LINK_NAME "hard_link"
+#define COPY_LINK_TEST_HARD_LINK_NAME2 COPY_LINK_TEST_HARD_LINK_NAME "2"
+#define COPY_LINK_TEST_HARD_LINK_NAME3 COPY_LINK_TEST_HARD_LINK_NAME "3"
+#define COPY_LINK_TEST_HARD_LINK_COPY_NAME "hard_link_copy"
+#define COPY_LINK_TEST_HARD_LINK_COPY_NAME2 COPY_LINK_TEST_HARD_LINK_COPY_NAME "2"
+#define COPY_LINK_TEST_HARD_LINK_SAME_LOC_NAME "hard_link_same_loc"
+#define COPY_LINK_TEST_HARD_LINK_SAME_LOC_NAME2 COPY_LINK_TEST_HARD_LINK_SAME_LOC_NAME "2"
+#define COPY_LINK_TEST_SOFT_LINK_TARGET_PATH "/" LINK_TEST_GROUP_NAME "/" COPY_LINK_TEST_SUBGROUP_NAME
+#define COPY_LINK_TEST_SOFT_LINK_NAME "soft_link"
+#define COPY_LINK_TEST_SOFT_LINK_NAME2 COPY_LINK_TEST_SOFT_LINK_NAME "2"
+#define COPY_LINK_TEST_SOFT_LINK_NAME3 COPY_LINK_TEST_SOFT_LINK_NAME "3"
+#define COPY_LINK_TEST_SOFT_LINK_COPY_NAME "soft_link_copy"
+#define COPY_LINK_TEST_SOFT_LINK_COPY_NAME2 COPY_LINK_TEST_SOFT_LINK_COPY_NAME "2"
+#define COPY_LINK_TEST_SOFT_LINK_SAME_LOC_NAME "soft_link_same_loc"
+#define COPY_LINK_TEST_SOFT_LINK_SAME_LOC_NAME2 COPY_LINK_TEST_SOFT_LINK_SAME_LOC_NAME "2"
+#define COPY_LINK_TEST_SRC_GROUP_NAME "src_group"
+#define COPY_LINK_TEST_DST_GROUP_NAME "dst_group"
+#define COPY_LINK_TEST_SUBGROUP_NAME "link_copy_test"
+
+#define COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_COPY_NAME "hard_link_copy"
+#define COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME "hard_link"
+#define COPY_LINK_INVALID_PARAMS_TEST_HARD_LINK_NEW_NAME "hard_link_new"
+#define COPY_LINK_INVALID_PARAMS_TEST_SRC_GROUP_NAME "src_group"
+#define COPY_LINK_INVALID_PARAMS_TEST_DST_GROUP_NAME "dst_group"
+#define COPY_LINK_INVALID_PARAMS_TEST_SUBGROUP_NAME "link_copy_invalid_params_test"
+
+#define MOVE_LINK_TEST_LINK_VAL_BUF_SIZE 1024
+#define MOVE_LINK_TEST_EXTERN_LINK_NAME "extern_link"
+#define MOVE_LINK_TEST_EXTERN_LINK_NAME2 MOVE_LINK_TEST_EXTERN_LINK_NAME "2"
+#define MOVE_LINK_TEST_EXTERN_LINK_NAME3 MOVE_LINK_TEST_EXTERN_LINK_NAME "3"
+#define MOVE_LINK_TEST_EXTERN_LINK_NAME4 MOVE_LINK_TEST_EXTERN_LINK_NAME "4"
+#define MOVE_LINK_TEST_EXTERN_LINK_NEW_NAME "extern_link_renamed"
+#define MOVE_LINK_TEST_EXTERN_LINK_SAME_LOC_NAME "extern_link_same_loc"
+#define MOVE_LINK_TEST_HARD_LINK_NAME "hard_link"
+#define MOVE_LINK_TEST_HARD_LINK_NAME2 MOVE_LINK_TEST_HARD_LINK_NAME "2"
+#define MOVE_LINK_TEST_HARD_LINK_NAME3 MOVE_LINK_TEST_HARD_LINK_NAME "3"
+#define MOVE_LINK_TEST_HARD_LINK_NAME4 MOVE_LINK_TEST_HARD_LINK_NAME "4"
+#define MOVE_LINK_TEST_HARD_LINK_NEW_NAME "hard_link_renamed"
+#define MOVE_LINK_TEST_HARD_LINK_SAME_LOC_NAME "hard_link_same_loc"
+#define MOVE_LINK_TEST_SOFT_LINK_TARGET_PATH "/" LINK_TEST_GROUP_NAME "/" MOVE_LINK_TEST_SUBGROUP_NAME
+#define MOVE_LINK_TEST_SOFT_LINK_NAME "soft_link"
+#define MOVE_LINK_TEST_SOFT_LINK_NAME2 MOVE_LINK_TEST_SOFT_LINK_NAME "2"
+#define MOVE_LINK_TEST_SOFT_LINK_NAME3 MOVE_LINK_TEST_SOFT_LINK_NAME "3"
+#define MOVE_LINK_TEST_SOFT_LINK_NAME4 MOVE_LINK_TEST_SOFT_LINK_NAME "4"
+#define MOVE_LINK_TEST_SOFT_LINK_NEW_NAME "soft_link_renamed"
+#define MOVE_LINK_TEST_SOFT_LINK_SAME_LOC_NAME "soft_link_same_loc"
+#define MOVE_LINK_TEST_SRC_GROUP_NAME "src_group"
+#define MOVE_LINK_TEST_DST_GROUP_NAME "dst_group"
+#define MOVE_LINK_TEST_SUBGROUP_NAME "link_move_test"
+
+#define MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_SUBGROUP_NAME "link_move_into_group_with_links_test"
+#define MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_SRC_GRP_NAME "source_group"
+#define MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_DST_GRP_NAME "dest_group"
+#define MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_NUM_LINKS 5
+#define MOVE_LINK_INTO_GRP_WITH_LINKS_TEST_BUF_SIZE 1024
+
+#define MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_SUBGROUP_NAME "H5Lmove_reset_grp_max_crt_order_test"
+#define MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_SRC_GRP_NAME "source_group"
+#define MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_DST_GRP_NAME "dest_group"
+#define MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_NUM_LINKS 5
+#define MOVE_LINK_RESET_MAX_CRT_ORDER_TEST_BUF_SIZE 1024
+
+#define MOVE_LINK_INVALID_PARAMS_TEST_HARD_LINK_NAME "hard_link"
+#define MOVE_LINK_INVALID_PARAMS_TEST_SRC_GROUP_NAME "src_grp"
+#define MOVE_LINK_INVALID_PARAMS_TEST_DST_GROUP_NAME "dst_grp"
+#define MOVE_LINK_INVALID_PARAMS_TEST_SUBGROUP_NAME "link_move_invalid_params_test"
+
+#define GET_LINK_VAL_TEST_LINK_VAL_BUF_SIZE 1024
+#define GET_LINK_VAL_TEST_SUBGROUP_NAME "get_link_val_test"
+#define GET_LINK_VAL_TEST_SOFT_LINK_NAME "soft_link"
+#define GET_LINK_VAL_TEST_SOFT_LINK_NAME2 GET_LINK_VAL_TEST_SOFT_LINK_NAME "2"
+#define GET_LINK_VAL_TEST_SOFT_LINK_NAME3 GET_LINK_VAL_TEST_SOFT_LINK_NAME "3"
+#define GET_LINK_VAL_TEST_EXT_LINK_NAME "ext_link"
+#define GET_LINK_VAL_TEST_EXT_LINK_NAME2 GET_LINK_VAL_TEST_EXT_LINK_NAME "2"
+#define GET_LINK_VAL_TEST_EXT_LINK_NAME3 GET_LINK_VAL_TEST_EXT_LINK_NAME "3"
+#define GET_LINK_VAL_TEST_SUBGROUP1_NAME "H5Lget_val_soft_link"
+#define GET_LINK_VAL_TEST_SUBGROUP2_NAME "H5Lget_val_external_link"
+#define GET_LINK_VAL_TEST_SUBGROUP3_NAME "H5Lget_val_ud_link"
+#define GET_LINK_VAL_TEST_SUBGROUP4_NAME "H5Lget_val_by_idx_soft_link_crt_order_increasing"
+#define GET_LINK_VAL_TEST_SUBGROUP5_NAME "H5Lget_val_by_idx_soft_link_crt_order_decreasing"
+#define GET_LINK_VAL_TEST_SUBGROUP6_NAME "H5Lget_val_by_idx_soft_link_name_order_increasing"
+#define GET_LINK_VAL_TEST_SUBGROUP7_NAME "H5Lget_val_by_idx_soft_link_name_order_decreasing"
+#define GET_LINK_VAL_TEST_SUBGROUP8_NAME "H5Lget_val_by_idx_external_link_crt_order_increasing"
+#define GET_LINK_VAL_TEST_SUBGROUP9_NAME "H5Lget_val_by_idx_external_link_crt_order_decreasing"
+#define GET_LINK_VAL_TEST_SUBGROUP10_NAME "H5Lget_val_by_idx_external_link_name_order_increasing"
+#define GET_LINK_VAL_TEST_SUBGROUP11_NAME "H5Lget_val_by_idx_external_link_name_order_decreasing"
+#define GET_LINK_VAL_TEST_SUBGROUP12_NAME "H5Lget_val_by_idx_ud_link_crt_order_increasing"
+#define GET_LINK_VAL_TEST_SUBGROUP13_NAME "H5Lget_val_by_idx_ud_link_crt_order_decreasing"
+#define GET_LINK_VAL_TEST_SUBGROUP14_NAME "H5Lget_val_by_idx_ud_link_name_order_increasing"
+#define GET_LINK_VAL_TEST_SUBGROUP15_NAME "H5Lget_val_by_idx_ud_link_name_order_decreasing"
+
+#define GET_LINK_VAL_INVALID_PARAMS_TEST_SOFT_LINK_NAME "soft_link"
+#define GET_LINK_VAL_INVALID_PARAMS_TEST_GROUP_NAME "get_link_val_invalid_params_test"
+
+#define GET_LINK_INFO_TEST_HARD_LINK_NAME "hard_link"
+#define GET_LINK_INFO_TEST_HARD_LINK_NAME2 GET_LINK_INFO_TEST_HARD_LINK_NAME "2"
+#define GET_LINK_INFO_TEST_HARD_LINK_NAME3 GET_LINK_INFO_TEST_HARD_LINK_NAME "3"
+#define GET_LINK_INFO_TEST_SOFT_LINK_NAME "soft_link"
+#define GET_LINK_INFO_TEST_SOFT_LINK_NAME2 GET_LINK_INFO_TEST_SOFT_LINK_NAME "2"
+#define GET_LINK_INFO_TEST_SOFT_LINK_NAME3 GET_LINK_INFO_TEST_SOFT_LINK_NAME "3"
+#define GET_LINK_INFO_TEST_EXT_LINK_NAME "ext_link"
+#define GET_LINK_INFO_TEST_EXT_LINK_NAME2 GET_LINK_INFO_TEST_EXT_LINK_NAME "2"
+#define GET_LINK_INFO_TEST_EXT_LINK_NAME3 GET_LINK_INFO_TEST_EXT_LINK_NAME "3"
+#define GET_LINK_INFO_TEST_GROUP_NAME "get_link_info_test"
+#define GET_LINK_INFO_TEST_SUBGROUP1_NAME "H5Lget_info_hard_link"
+#define GET_LINK_INFO_TEST_SUBGROUP2_NAME "H5Lget_info_soft_link"
+#define GET_LINK_INFO_TEST_SUBGROUP3_NAME "H5Lget_info_external_link"
+#define GET_LINK_INFO_TEST_SUBGROUP4_NAME "H5Lget_info_ud_link"
+#define GET_LINK_INFO_TEST_SUBGROUP5_NAME "H5Lget_info_by_idx_hard_link_crt_order_increasing"
+#define GET_LINK_INFO_TEST_SUBGROUP6_NAME "H5Lget_info_by_idx_hard_link_crt_order_decreasing"
+#define GET_LINK_INFO_TEST_SUBGROUP7_NAME "H5Lget_info_by_idx_hard_link_name_order_increasing"
+#define GET_LINK_INFO_TEST_SUBGROUP8_NAME "H5Lget_info_by_idx_hard_link_name_order_decreasing"
+#define GET_LINK_INFO_TEST_SUBGROUP9_NAME "H5Lget_info_by_idx_soft_link_crt_order_increasing"
+#define GET_LINK_INFO_TEST_SUBGROUP10_NAME "H5Lget_info_by_idx_soft_link_crt_order_decreasing"
+#define GET_LINK_INFO_TEST_SUBGROUP11_NAME "H5Lget_info_by_idx_soft_link_name_order_increasing"
+#define GET_LINK_INFO_TEST_SUBGROUP12_NAME "H5Lget_info_by_idx_soft_link_name_order_decreasing"
+#define GET_LINK_INFO_TEST_SUBGROUP13_NAME "H5Lget_info_by_idx_external_link_crt_order_increasing"
+#define GET_LINK_INFO_TEST_SUBGROUP14_NAME "H5Lget_info_by_idx_external_link_crt_order_decreasing"
+#define GET_LINK_INFO_TEST_SUBGROUP15_NAME "H5Lget_info_by_idx_external_link_name_order_increasing"
+#define GET_LINK_INFO_TEST_SUBGROUP16_NAME "H5Lget_info_by_idx_external_link_name_order_decreasing"
+#define GET_LINK_INFO_TEST_SUBGROUP17_NAME "H5Lget_info_by_idx_ud_link_crt_order_increasing"
+#define GET_LINK_INFO_TEST_SUBGROUP18_NAME "H5Lget_info_by_idx_ud_link_crt_order_decreasing"
+#define GET_LINK_INFO_TEST_SUBGROUP19_NAME "H5Lget_info_by_idx_ud_link_name_order_increasing"
+#define GET_LINK_INFO_TEST_SUBGROUP20_NAME "H5Lget_info_by_idx_ud_link_name_order_decreasing"
+
+#define GET_LINK_INFO_INVALID_PARAMS_TEST_HARD_LINK_NAME "hard_link"
+#define GET_LINK_INFO_INVALID_PARAMS_TEST_GROUP_NAME "get_link_info_invalid_params_test"
+
+#define GET_LINK_NAME_TEST_EXTERNAL_SUBGROUP_NAME "get_external_link_name_crt_order_increasing"
+#define GET_LINK_NAME_TEST_EXTERNAL_SUBGROUP_NAME2 "get_external_link_name_crt_order_decreasing"
+#define GET_LINK_NAME_TEST_EXTERNAL_SUBGROUP_NAME3 "get_external_link_name_alpha_order_increasing"
+#define GET_LINK_NAME_TEST_EXTERNAL_SUBGROUP_NAME4 "get_external_link_name_alpha_order_decreasing"
+#define GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME "external_link"
+#define GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME2 GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME "2"
+#define GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME3 GET_LINK_NAME_TEST_EXTERNAL_LINK_NAME "3"
+#define GET_LINK_NAME_TEST_HARD_SUBGROUP_NAME "get_hard_link_name_crt_order_increasing"
+#define GET_LINK_NAME_TEST_HARD_SUBGROUP_NAME2 "get_hard_link_name_crt_order_decreasing"
+#define GET_LINK_NAME_TEST_HARD_SUBGROUP_NAME3 "get_hard_link_name_alpha_order_increasing"
+#define GET_LINK_NAME_TEST_HARD_SUBGROUP_NAME4 "get_hard_link_name_alpha_order_decreasing"
+#define GET_LINK_NAME_TEST_HARD_LINK_NAME "hard_link"
+#define GET_LINK_NAME_TEST_HARD_LINK_NAME2 GET_LINK_NAME_TEST_HARD_LINK_NAME "2"
+#define GET_LINK_NAME_TEST_HARD_LINK_NAME3 GET_LINK_NAME_TEST_HARD_LINK_NAME "3"
+#define GET_LINK_NAME_TEST_SOFT_SUBGROUP_NAME "get_soft_link_name_crt_order_increasing"
+#define GET_LINK_NAME_TEST_SOFT_SUBGROUP_NAME2 "get_soft_link_name_crt_order_decreasing"
+#define GET_LINK_NAME_TEST_SOFT_SUBGROUP_NAME3 "get_soft_link_name_alpha_order_increasing"
+#define GET_LINK_NAME_TEST_SOFT_SUBGROUP_NAME4 "get_soft_link_name_alpha_order_decreasing"
+#define GET_LINK_NAME_TEST_SOFT_LINK_NAME "soft_link"
+#define GET_LINK_NAME_TEST_SOFT_LINK_NAME2 GET_LINK_NAME_TEST_SOFT_LINK_NAME "2"
+#define GET_LINK_NAME_TEST_SOFT_LINK_NAME3 GET_LINK_NAME_TEST_SOFT_LINK_NAME "3"
+#define GET_LINK_NAME_TEST_GROUP_NAME "get_link_name_test"
+#define GET_LINK_NAME_TEST_BUF_SIZE 256
+
+#define GET_LINK_NAME_INVALID_PARAMS_TEST_HARD_LINK_NAME "test_link1"
+#define GET_LINK_NAME_INVALID_PARAMS_TEST_GROUP_NAME "get_link_name_invalid_params_test"
+
+#define LINK_ITER_HARD_LINKS_TEST_DSET_SPACE_RANK 2
+#define LINK_ITER_HARD_LINKS_TEST_SUBGROUP_NAME "link_iter_hard_links_test"
+#define LINK_ITER_HARD_LINKS_TEST_LINK_NAME "hard_link"
+#define LINK_ITER_HARD_LINKS_TEST_NUM_LINKS 10
+#define LINK_ITER_HARD_LINKS_TEST_BUF_SIZE 64
+
+#define LINK_ITER_SOFT_LINKS_TEST_SUBGROUP_NAME "link_iter_soft_links_test"
+#define LINK_ITER_SOFT_LINKS_TEST_LINK_NAME "soft_link"
+#define LINK_ITER_SOFT_LINKS_TEST_NUM_LINKS 10
+#define LINK_ITER_SOFT_LINKS_TEST_BUF_SIZE 64
+
+#define LINK_ITER_EXT_LINKS_TEST_SUBGROUP_NAME "link_iter_ext_links_test"
+#define LINK_ITER_EXT_LINKS_TEST_LINK_NAME "external_link"
+#define LINK_ITER_EXT_LINKS_TEST_NUM_LINKS 10
+#define LINK_ITER_EXT_LINKS_TEST_BUF_SIZE 64
+
+#define LINK_ITER_MIXED_LINKS_TEST_DSET_SPACE_RANK 2
+#define LINK_ITER_MIXED_LINKS_TEST_HARD_LINK_NAME "hard_link1"
+#define LINK_ITER_MIXED_LINKS_TEST_SOFT_LINK_NAME "soft_link1"
+#define LINK_ITER_MIXED_LINKS_TEST_EXT_LINK_NAME "ext_link1"
+#define LINK_ITER_MIXED_LINKS_TEST_SUBGROUP_NAME "link_iter_mixed_links_test"
+#define LINK_ITER_MIXED_LINKS_TEST_NUM_LINKS 3
+
+#define LINK_ITER_INVALID_PARAMS_TEST_DSET_SPACE_RANK 2
+#define LINK_ITER_INVALID_PARAMS_TEST_HARD_LINK_NAME "hard_link1"
+#define LINK_ITER_INVALID_PARAMS_TEST_SOFT_LINK_NAME "soft_link1"
+#define LINK_ITER_INVALID_PARAMS_TEST_EXT_LINK_NAME "ext_link1"
+#define LINK_ITER_INVALID_PARAMS_TEST_SUBGROUP_NAME "link_iter_invalid_params_test"
+
+#define LINK_ITER_0_LINKS_TEST_SUBGROUP_NAME "link_iter_0_links_test"
+
+#define LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST \
+ ((LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP * \
+ LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS) + \
+ LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS)
+#define LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP 10
+#define LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_DSET_SPACE_RANK 2
+#define LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS 5
+#define LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME "subgroup"
+#define LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME "link_visit_hard_links_no_cycle_test"
+#define LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_LINK_NAME "hard_link"
+#define LINK_VISIT_HARD_LINKS_NO_CYCLE_TEST_BUF_SIZE 256
+
+#define LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST \
+ ((LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP * \
+ LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS) + \
+ LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS)
+#define LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP 10
+#define LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS 5
+#define LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME "subgroup"
+#define LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME "link_visit_soft_links_no_cycle_test"
+#define LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_LINK_NAME "soft_link"
+#define LINK_VISIT_SOFT_LINKS_NO_CYCLE_TEST_BUF_SIZE 256
+
+#define LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_TEST \
+ ((LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP * \
+ LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS) + \
+ LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS)
+#define LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_LINKS_PER_GROUP 10
+#define LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NUM_SUBGROUPS 5
+#define LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_NESTED_GRP_NAME "subgroup"
+#define LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME "link_visit_ext_links_no_cycle_test"
+#define LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_LINK_NAME "external_link"
+#define LINK_VISIT_EXT_LINKS_NO_CYCLE_TEST_BUF_SIZE 256
+
+#define LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_SPACE_RANK 2
+#define LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME "dset"
+#define LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_DSET_NAME2 "dset2"
+#define LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME "link_visit_mixed_links_no_cycle_test"
+#define LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME2 "link_visit_subgroup1"
+#define LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_SUBGROUP_NAME3 "link_visit_subgroup2"
+#define LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME1 "hard_link1"
+#define LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME2 "soft_link1"
+#define LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME3 "ext_link1"
+#define LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_LINK_NAME4 "hard_link2"
+#define LINK_VISIT_MIXED_LINKS_NO_CYCLE_TEST_NUM_LINKS 8
+
+#define LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST \
+ ((LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP * \
+ LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_SUBGROUPS) + \
+ LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_SUBGROUPS)
+#define LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP 10
+#define LINK_VISIT_HARD_LINKS_CYCLE_TEST_NUM_SUBGROUPS 5
+#define LINK_VISIT_HARD_LINKS_CYCLE_TEST_NESTED_GRP_NAME "subgroup"
+#define LINK_VISIT_HARD_LINKS_CYCLE_TEST_SUBGROUP_NAME "link_visit_hard_links_cycle_test"
+#define LINK_VISIT_HARD_LINKS_CYCLE_TEST_LINK_NAME "hard_link"
+#define LINK_VISIT_HARD_LINKS_CYCLE_TEST_BUF_SIZE 256
+
+#define LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST \
+ ((LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP * \
+ LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_SUBGROUPS) + \
+ LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_SUBGROUPS)
+#define LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP 10
+#define LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NUM_SUBGROUPS 5
+#define LINK_VISIT_SOFT_LINKS_CYCLE_TEST_NESTED_GRP_NAME "subgroup"
+#define LINK_VISIT_SOFT_LINKS_CYCLE_TEST_SUBGROUP_NAME "link_visit_soft_links_cycle_test"
+#define LINK_VISIT_SOFT_LINKS_CYCLE_TEST_LINK_NAME "soft_link"
+#define LINK_VISIT_SOFT_LINKS_CYCLE_TEST_BUF_SIZE 256
+
+#define LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_TEST \
+ ((LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP * LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_SUBGROUPS) + \
+ LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_SUBGROUPS)
+#define LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_LINKS_PER_GROUP 10
+#define LINK_VISIT_EXT_LINKS_CYCLE_TEST_NUM_SUBGROUPS 5
+#define LINK_VISIT_EXT_LINKS_CYCLE_TEST_NESTED_GRP_NAME "subgroup"
+#define LINK_VISIT_EXT_LINKS_CYCLE_TEST_SUBGROUP_NAME "link_visit_ext_links_cycle_test"
+#define LINK_VISIT_EXT_LINKS_CYCLE_TEST_LINK_NAME "external_link"
+#define LINK_VISIT_EXT_LINKS_CYCLE_TEST_BUF_SIZE 256
+
+#define LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME "link_visit_mixed_links_cycle_test"
+#define LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME2 "link_visit_subgroup1"
+#define LINK_VISIT_MIXED_LINKS_CYCLE_TEST_SUBGROUP_NAME3 "link_visit_subgroup2"
+#define LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME1 "hard_link1"
+#define LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME2 "soft_link1"
+#define LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME3 "ext_link1"
+#define LINK_VISIT_MIXED_LINKS_CYCLE_TEST_LINK_NAME4 "hard_link2"
+#define LINK_VISIT_MIXED_LINKS_CYCLE_TEST_NUM_LINKS 6
+
+#define LINK_VISIT_INVALID_PARAMS_TEST_DSET_SPACE_RANK 2
+#define LINK_VISIT_INVALID_PARAMS_TEST_DSET_NAME "dset"
+#define LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME "link_visit_invalid_params_test"
+#define LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME2 "link_visit_subgroup1"
+#define LINK_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME3 "link_visit_subgroup2"
+#define LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME1 "hard_link1"
+#define LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME2 "soft_link1"
+#define LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME3 "ext_link1"
+#define LINK_VISIT_INVALID_PARAMS_TEST_LINK_NAME4 "hard_link2"
+
+#define LINK_VISIT_0_LINKS_TEST_SUBGROUP_NAME "link_visit_0_links_test"
+#define LINK_VISIT_0_LINKS_TEST_SUBGROUP_NAME2 "link_visit_0_links_test_subgroup1"
+#define LINK_VISIT_0_LINKS_TEST_SUBGROUP_NAME3 "link_visit_0_links_test_subgroup2"
+
+#endif
diff --git a/test/API/H5_api_misc_test.c b/test/API/H5_api_misc_test.c
new file mode 100644
index 0000000..256550b
--- /dev/null
+++ b/test/API/H5_api_misc_test.c
@@ -0,0 +1,1060 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_misc_test.h"
+
/* Prototypes for the miscellaneous tests run by H5_api_misc_test() */
static int test_open_link_without_leading_slash(void);
static int test_object_creation_by_absolute_path(void);
static int test_absolute_vs_relative_path(void);
static int test_dot_for_object_name(void);
static int test_symbols_in_compound_field_name(void);
static int test_double_init_term(void);

/*
 * The array of miscellaneous tests to be performed.
 * H5_api_misc_test() invokes each entry in array order.
 */
static int (*misc_tests[])(void) = {
    test_open_link_without_leading_slash, test_object_creation_by_absolute_path,
    test_absolute_vs_relative_path, test_dot_for_object_name,
    test_symbols_in_compound_field_name, test_double_init_term,
};
+
/*
 * A test to check that a dataset can be opened through a multi-component
 * pathname that does NOT begin with a leading '/', i.e. a path resolved
 * relative to the group the lookup starts from (here, the root group).
 *
 * Returns 0 on success, 1 on failure.
 */
static int
test_open_link_without_leading_slash(void)
{
    hid_t file_id = H5I_INVALID_HID;
    hid_t container_group = H5I_INVALID_HID;
    hid_t group_id = H5I_INVALID_HID;
    hid_t dset_id = H5I_INVALID_HID;
    hid_t dset_dtype = H5I_INVALID_HID;
    hid_t space_id = H5I_INVALID_HID;

    TESTING("opening a link without a leading slash");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
        SKIPPED();
        HDprintf(
            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
        return 0;
    }

    /* Setup: create a dataset inside the top-level miscellaneous test group */
    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open file\n");
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, MISCELLANEOUS_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open container group\n");
        goto error;
    }

    if ((space_id = generate_random_dataspace(OPEN_LINK_WITHOUT_SLASH_DSET_SPACE_RANK, NULL, NULL, FALSE)) <
        0)
        TEST_ERROR;

    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
        TEST_ERROR;

    if ((dset_id = H5Dcreate2(container_group, OPEN_LINK_WITHOUT_SLASH_DSET_NAME, dset_dtype, space_id,
                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create dataset\n");
        goto error;
    }

    /* Close everything and re-open the file so the lookup below starts fresh */
    if (H5Sclose(space_id) < 0)
        TEST_ERROR;
    if (H5Dclose(dset_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open file\n");
        goto error;
    }

    if ((group_id = H5Gopen2(file_id, "/", H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open root group\n");
        goto error;
    }

    /* The actual check: open the dataset by a path with no leading slash,
     * resolved relative to the (root) group handle */
    if ((dset_id = H5Dopen2(group_id, MISCELLANEOUS_TEST_GROUP_NAME "/" OPEN_LINK_WITHOUT_SLASH_DSET_NAME,
                            H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open dataset\n");
        goto error;
    }

    if (H5Tclose(dset_dtype) < 0)
        TEST_ERROR;
    if (H5Dclose(dset_id) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup of any IDs still open; errors here are suppressed */
    H5E_BEGIN_TRY
    {
        H5Sclose(space_id);
        H5Tclose(dset_dtype);
        H5Dclose(dset_id);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
/*
 * A test to check that objects (groups, datasets and committed
 * datatypes) can be created via absolute pathnames starting from the
 * file's root group, rather than via paths relative to an intermediate
 * group handle. Each multipart sub-test verifies creation with
 * H5Lexists() on the full absolute path.
 *
 * Returns 0 on success, 1 on failure.
 */
static int
test_object_creation_by_absolute_path(void)
{
    htri_t link_exists;
    hid_t file_id = H5I_INVALID_HID;
    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID, sub_group_id = H5I_INVALID_HID;
    hid_t dset_id = H5I_INVALID_HID;
    hid_t fspace_id = H5I_INVALID_HID;
    hid_t dtype_id = H5I_INVALID_HID;
    hid_t dset_dtype = H5I_INVALID_HID;

    TESTING_MULTIPART("object creation by absolute path");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
        SKIPPED();
        HDprintf("    API functions for basic file, group, dataset, link, or stored datatype aren't "
                 "supported with this connector\n");
        return 0;
    }

    TESTING_2("test setup");

    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open file\n");
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, MISCELLANEOUS_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open container group\n");
        goto error;
    }

    /* Start by creating a group to hold all the objects for this test */
    if ((group_id = H5Gcreate2(container_group, OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_CONTAINER_GROUP_NAME,
                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create container group\n");
        goto error;
    }

    /* Verify the container group is reachable by its absolute path */
    if ((link_exists = H5Lexists(file_id,
                                 "/" MISCELLANEOUS_TEST_GROUP_NAME
                                 "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_CONTAINER_GROUP_NAME,
                                 H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't determine if link exists\n");
        goto error;
    }

    if (!link_exists) {
        H5_FAILED();
        HDprintf("    container group didn't exist at the correct location\n");
        goto error;
    }

    PASSED();

    BEGIN_MULTIPART
    {
        PART_BEGIN(H5Gcreate_using_absolute_path)
        {
            TESTING_2("creation of group using absolute pathname");

            /* Try to create a group under the container group by using an absolute pathname */
            if ((sub_group_id = H5Gcreate2(file_id,
                                           "/" MISCELLANEOUS_TEST_GROUP_NAME
                                           "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_CONTAINER_GROUP_NAME
                                           "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_SUBGROUP_NAME,
                                           H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't create subgroup by absolute pathname\n");
                PART_ERROR(H5Gcreate_using_absolute_path);
            }

            if ((link_exists = H5Lexists(file_id,
                                         "/" MISCELLANEOUS_TEST_GROUP_NAME
                                         "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_CONTAINER_GROUP_NAME
                                         "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_SUBGROUP_NAME,
                                         H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't determine if link exists\n");
                PART_ERROR(H5Gcreate_using_absolute_path);
            }

            if (!link_exists) {
                H5_FAILED();
                HDprintf("    subgroup didn't exist at the correct location\n");
                PART_ERROR(H5Gcreate_using_absolute_path);
            }

            PASSED();
        }
        PART_END(H5Gcreate_using_absolute_path);

        PART_BEGIN(H5Dcreate_using_absolute_path)
        {
            TESTING_2("creation of dataset using absolute pathname");

            /* Try to create a dataset nested at the end of this group chain by using an absolute pathname */
            if ((fspace_id = generate_random_dataspace(OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_DSET_SPACE_RANK,
                                                       NULL, NULL, FALSE)) < 0) {
                H5_FAILED();
                HDprintf("    failed to generate dataspace\n");
                PART_ERROR(H5Dcreate_using_absolute_path);
            }

            if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
                H5_FAILED();
                HDprintf("    failed to generate datatype\n");
                PART_ERROR(H5Dcreate_using_absolute_path);
            }

            if ((dset_id = H5Dcreate2(file_id,
                                      "/" MISCELLANEOUS_TEST_GROUP_NAME
                                      "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_CONTAINER_GROUP_NAME
                                      "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_SUBGROUP_NAME
                                      "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_DSET_NAME,
                                      dset_dtype, fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't create dataset\n");
                PART_ERROR(H5Dcreate_using_absolute_path);
            }

            if ((link_exists = H5Lexists(file_id,
                                         "/" MISCELLANEOUS_TEST_GROUP_NAME
                                         "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_CONTAINER_GROUP_NAME
                                         "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_SUBGROUP_NAME
                                         "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_DSET_NAME,
                                         H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't determine if link exists\n");
                PART_ERROR(H5Dcreate_using_absolute_path);
            }

            if (!link_exists) {
                H5_FAILED();
                HDprintf("    dataset didn't exist at the correct location\n");
                PART_ERROR(H5Dcreate_using_absolute_path);
            }

            PASSED();
        }
        PART_END(H5Dcreate_using_absolute_path);

        PART_BEGIN(H5Tcommit_using_absolute_path)
        {
            TESTING_2("creation of committed datatype using absolute pathname");

            /* Try to create a committed datatype in the same fashion as the preceding dataset */
            if ((dtype_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't create datatype\n");
                PART_ERROR(H5Tcommit_using_absolute_path);
            }

            if (H5Tcommit2(file_id,
                           "/" MISCELLANEOUS_TEST_GROUP_NAME
                           "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_CONTAINER_GROUP_NAME
                           "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_SUBGROUP_NAME
                           "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_DTYPE_NAME,
                           dtype_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) < 0) {
                H5_FAILED();
                HDprintf("    couldn't commit datatype\n");
                PART_ERROR(H5Tcommit_using_absolute_path);
            }

            if ((link_exists = H5Lexists(file_id,
                                         "/" MISCELLANEOUS_TEST_GROUP_NAME
                                         "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_CONTAINER_GROUP_NAME
                                         "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_SUBGROUP_NAME
                                         "/" OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_DTYPE_NAME,
                                         H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't determine if link exists\n");
                PART_ERROR(H5Tcommit_using_absolute_path);
            }

            if (!link_exists) {
                H5_FAILED();
                HDprintf("    datatype didn't exist at the correct location\n");
                PART_ERROR(H5Tcommit_using_absolute_path);
            }

            PASSED();
        }
        PART_END(H5Tcommit_using_absolute_path);
    }
    END_MULTIPART;

    TESTING_2("test cleanup");

    if (H5Sclose(fspace_id) < 0)
        TEST_ERROR;
    if (H5Tclose(dset_dtype) < 0)
        TEST_ERROR;
    if (H5Dclose(dset_id) < 0)
        TEST_ERROR;
    if (H5Tclose(dtype_id) < 0)
        TEST_ERROR;
    if (H5Gclose(sub_group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup of any IDs still open; errors here are suppressed */
    H5E_BEGIN_TRY
    {
        H5Sclose(fspace_id);
        H5Tclose(dset_dtype);
        H5Dclose(dset_id);
        H5Tclose(dtype_id);
        H5Gclose(sub_group_id);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
/*
 * A test to check that datasets can be created through several
 * equivalent path forms: an absolute path starting from the root
 * group, an absolute path passed to a non-root group handle (the
 * leading '/' should override the starting group), a plain relative
 * path, and relative paths with a leading '.' component. Each creation
 * is verified with H5Lexists() on the full absolute path.
 *
 * XXX: Add testing for groups
 *
 * Returns 0 on success, 1 on failure.
 */
static int
test_absolute_vs_relative_path(void)
{
    htri_t link_exists;
    hid_t file_id = H5I_INVALID_HID;
    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
    hid_t dset_id1 = H5I_INVALID_HID, dset_id2 = H5I_INVALID_HID, dset_id3 = H5I_INVALID_HID,
          dset_id4 = H5I_INVALID_HID, dset_id5 = H5I_INVALID_HID, dset_id6 = H5I_INVALID_HID;
    hid_t dset_dtype1 = H5I_INVALID_HID, dset_dtype2 = H5I_INVALID_HID, dset_dtype3 = H5I_INVALID_HID,
          dset_dtype4 = H5I_INVALID_HID, dset_dtype5 = H5I_INVALID_HID, dset_dtype6 = H5I_INVALID_HID;
    hid_t fspace_id = H5I_INVALID_HID;

    TESTING_MULTIPART("absolute vs. relative pathnames");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC)) {
        SKIPPED();
        HDprintf("    API functions for basic file, group, dataset, or link aren't supported with this "
                 "connector\n");
        return 0;
    }

    TESTING_2("test setup");

    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open file\n");
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, MISCELLANEOUS_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open container group\n");
        goto error;
    }

    /* Start by creating a group to be used during some of the dataset creation operations */
    if ((group_id = H5Gcreate2(container_group, ABSOLUTE_VS_RELATIVE_PATH_TEST_CONTAINER_GROUP_NAME,
                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create container group\n");
        goto error;
    }

    /* One shared dataspace, but a separate datatype per dataset so each
     * can be closed independently during cleanup */
    if ((fspace_id = generate_random_dataspace(ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET_SPACE_RANK, NULL, NULL,
                                               FALSE)) < 0)
        TEST_ERROR;

    if ((dset_dtype1 = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
        TEST_ERROR;
    if ((dset_dtype2 = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
        TEST_ERROR;
    if ((dset_dtype3 = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
        TEST_ERROR;
    if ((dset_dtype4 = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
        TEST_ERROR;
    if ((dset_dtype5 = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
        TEST_ERROR;
    if ((dset_dtype6 = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
        TEST_ERROR;

    PASSED();

    BEGIN_MULTIPART
    {
        PART_BEGIN(H5Dcreate_absolute_from_root)
        {
            TESTING_2("dataset creation by absolute path from root group");

            /* Create a dataset by absolute path in the form "/group/dataset" starting from the root group */
            if ((dset_id1 = H5Dcreate2(file_id,
                                       "/" MISCELLANEOUS_TEST_GROUP_NAME
                                       "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_CONTAINER_GROUP_NAME
                                       "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET1_NAME,
                                       dset_dtype1, fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't create dataset by absolute path from root\n");
                PART_ERROR(H5Dcreate_absolute_from_root);
            }

            if ((link_exists = H5Lexists(file_id,
                                         "/" MISCELLANEOUS_TEST_GROUP_NAME
                                         "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_CONTAINER_GROUP_NAME
                                         "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET1_NAME,
                                         H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't determine if link exists\n");
                PART_ERROR(H5Dcreate_absolute_from_root);
            }

            if (!link_exists) {
                H5_FAILED();
                HDprintf("    didn't exist at the correct location\n");
                PART_ERROR(H5Dcreate_absolute_from_root);
            }

            PASSED();
        }
        PART_END(H5Dcreate_absolute_from_root);

        PART_BEGIN(H5Dcreate_absolute_from_nonroot)
        {
            TESTING_2("dataset creation by absolute path from non-root group");

            /* Create a dataset by absolute path in the form "/group/dataset" starting from the container
             * group */
            if ((dset_id4 = H5Dcreate2(container_group,
                                       "/" MISCELLANEOUS_TEST_GROUP_NAME
                                       "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_CONTAINER_GROUP_NAME
                                       "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET4_NAME,
                                       dset_dtype4, fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't create dataset by absolute path from container group\n");
                PART_ERROR(H5Dcreate_absolute_from_nonroot);
            }

            if ((link_exists = H5Lexists(file_id,
                                         "/" MISCELLANEOUS_TEST_GROUP_NAME
                                         "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_CONTAINER_GROUP_NAME
                                         "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET4_NAME,
                                         H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't determine if link exists\n");
                PART_ERROR(H5Dcreate_absolute_from_nonroot);
            }

            if (!link_exists) {
                H5_FAILED();
                HDprintf("    didn't exist at the correct location\n");
                PART_ERROR(H5Dcreate_absolute_from_nonroot);
            }

            PASSED();
        }
        PART_END(H5Dcreate_absolute_from_nonroot);

        PART_BEGIN(H5Dcreate_relative_from_root)
        {
            TESTING_2("dataset creation by relative path from root group");

            /* TODO: */

            SKIPPED();
            PART_EMPTY(H5Dcreate_relative_from_root);
        }
        PART_END(H5Dcreate_relative_from_root);

        PART_BEGIN(H5Dcreate_relative_from_nonroot)
        {
            TESTING_2("dataset creation by relative path from non-root group");

            /* Create a dataset by relative path in the form "dataset" starting from the test container group
             */
            if ((dset_id5 = H5Dcreate2(group_id, ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET5_NAME, dset_dtype5,
                                       fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't create dataset by relative path from container group\n");
                PART_ERROR(H5Dcreate_relative_from_nonroot);
            }

            /* Create a dataset by relative path in the form "group/dataset" starting from the top-level
             * container group */
            if ((dset_id2 = H5Dcreate2(container_group,
                                       ABSOLUTE_VS_RELATIVE_PATH_TEST_CONTAINER_GROUP_NAME
                                       "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET2_NAME,
                                       dset_dtype2, fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't create dataset by relative path from container group\n");
                PART_ERROR(H5Dcreate_relative_from_nonroot);
            }

            if ((link_exists = H5Lexists(file_id,
                                         "/" MISCELLANEOUS_TEST_GROUP_NAME
                                         "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_CONTAINER_GROUP_NAME
                                         "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET2_NAME,
                                         H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't determine if link exists\n");
                PART_ERROR(H5Dcreate_relative_from_nonroot);
            }

            if (!link_exists) {
                H5_FAILED();
                HDprintf("    didn't exist at the correct location\n");
                PART_ERROR(H5Dcreate_relative_from_nonroot);
            }

            if ((link_exists = H5Lexists(file_id,
                                         "/" MISCELLANEOUS_TEST_GROUP_NAME
                                         "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_CONTAINER_GROUP_NAME
                                         "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET5_NAME,
                                         H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't determine if link exists\n");
                PART_ERROR(H5Dcreate_relative_from_nonroot);
            }

            if (!link_exists) {
                H5_FAILED();
                HDprintf("    didn't exist at the correct location\n");
                PART_ERROR(H5Dcreate_relative_from_nonroot);
            }

            PASSED();
        }
        PART_END(H5Dcreate_relative_from_nonroot);

        PART_BEGIN(H5Dcreate_relative_leading_dot_root)
        {
            TESTING_2("dataset creation by path with leading '.' from root group");

            /* Create a dataset by relative path in the form "./group/dataset" starting from the root group */
            if ((dset_id3 = H5Dcreate2(file_id,
                                       "./" MISCELLANEOUS_TEST_GROUP_NAME
                                       "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_CONTAINER_GROUP_NAME
                                       "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET3_NAME,
                                       dset_dtype3, fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't create dataset by relative path from root with leading '.'\n");
                PART_ERROR(H5Dcreate_relative_leading_dot_root);
            }

            if ((link_exists = H5Lexists(file_id,
                                         "/" MISCELLANEOUS_TEST_GROUP_NAME
                                         "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_CONTAINER_GROUP_NAME
                                         "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET3_NAME,
                                         H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't determine if link exists\n");
                PART_ERROR(H5Dcreate_relative_leading_dot_root);
            }

            if (!link_exists) {
                H5_FAILED();
                HDprintf("    didn't exist at the correct location\n");
                PART_ERROR(H5Dcreate_relative_leading_dot_root);
            }

            PASSED();
        }
        PART_END(H5Dcreate_relative_leading_dot_root);

        PART_BEGIN(H5Dcreate_relative_leading_dot_nonroot)
        {
            TESTING_2("dataset creation by path with leading '.' from non-root group");

            /* Create a dataset by relative path in the form "./dataset" starting from the container group */
            if ((dset_id6 = H5Dcreate2(group_id, "./" ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET6_NAME, dset_dtype6,
                                       fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf(
                    "    couldn't create dataset by relative path from container group with leading '.'\n");
                PART_ERROR(H5Dcreate_relative_leading_dot_nonroot);
            }

            if ((link_exists = H5Lexists(file_id,
                                         "/" MISCELLANEOUS_TEST_GROUP_NAME
                                         "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_CONTAINER_GROUP_NAME
                                         "/" ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET6_NAME,
                                         H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't determine if link exists\n");
                PART_ERROR(H5Dcreate_relative_leading_dot_nonroot);
            }

            if (!link_exists) {
                H5_FAILED();
                HDprintf("    didn't exist at the correct location\n");
                PART_ERROR(H5Dcreate_relative_leading_dot_nonroot);
            }

            PASSED();
        }
        PART_END(H5Dcreate_relative_leading_dot_nonroot);
    }
    END_MULTIPART;

    TESTING_2("test cleanup");

    if (H5Sclose(fspace_id) < 0)
        TEST_ERROR;
    if (H5Tclose(dset_dtype1) < 0)
        TEST_ERROR;
    if (H5Tclose(dset_dtype2) < 0)
        TEST_ERROR;
    if (H5Tclose(dset_dtype3) < 0)
        TEST_ERROR;
    if (H5Tclose(dset_dtype4) < 0)
        TEST_ERROR;
    if (H5Tclose(dset_dtype5) < 0)
        TEST_ERROR;
    if (H5Tclose(dset_dtype6) < 0)
        TEST_ERROR;
    if (H5Dclose(dset_id1) < 0)
        TEST_ERROR;
    if (H5Dclose(dset_id2) < 0)
        TEST_ERROR;
    if (H5Dclose(dset_id3) < 0)
        TEST_ERROR;
    if (H5Dclose(dset_id4) < 0)
        TEST_ERROR;
    if (H5Dclose(dset_id5) < 0)
        TEST_ERROR;
    if (H5Dclose(dset_id6) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup of any IDs still open; errors here are suppressed */
    H5E_BEGIN_TRY
    {
        H5Sclose(fspace_id);
        H5Tclose(dset_dtype1);
        H5Tclose(dset_dtype2);
        H5Tclose(dset_dtype3);
        H5Tclose(dset_dtype4);
        H5Tclose(dset_dtype5);
        H5Tclose(dset_dtype6);
        H5Dclose(dset_id1);
        H5Dclose(dset_id2);
        H5Dclose(dset_id3);
        H5Dclose(dset_id4);
        H5Dclose(dset_id5);
        H5Dclose(dset_id6);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
+/*
+ * A test to check creating/opening objects with the "." as the name
+ */
+static int
+test_dot_for_object_name(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, subgroup_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID, dspace_id = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t dtype_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ herr_t ret = -1;
+
+ TESTING_MULTIPART("creating objects with \".\" as the name");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, dataset, or stored datatype aren't supported with "
+ "this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, MISCELLANEOUS_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", MISCELLANEOUS_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((subgroup_id = H5Gcreate2(container_group, DOT_AS_OBJECT_NAME_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n", DOT_AS_OBJECT_NAME_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ if ((dspace_id = H5Screate(H5S_SCALAR)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create data space\n");
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Gcreate_dot_as_name)
+ {
+ TESTING_2("invalid creation of group with '.' as name");
+
+ /* Create a group with the "." as the name. It should fail. */
+ H5E_BEGIN_TRY
+ {
+ group_id = H5Gcreate2(subgroup_id, ".", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (group_id >= 0) {
+ H5_FAILED();
+ HDprintf(" a group was created with '.' as the name!\n");
+ PART_ERROR(H5Gcreate_dot_as_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Gcreate_dot_as_name);
+
+ PART_BEGIN(H5Dcreate_dot_as_name)
+ {
+ TESTING_2("invalid creation of dataset with '.' as name");
+
+ /* Create a dataset with the "." as the name. It should fail. */
+ H5E_BEGIN_TRY
+ {
+ dset_id = H5Dcreate2(subgroup_id, ".", H5T_NATIVE_INT, dspace_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (dset_id >= 0) {
+ H5_FAILED();
+ HDprintf(" a dataset was created with '.' as the name!\n");
+ PART_ERROR(H5Dcreate_dot_as_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dcreate_dot_as_name);
+
+ PART_BEGIN(H5Tcommit_dot_as_name)
+ {
+ TESTING_2("invalid creation of committed datatype with '.' as name");
+
+ if ((dtype_id = H5Tcopy(H5T_NATIVE_INT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't copy a native datatype\n");
+ PART_ERROR(H5Tcommit_dot_as_name);
+ }
+
+ /* Commit a datatype with "." as the name. It should fail. */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Tcommit2(subgroup_id, ".", dtype_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (ret >= 0) {
+ H5_FAILED();
+ HDprintf(" a named datatype was committed with '.' as the name!\n");
+ PART_ERROR(H5Tcommit_dot_as_name);
+ }
+
+ if (H5Tclose(dtype_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close datatype\n");
+ PART_ERROR(H5Tcommit_dot_as_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Tcommit_dot_as_name);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(dspace_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(subgroup_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(dspace_id);
+ H5Aclose(attr_id);
+ H5Dclose(dset_id);
+ H5Tclose(dtype_id);
+ H5Gclose(group_id);
+ H5Gclose(subgroup_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
/*
 * A test intended to verify that a VOL connector's initialization and
 * termination callbacks can safely be invoked several times in a row.
 *
 * TODO: it is unclear whether this behavior can be exercised purely
 * through public API calls, so for now the test is reported as skipped.
 */
static int
test_double_init_term(void)
{
    TESTING("double init/term correctness");

    SKIPPED();

    return 0;
}
+
+static int
+test_symbols_in_compound_field_name(void)
+{
+ size_t i;
+ size_t total_type_size;
+ size_t next_offset;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t compound_type = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ hid_t type_pool[COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_NUM_SUBTYPES];
+ char member_names[COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_NUM_SUBTYPES][256];
+
+ TESTING("usage of '{', '}' and '\\\"' symbols in compound field name");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ for (i = 0; i < COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_NUM_SUBTYPES; i++)
+ type_pool[i] = H5I_INVALID_HID;
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file\n");
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, MISCELLANEOUS_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_SUBGROUP_NAME,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group\n");
+ goto error;
+ }
+
+ for (i = 0, total_type_size = 0; i < COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_NUM_SUBTYPES; i++) {
+ type_pool[i] = generate_random_datatype(H5T_NO_CLASS, FALSE);
+ total_type_size += H5Tget_size(type_pool[i]);
+ }
+
+ HDsnprintf(member_names[0], 256, "{{{ member0");
+ HDsnprintf(member_names[1], 256, "member1 }}}");
+ HDsnprintf(member_names[2], 256, "{{{ member2 }}");
+ HDsnprintf(member_names[3], 256, "{{ member3 }}}");
+ HDsnprintf(member_names[4], 256, "\\\"member4");
+ HDsnprintf(member_names[5], 256, "member5\\\"");
+ HDsnprintf(member_names[6], 256, "mem\\\"ber6");
+ HDsnprintf(member_names[7], 256, "{{ member7\\\" }");
+ HDsnprintf(member_names[8], 256, "{{ member8\\\\");
+
+ if ((compound_type = H5Tcreate(H5T_COMPOUND, total_type_size)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create compound datatype\n");
+ goto error;
+ }
+
+ for (i = 0, next_offset = 0; i < COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_NUM_SUBTYPES; i++) {
+ if (H5Tinsert(compound_type, member_names[i], next_offset, type_pool[i]) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't insert compound member %zu\n", i);
+ goto error;
+ }
+
+ next_offset += H5Tget_size(type_pool[i]);
+ }
+
+ if (H5Tpack(compound_type) < 0)
+ TEST_ERROR;
+
+ if ((fspace_id = generate_random_dataspace(COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_DSET_RANK, NULL,
+ NULL, FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_DSET_NAME, compound_type,
+ fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset\n");
+ goto error;
+ }
+
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dopen2(group_id, COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_DSET_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" failed to open dataset\n");
+ goto error;
+ }
+
+ for (i = 0; i < COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_NUM_SUBTYPES; i++)
+ if (type_pool[i] >= 0 && H5Tclose(type_pool[i]) < 0)
+ TEST_ERROR;
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(compound_type) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ for (i = 0; i < COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_NUM_SUBTYPES; i++)
+ H5Tclose(type_pool[i]);
+ H5Sclose(fspace_id);
+ H5Tclose(compound_type);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+int
+H5_api_misc_test(void)
+{
+ size_t i;
+ int nerrors;
+
+ HDprintf("**********************************************\n");
+ HDprintf("* *\n");
+ HDprintf("* API Miscellaneous Tests *\n");
+ HDprintf("* *\n");
+ HDprintf("**********************************************\n\n");
+
+ for (i = 0, nerrors = 0; i < ARRAY_LENGTH(misc_tests); i++) {
+ nerrors += (*misc_tests[i])() ? 1 : 0;
+ }
+
+ HDprintf("\n");
+
+ return nerrors;
+}
diff --git a/test/API/H5_api_misc_test.h b/test/API/H5_api_misc_test.h
new file mode 100644
index 0000000..8729db7
--- /dev/null
+++ b/test/API/H5_api_misc_test.h
@@ -0,0 +1,52 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
#ifndef H5_API_MISC_TEST_H
#define H5_API_MISC_TEST_H

#include "H5_api_test.h"

/* Runs all miscellaneous API tests; returns the number of failed tests */
int H5_api_misc_test(void);

/******************************************************
 *                                                    *
 *          API Miscellaneous test defines            *
 *                                                    *
 ******************************************************/

/* Constants for test_open_link_without_leading_slash() */
#define OPEN_LINK_WITHOUT_SLASH_DSET_SPACE_RANK 2
#define OPEN_LINK_WITHOUT_SLASH_DSET_NAME "link_without_slash_test_dset"

/* Constants for test_object_creation_by_absolute_path() */
#define OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_CONTAINER_GROUP_NAME "absolute_path_test_container_group"
#define OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_SUBGROUP_NAME "absolute_path_test_subgroup"
#define OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_DTYPE_NAME "absolute_path_test_dtype"
#define OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_DSET_NAME "absolute_path_test_dset"
#define OBJECT_CREATE_BY_ABSOLUTE_PATH_TEST_DSET_SPACE_RANK 3

/* Constants for test_absolute_vs_relative_path() */
#define ABSOLUTE_VS_RELATIVE_PATH_TEST_CONTAINER_GROUP_NAME "absolute_vs_relative_test_container_group"
#define ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET1_NAME "absolute_vs_relative_test_dset1"
#define ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET2_NAME "absolute_vs_relative_test_dset2"
#define ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET3_NAME "absolute_vs_relative_test_dset3"
#define ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET4_NAME "absolute_vs_relative_test_dset4"
#define ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET5_NAME "absolute_vs_relative_test_dset5"
#define ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET6_NAME "absolute_vs_relative_test_dset6"
#define ABSOLUTE_VS_RELATIVE_PATH_TEST_DSET_SPACE_RANK 3

/* Constants for test_dot_for_object_name() */
#define DOT_AS_OBJECT_NAME_TEST_SUBGROUP_NAME "dot_as_object_name_test"

/* Constants for test_symbols_in_compound_field_name();
 * NUM_SUBTYPES must match the number of member_names initialized in the test */
#define COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_SUBGROUP_NAME \
    "compound_type_with_symbols_in_member_names_test"
#define COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_NUM_SUBTYPES 9
#define COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_DSET_RANK 2
#define COMPOUND_WITH_SYMBOLS_IN_MEMBER_NAMES_TEST_DSET_NAME "dset"

#endif
diff --git a/test/API/H5_api_object_test.c b/test/API/H5_api_object_test.c
new file mode 100644
index 0000000..e054356
--- /dev/null
+++ b/test/API/H5_api_object_test.c
@@ -0,0 +1,7172 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_object_test.h"
+
+static int test_open_object(void);
+static int test_open_object_invalid_params(void);
+static int test_object_exists(void);
+static int test_object_exists_invalid_params(void);
+static int test_get_object_info(void);
+static int test_get_object_info_invalid_params(void);
+static int test_link_object(void);
+static int test_link_object_invalid_params(void);
+static int test_incr_decr_object_refcount(void);
+static int test_incr_decr_object_refcount_invalid_params(void);
+static int test_object_copy_basic(void);
+static int test_object_copy_already_existing(void);
+static int test_object_copy_shallow_group_copy(void);
+static int test_object_copy_no_attributes(void);
+static int test_object_copy_by_soft_link(void);
+static int test_object_copy_group_with_soft_links(void);
+static int test_object_copy_between_files(void);
+static int test_object_copy_invalid_params(void);
+static int test_object_comments(void);
+static int test_object_comments_invalid_params(void);
+static int test_object_visit(void);
+static int test_object_visit_soft_link(void);
+static int test_object_visit_invalid_params(void);
+static int test_close_object(void);
+static int test_close_object_invalid_params(void);
+static int test_close_invalid_objects(void);
+static int test_flush_object(void);
+static int test_flush_object_invalid_params(void);
+static int test_refresh_object(void);
+static int test_refresh_object_invalid_params(void);
+
+static herr_t object_copy_attribute_iter_callback(hid_t location_id, const char *attr_name,
+ const H5A_info_t *ainfo, void *op_data);
+static herr_t object_copy_soft_link_non_expand_callback(hid_t group, const char *name,
+ const H5L_info2_t *info, void *op_data);
+static herr_t object_copy_soft_link_expand_callback(hid_t group, const char *name, const H5L_info2_t *info,
+ void *op_data);
+static herr_t object_visit_callback(hid_t o_id, const char *name, const H5O_info2_t *object_info,
+ void *op_data);
+static herr_t object_visit_dset_callback(hid_t o_id, const char *name, const H5O_info2_t *object_info,
+ void *op_data);
+static herr_t object_visit_dtype_callback(hid_t o_id, const char *name, const H5O_info2_t *object_info,
+ void *op_data);
+static herr_t object_visit_soft_link_callback(hid_t o_id, const char *name, const H5O_info2_t *object_info,
+ void *op_data);
+static herr_t object_visit_noop_callback(hid_t o_id, const char *name, const H5O_info2_t *object_info,
+ void *op_data);
+
+/*
+ * The array of object tests to be performed.
+ */
+/* Function-pointer table of all object tests; presumably executed in array
+ * order by the suite dispatcher in H5_api_test.c — confirm there. Keep new
+ * tests grouped with their "invalid_params" counterpart. */
+static int (*object_tests[])(void) = {
+    /* Open / existence / info tests */
+    test_open_object,
+    test_open_object_invalid_params,
+    test_object_exists,
+    test_object_exists_invalid_params,
+    test_get_object_info,
+    test_get_object_info_invalid_params,
+    /* Linking and reference-count tests */
+    test_link_object,
+    test_link_object_invalid_params,
+    test_incr_decr_object_refcount,
+    test_incr_decr_object_refcount_invalid_params,
+    /* Copy tests */
+    test_object_copy_basic,
+    test_object_copy_already_existing,
+    test_object_copy_shallow_group_copy,
+    test_object_copy_no_attributes,
+    test_object_copy_by_soft_link,
+    test_object_copy_group_with_soft_links,
+    test_object_copy_between_files,
+    test_object_copy_invalid_params,
+    /* Comment / visit / close / flush / refresh tests */
+    test_object_comments,
+    test_object_comments_invalid_params,
+    test_object_visit,
+    test_object_visit_soft_link,
+    test_object_visit_invalid_params,
+    test_close_object,
+    test_close_object_invalid_params,
+    test_close_invalid_objects,
+    test_flush_object,
+    test_flush_object_invalid_params,
+    test_refresh_object,
+    test_refresh_object_invalid_params,
+};
+
+/*
+ * A test to check that various objects (group, dataset, datatype)
+ * can be opened by using H5Oopen, H5Oopen_by_idx and H5Oopen_by_addr.
+ *
+ * XXX: create separate objects for each test part.
+ *
+ * XXX: Add more open by idx tests
+ *
+ * XXX: test opening through dangling and resolving soft links.
+ */
+static int
+test_open_object(void)
+{
+    hid_t file_id         = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t group_id2       = H5I_INVALID_HID;
+    hid_t dset_id         = H5I_INVALID_HID;
+    hid_t dset_dtype      = H5I_INVALID_HID;
+    hid_t type_id         = H5I_INVALID_HID;
+    hid_t fspace_id       = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("object opening");
+
+    /* Make sure the connector supports the API functions being tested.
+     * NOTE: this check runs before TESTING_2("test setup") so a skipped test
+     * doesn't print a dangling setup line, matching the other tests in this
+     * file. */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, object, dataset, or stored datatype aren't "
+                 "supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, OBJECT_OPEN_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", OBJECT_OPEN_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = generate_random_dataspace(OBJECT_OPEN_TEST_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Oopen_group)
+        {
+            TESTING_2("H5Oopen on a group");
+
+            if ((group_id2 = H5Gcreate2(group_id, OBJECT_OPEN_TEST_GRP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                                        H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create group '%s'\n", OBJECT_OPEN_TEST_GRP_NAME);
+                PART_ERROR(H5Oopen_group);
+            }
+
+            /* Close the freshly-created group so H5Oopen below re-opens it
+             * rather than returning a still-open handle */
+            H5E_BEGIN_TRY
+            {
+                H5Gclose(group_id2);
+            }
+            H5E_END_TRY;
+
+            if ((group_id2 = H5Oopen(group_id, OBJECT_OPEN_TEST_GRP_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open group '%s' with H5Oopen\n", OBJECT_OPEN_TEST_GRP_NAME);
+                PART_ERROR(H5Oopen_group);
+            }
+
+            if (H5Iget_type(group_id2) != H5I_GROUP) {
+                H5_FAILED();
+                HDprintf("    ID is not a group\n");
+                PART_ERROR(H5Oopen_group);
+            }
+
+            if (H5Gclose(group_id2) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close group opened with H5Oopen\n");
+                PART_ERROR(H5Oopen_group);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_group);
+
+        PART_BEGIN(H5Oopen_dset)
+        {
+            TESTING_2("H5Oopen on a dataset");
+
+            if ((dset_id = H5Dcreate2(group_id, OBJECT_OPEN_TEST_DSET_NAME, dset_dtype, fspace_id,
+                                      H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create dataset '%s'\n", OBJECT_OPEN_TEST_DSET_NAME);
+                PART_ERROR(H5Oopen_dset);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                H5Dclose(dset_id);
+            }
+            H5E_END_TRY;
+
+            if ((dset_id = H5Oopen(group_id, OBJECT_OPEN_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open dataset '%s' with H5Oopen\n", OBJECT_OPEN_TEST_DSET_NAME);
+                PART_ERROR(H5Oopen_dset);
+            }
+
+            if (H5Iget_type(dset_id) != H5I_DATASET) {
+                H5_FAILED();
+                HDprintf("    ID is not a dataset\n");
+                PART_ERROR(H5Oopen_dset);
+            }
+
+            if (H5Dclose(dset_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close dataset opened with H5Oopen\n");
+                PART_ERROR(H5Oopen_dset);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_dset);
+
+        PART_BEGIN(H5Oopen_dtype)
+        {
+            TESTING_2("H5Oopen on a committed datatype");
+
+            if ((type_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create datatype '%s'\n", OBJECT_OPEN_TEST_TYPE_NAME);
+                PART_ERROR(H5Oopen_dtype);
+            }
+
+            if (H5Tcommit2(group_id, OBJECT_OPEN_TEST_TYPE_NAME, type_id, H5P_DEFAULT, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't commit datatype '%s'\n", OBJECT_OPEN_TEST_TYPE_NAME);
+                PART_ERROR(H5Oopen_dtype);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                H5Tclose(type_id);
+            }
+            H5E_END_TRY;
+
+            if ((type_id = H5Oopen(group_id, OBJECT_OPEN_TEST_TYPE_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open datatype '%s' with H5Oopen\n", OBJECT_OPEN_TEST_TYPE_NAME);
+                PART_ERROR(H5Oopen_dtype);
+            }
+
+            /* Fixed error message: this part checks for a datatype ID, not a
+             * dataset ID */
+            if (H5Iget_type(type_id) != H5I_DATATYPE) {
+                H5_FAILED();
+                HDprintf("    ID is not a datatype\n");
+                PART_ERROR(H5Oopen_dtype);
+            }
+
+            if (H5Tclose(type_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close committed datatype opened with H5Oopen\n");
+                PART_ERROR(H5Oopen_dtype);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_dtype);
+
+        /* Release any IDs still open from failed parts before re-using the
+         * variables for the open-by-index parts */
+        if (group_id2 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Gclose(group_id2);
+            }
+            H5E_END_TRY;
+            group_id2 = H5I_INVALID_HID;
+        }
+        if (dset_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Dclose(dset_id);
+            }
+            H5E_END_TRY;
+            dset_id = H5I_INVALID_HID;
+        }
+        if (type_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Tclose(type_id);
+            }
+            H5E_END_TRY;
+            type_id = H5I_INVALID_HID;
+        }
+
+        PART_BEGIN(H5Oopen_by_idx_group)
+        {
+            TESTING_2("H5Oopen_by_idx on a group");
+
+            if ((group_id2 = H5Oopen_by_idx(container_group, OBJECT_OPEN_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                            H5_ITER_INC, 1, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open group '%s' with H5Oopen_by_idx\n", OBJECT_OPEN_TEST_GRP_NAME);
+                PART_ERROR(H5Oopen_by_idx_group);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_by_idx_group);
+
+        PART_BEGIN(H5Oopen_by_idx_dset)
+        {
+            TESTING_2("H5Oopen_by_idx on a dataset");
+
+            if ((dset_id = H5Oopen_by_idx(container_group, OBJECT_OPEN_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                          H5_ITER_INC, 0, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open dataset '%s' with H5Oopen_by_idx\n", OBJECT_OPEN_TEST_DSET_NAME);
+                PART_ERROR(H5Oopen_by_idx_dset);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_by_idx_dset);
+
+        PART_BEGIN(H5Oopen_by_idx_dtype)
+        {
+            TESTING_2("H5Oopen_by_idx on a committed datatype");
+
+            if ((type_id = H5Oopen_by_idx(container_group, OBJECT_OPEN_TEST_GROUP_NAME, H5_INDEX_NAME,
+                                          H5_ITER_INC, 2, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open committed datatype '%s' with H5Oopen_by_idx\n",
+                         OBJECT_OPEN_TEST_TYPE_NAME);
+                PART_ERROR(H5Oopen_by_idx_dtype);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_by_idx_dtype);
+
+        if (group_id2 >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Gclose(group_id2);
+            }
+            H5E_END_TRY;
+            group_id2 = H5I_INVALID_HID;
+        }
+        if (dset_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Dclose(dset_id);
+            }
+            H5E_END_TRY;
+            dset_id = H5I_INVALID_HID;
+        }
+        if (type_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Tclose(type_id);
+            }
+            H5E_END_TRY;
+            type_id = H5I_INVALID_HID;
+        }
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(fspace_id);
+        H5Tclose(dset_dtype);
+        H5Tclose(type_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that various objects (group, dataset, datatype)
+ * can't be opened when H5Oopen, H5Oopen_by_idx and H5Oopen_by_addr
+ * are passed invalid parameters.
+ */
+static int
+test_open_object_invalid_params(void)
+{
+    hid_t file_id         = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t group_id2       = H5I_INVALID_HID;
+    hid_t gcpl_id         = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("object opening with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested.
+     * H5VL_CAP_FLAG_CREATION_ORDER is required because the test group below
+     * is created with link creation order tracked/indexed for the
+     * H5Oopen_by_idx negative cases. */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, object, or creation order aren't supported with "
+                 "this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create a GCPL\n");
+        goto error;
+    }
+
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't enable link creation order tracking and indexing on GCPL\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, OBJECT_OPEN_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+                               gcpl_id, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 OBJECT_OPEN_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Create (and immediately close) a target group for the negative H5Oopen
+     * calls below; only its name is needed */
+    if ((group_id2 = H5Gcreate2(group_id, OBJECT_OPEN_INVALID_PARAMS_TEST_GRP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                                H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", OBJECT_OPEN_INVALID_PARAMS_TEST_GRP_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(group_id2) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    /* Each part passes one invalid argument and expects the call to fail;
+     * H5E_BEGIN_TRY/H5E_END_TRY suppresses the expected error output */
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Oopen_invalid_loc_id)
+        {
+            TESTING_2("H5Oopen with an invalid location ID");
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen(H5I_INVALID_HID, OBJECT_OPEN_INVALID_PARAMS_TEST_GRP_NAME, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oopen succeeded with an invalid location ID!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_invalid_loc_id);
+
+        PART_BEGIN(H5Oopen_invalid_obj_name)
+        {
+            TESTING_2("H5Oopen with an invalid object name");
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen(group_id, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oopen succeeded with a NULL object name!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_invalid_obj_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen(group_id, "", H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oopen succeeded with an invalid object name of ''!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_invalid_obj_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_invalid_obj_name);
+
+        PART_BEGIN(H5Oopen_invalid_lapl)
+        {
+            TESTING_2("H5Oopen with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen(group_id, OBJECT_OPEN_INVALID_PARAMS_TEST_GRP_NAME, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oopen succeeded with an invalid LAPL!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_invalid_lapl);
+
+        PART_BEGIN(H5Oopen_by_idx_invalid_loc_id)
+        {
+            TESTING_2("H5Oopen_by_idx with an invalid location ID");
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen_by_idx(H5I_INVALID_HID, OBJECT_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+                                           H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oopen_by_idx succeeded with an invalid location ID!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_by_idx_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_by_idx_invalid_loc_id);
+
+        PART_BEGIN(H5Oopen_by_idx_invalid_grp_name)
+        {
+            TESTING_2("H5Oopen_by_idx with an invalid group name");
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen_by_idx(container_group, NULL, H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oopen_by_idx succeeded with a NULL group name!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_by_idx_invalid_grp_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen_by_idx(container_group, "", H5_INDEX_NAME, H5_ITER_INC, 0, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oopen_by_idx succeeded with an invalid group name of ''!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_by_idx_invalid_grp_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_by_idx_invalid_grp_name);
+
+        PART_BEGIN(H5Oopen_by_idx_invalid_index_type)
+        {
+            TESTING_2("H5Oopen_by_idx with an invalid index type");
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen_by_idx(container_group, OBJECT_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+                                           H5_INDEX_UNKNOWN, H5_ITER_INC, 0, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oopen_by_idx succeeded with invalid index type H5_INDEX_UNKNOWN!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_by_idx_invalid_index_type);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen_by_idx(container_group, OBJECT_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+                                           H5_INDEX_N, H5_ITER_INC, 0, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oopen_by_idx succeeded with invalid index type H5_INDEX_N!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_by_idx_invalid_index_type);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_by_idx_invalid_index_type);
+
+        PART_BEGIN(H5Oopen_by_idx_invalid_iter_order)
+        {
+            TESTING_2("H5Oopen_by_idx with an invalid iteration order");
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen_by_idx(container_group, OBJECT_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+                                           H5_INDEX_NAME, H5_ITER_UNKNOWN, 0, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf(
+                    "    H5Oopen_by_idx succeeded with an invalid iteration ordering H5_ITER_UNKNOWN!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_by_idx_invalid_iter_order);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen_by_idx(container_group, OBJECT_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+                                           H5_INDEX_NAME, H5_ITER_N, 0, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oopen_by_idx succeeded with an invalid iteration ordering H5_ITER_N!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_by_idx_invalid_iter_order);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_by_idx_invalid_iter_order);
+
+        PART_BEGIN(H5Oopen_by_idx_invalid_lapl)
+        {
+            TESTING_2("H5Oopen_by_idx with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen_by_idx(container_group, OBJECT_OPEN_INVALID_PARAMS_TEST_GROUP_NAME,
+                                           H5_INDEX_NAME, H5_ITER_INC, 0, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oopen_by_idx succeeded with an invalid LAPL!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_by_idx_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_by_idx_invalid_lapl);
+
+        PART_BEGIN(H5Oopen_by_token_invalid_loc_id)
+        {
+            TESTING_2("H5Oopen_by_token with an invalid location ID");
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen_by_token(H5I_INVALID_HID, H5O_TOKEN_UNDEF);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oopen_by_token succeeded with an invalid location ID!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_by_token_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_by_token_invalid_loc_id);
+
+        PART_BEGIN(H5Oopen_by_token_invalid_token)
+        {
+            TESTING_2("H5Oopen_by_token with an invalid token");
+
+            H5E_BEGIN_TRY
+            {
+                group_id2 = H5Oopen_by_token(file_id, H5O_TOKEN_UNDEF);
+            }
+            H5E_END_TRY;
+
+            if (group_id2 >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oopen_by_token succeeded with an invalid token!\n");
+                H5Gclose(group_id2);
+                PART_ERROR(H5Oopen_by_token_invalid_token);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oopen_by_token_invalid_token);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(gcpl_id);
+        H5Gclose(group_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test for H5Oexists_by_name.
+ */
+static int
+test_object_exists(void)
+{
+    htri_t object_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  group_id2       = H5I_INVALID_HID;
+    hid_t  dset_id         = H5I_INVALID_HID;
+    hid_t  dtype_id        = H5I_INVALID_HID;
+    hid_t  fspace_id       = H5I_INVALID_HID;
+    hid_t  dset_dtype      = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("object existence");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, object, dataset, stored datatype or soft link "
+                 "aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, OBJECT_EXISTS_TEST_SUBGROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n", OBJECT_EXISTS_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = generate_random_dataspace(OBJECT_EXISTS_TEST_DSET_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    /*
+     * NOTE: H5Oexists_by_name for hard links should always succeed.
+     *       H5Oexists_by_name for a soft link may fail if the link doesn't resolve.
+     */
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Oexists_by_name_group)
+        {
+            TESTING_2("H5Oexists_by_name on a group");
+
+            if ((group_id2 = H5Gcreate2(group_id, OBJECT_EXISTS_TEST_GRP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                                        H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create group '%s'\n", OBJECT_EXISTS_TEST_GRP_NAME);
+                PART_ERROR(H5Oexists_by_name_group);
+            }
+
+            if ((object_exists = H5Oexists_by_name(group_id, OBJECT_EXISTS_TEST_GRP_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if object '%s' exists\n", OBJECT_EXISTS_TEST_GRP_NAME);
+                PART_ERROR(H5Oexists_by_name_group);
+            }
+
+            if (!object_exists) {
+                H5_FAILED();
+                HDprintf("    object '%s' didn't exist!\n", OBJECT_EXISTS_TEST_GRP_NAME);
+                PART_ERROR(H5Oexists_by_name_group);
+            }
+
+            if (H5Gclose(group_id2) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close group\n");
+                PART_ERROR(H5Oexists_by_name_group);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oexists_by_name_group);
+
+        PART_BEGIN(H5Oexists_by_name_dset)
+        {
+            TESTING_2("H5Oexists_by_name on a dataset");
+
+            if ((dset_id = H5Dcreate2(group_id, OBJECT_EXISTS_TEST_DSET_NAME, dset_dtype, fspace_id,
+                                      H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create dataset '%s'\n", OBJECT_EXISTS_TEST_DSET_NAME);
+                PART_ERROR(H5Oexists_by_name_dset);
+            }
+
+            if ((object_exists = H5Oexists_by_name(group_id, OBJECT_EXISTS_TEST_DSET_NAME, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if object '%s' exists\n", OBJECT_EXISTS_TEST_DSET_NAME);
+                PART_ERROR(H5Oexists_by_name_dset);
+            }
+
+            if (!object_exists) {
+                H5_FAILED();
+                HDprintf("    object '%s' didn't exist!\n", OBJECT_EXISTS_TEST_DSET_NAME);
+                PART_ERROR(H5Oexists_by_name_dset);
+            }
+
+            if (H5Dclose(dset_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close dataset\n");
+                PART_ERROR(H5Oexists_by_name_dset);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oexists_by_name_dset);
+
+        PART_BEGIN(H5Oexists_by_name_dtype)
+        {
+            TESTING_2("H5Oexists_by_name on a committed datatype");
+
+            if ((dtype_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create datatype '%s'\n", OBJECT_EXISTS_TEST_TYPE_NAME);
+                PART_ERROR(H5Oexists_by_name_dtype);
+            }
+
+            if (H5Tcommit2(group_id, OBJECT_EXISTS_TEST_TYPE_NAME, dtype_id, H5P_DEFAULT, H5P_DEFAULT,
+                           H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't commit datatype '%s'\n", OBJECT_EXISTS_TEST_TYPE_NAME);
+                PART_ERROR(H5Oexists_by_name_dtype);
+            }
+
+            if ((object_exists = H5Oexists_by_name(group_id, OBJECT_EXISTS_TEST_TYPE_NAME, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if object '%s' exists\n", OBJECT_EXISTS_TEST_TYPE_NAME);
+                PART_ERROR(H5Oexists_by_name_dtype);
+            }
+
+            if (!object_exists) {
+                H5_FAILED();
+                HDprintf("    object '%s' didn't exist!\n", OBJECT_EXISTS_TEST_TYPE_NAME);
+                PART_ERROR(H5Oexists_by_name_dtype);
+            }
+
+            if (H5Tclose(dtype_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't close datatype\n");
+                PART_ERROR(H5Oexists_by_name_dtype);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oexists_by_name_dtype);
+
+        PART_BEGIN(H5Oexists_by_name_soft_link)
+        {
+            TESTING_2("H5Oexists_by_name for a soft link");
+
+            /* Soft link points at this test's own subgroup, so it resolves */
+            if (H5Lcreate_soft("/" OBJECT_TEST_GROUP_NAME "/" OBJECT_EXISTS_TEST_SUBGROUP_NAME, group_id,
+                               OBJECT_EXISTS_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create soft link '%s'\n", OBJECT_EXISTS_TEST_SOFT_LINK_NAME);
+                PART_ERROR(H5Oexists_by_name_soft_link);
+            }
+
+            if ((object_exists =
+                     H5Oexists_by_name(group_id, OBJECT_EXISTS_TEST_SOFT_LINK_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if object '%s' exists\n", OBJECT_EXISTS_TEST_SOFT_LINK_NAME);
+                PART_ERROR(H5Oexists_by_name_soft_link);
+            }
+
+            if (!object_exists) {
+                H5_FAILED();
+                HDprintf("    object '%s' didn't exist!\n", OBJECT_EXISTS_TEST_SOFT_LINK_NAME);
+                PART_ERROR(H5Oexists_by_name_soft_link);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oexists_by_name_soft_link);
+
+        PART_BEGIN(H5Oexists_by_name_dangling_soft_link)
+        {
+            TESTING_2("H5Oexists_by_name for a dangling soft link");
+
+            /* Dangling link: target path intentionally doesn't exist, so the
+             * call must succeed but report FALSE */
+            if (H5Lcreate_soft(
+                    "/" OBJECT_TEST_GROUP_NAME "/" OBJECT_EXISTS_TEST_SUBGROUP_NAME "/non_existent_object",
+                    group_id, OBJECT_EXISTS_TEST_DANGLING_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create soft link '%s'\n", OBJECT_EXISTS_TEST_DANGLING_LINK_NAME);
+                PART_ERROR(H5Oexists_by_name_dangling_soft_link);
+            }
+
+            if ((object_exists =
+                     H5Oexists_by_name(group_id, OBJECT_EXISTS_TEST_DANGLING_LINK_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't determine if object '%s' exists\n",
+                         "/" OBJECT_TEST_GROUP_NAME "/" OBJECT_EXISTS_TEST_SUBGROUP_NAME
+                         "/non_existent_object");
+                PART_ERROR(H5Oexists_by_name_dangling_soft_link);
+            }
+
+            if (object_exists) {
+                H5_FAILED();
+                HDprintf("    object pointed to by dangling soft link should not have existed!\n");
+                PART_ERROR(H5Oexists_by_name_dangling_soft_link);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oexists_by_name_dangling_soft_link);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(fspace_id);
+        H5Tclose(dset_dtype);
+        H5Tclose(dtype_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that H5Oexists_by_name fails
+ * when it is passed invalid parameters.
+ */
+static int
+test_object_exists_invalid_params(void)
+{
+    htri_t object_exists;
+    hid_t  file_id         = H5I_INVALID_HID;
+    hid_t  container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t  group_id2       = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("object existence with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or object aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, OBJECT_EXISTS_INVALID_PARAMS_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 OBJECT_EXISTS_INVALID_PARAMS_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Create (and immediately close) a valid target object; only its name is
+     * used by the negative cases below */
+    if ((group_id2 = H5Gcreate2(group_id, OBJECT_EXISTS_INVALID_PARAMS_TEST_GRP_NAME, H5P_DEFAULT,
+                                H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", OBJECT_EXISTS_INVALID_PARAMS_TEST_GRP_NAME);
+        goto error;
+    }
+
+    if (H5Gclose(group_id2) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    /* Each part passes one invalid argument and expects a negative return */
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Oexists_by_name_invalid_loc_id)
+        {
+            TESTING_2("H5Oexists_by_name with an invalid location ID");
+
+            H5E_BEGIN_TRY
+            {
+                object_exists = H5Oexists_by_name(H5I_INVALID_HID, OBJECT_EXISTS_INVALID_PARAMS_TEST_GRP_NAME,
+                                                  H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (object_exists >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oexists_by_name succeeded with an invalid location ID!\n");
+                PART_ERROR(H5Oexists_by_name_invalid_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oexists_by_name_invalid_loc_id);
+
+        PART_BEGIN(H5Oexists_by_name_invalid_obj_name)
+        {
+            TESTING_2("H5Oexists_by_name with an invalid object name");
+
+            H5E_BEGIN_TRY
+            {
+                object_exists = H5Oexists_by_name(group_id, NULL, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (object_exists >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oexists_by_name succeeded with a NULL object name!\n");
+                PART_ERROR(H5Oexists_by_name_invalid_obj_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                object_exists = H5Oexists_by_name(group_id, "", H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (object_exists >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oexists_by_name succeeded with an invalid object name of ''!\n");
+                PART_ERROR(H5Oexists_by_name_invalid_obj_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oexists_by_name_invalid_obj_name);
+
+        PART_BEGIN(H5Oexists_by_name_invalid_lapl)
+        {
+            TESTING_2("H5Oexists_by_name with an invalid LAPL");
+
+            H5E_BEGIN_TRY
+            {
+                object_exists =
+                    H5Oexists_by_name(group_id, OBJECT_EXISTS_INVALID_PARAMS_TEST_GRP_NAME, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (object_exists >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Oexists_by_name succeeded with an invalid LAPL!\n");
+                PART_ERROR(H5Oexists_by_name_invalid_lapl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Oexists_by_name_invalid_lapl);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test for H5Oget_info(_by_name/_by_idx).
+ *
+ * Currently a placeholder: no checks are implemented yet, so the
+ * test only announces itself and reports SKIPPED.
+ */
+static int
+test_get_object_info(void)
+{
+ TESTING("object info retrieval");
+
+ /* Not implemented yet — report skipped rather than passed */
+ SKIPPED();
+
+ return 0;
+}
+
+/*
+ * A test to check that an object's info can't be retrieved
+ * when H5Oget_info(_by_name/_by_idx) are passed invalid
+ * parameters.
+ *
+ * Currently a placeholder: no checks are implemented yet, so the
+ * test only announces itself and reports SKIPPED.
+ */
+static int
+test_get_object_info_invalid_params(void)
+{
+ TESTING("object info retrieval with invalid parameters");
+
+ /* Not implemented yet — report skipped rather than passed */
+ SKIPPED();
+
+ return 0;
+}
+
+/*
+ * A test for H5Olink.
+ *
+ * Creates an anonymous group, an anonymous dataset and an anonymous
+ * committed datatype, then links each one into the file structure
+ * with H5Olink and verifies that each call succeeds.
+ *
+ * Returns 0 on success, 1 on failure (all IDs are closed best-effort
+ * on the error path).
+ */
+static int
+test_link_object(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t group_id2 = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ /* Used both as the dataset's datatype and as the anonymously-committed datatype */
+ hid_t dset_dtype = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("object linking");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, object, dataset, or stored datatype aren't "
+ "supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, OBJECT_LINK_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n", OBJECT_LINK_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = generate_random_dataspace(OBJECT_LINK_TEST_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Olink_group)
+ {
+ TESTING_2("H5Olink an anonymous group");
+
+ if ((group_id2 = H5Gcreate_anon(group_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create an anonymous group\n");
+ PART_ERROR(H5Olink_group);
+ }
+
+ if (H5Olink(group_id2, group_id, OBJECT_LINK_TEST_GROUP_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't link the anonymous group\n");
+ PART_ERROR(H5Olink_group);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Olink_group);
+
+ PART_BEGIN(H5Olink_dataset)
+ {
+ TESTING_2("H5Olink an anonymous dataset");
+
+ if ((dset_id = H5Dcreate_anon(group_id, dset_dtype, fspace_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create an anonymous dataset\n");
+ PART_ERROR(H5Olink_dataset);
+ }
+
+ if (H5Olink(dset_id, group_id, OBJECT_LINK_TEST_DSET_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't link the anonymous dataset\n");
+ PART_ERROR(H5Olink_dataset);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Olink_dataset);
+
+ PART_BEGIN(H5Olink_datatype)
+ {
+ TESTING_2("H5Olink an anonymous datatype");
+
+ /* Anonymously commit the datatype already used for the dataset above */
+ if (H5Tcommit_anon(group_id, dset_dtype, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create an anonymous datatype\n");
+ PART_ERROR(H5Olink_datatype);
+ }
+
+ if (H5Olink(dset_dtype, group_id, OBJECT_LINK_TEST_DTYPE_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't link the anonymous datatype\n");
+ PART_ERROR(H5Olink_datatype);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Olink_datatype);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dset_dtype) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id2) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup; failures here are suppressed by H5E_BEGIN_TRY */
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ H5Tclose(dset_dtype);
+ H5Dclose(dset_id);
+ H5Gclose(group_id2);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that an object can't be linked into
+ * the file structure when H5Olink is passed invalid
+ * parameters.
+ *
+ * Each invalid call is wrapped in H5E_BEGIN_TRY/H5E_END_TRY so the
+ * expected failure doesn't print an error stack; a non-negative
+ * return from H5Olink is a test failure.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_link_object_invalid_params(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t group_id2 = H5I_INVALID_HID;
+ herr_t status;
+
+ TESTING_MULTIPART("object linking with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, or object aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, OBJECT_LINK_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ /* Report the group name that actually failed to be created */
+ HDprintf(" couldn't create container sub-group '%s'\n", OBJECT_LINK_INVALID_PARAMS_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id2 = H5Gcreate_anon(group_id, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create an anonymous group\n");
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Olink_invalid_object_id)
+ {
+ TESTING_2("H5Olink with an invalid object ID");
+
+ H5E_BEGIN_TRY
+ {
+ status = H5Olink(H5I_INVALID_HID, group_id, OBJECT_LINK_TEST_GROUP_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (status >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Olink succeeded with an invalid object ID!\n");
+ PART_ERROR(H5Olink_invalid_object_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Olink_invalid_object_id);
+
+ PART_BEGIN(H5Olink_invalid_location)
+ {
+ TESTING_2("H5Olink with an invalid location ID");
+
+ H5E_BEGIN_TRY
+ {
+ status = H5Olink(group_id2, H5I_INVALID_HID, OBJECT_LINK_TEST_GROUP_NAME2, H5P_DEFAULT,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (status >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Olink succeeded with an invalid location ID!\n");
+ PART_ERROR(H5Olink_invalid_location);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Olink_invalid_location);
+
+ PART_BEGIN(H5Olink_invalid_name)
+ {
+ TESTING_2("H5Olink with an invalid name");
+
+ H5E_BEGIN_TRY
+ {
+ status = H5Olink(group_id2, group_id, NULL, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (status >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Olink succeeded with NULL as the object name!\n");
+ PART_ERROR(H5Olink_invalid_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ status = H5Olink(group_id2, group_id, "", H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (status >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Olink succeeded with an invalid object name of ''!\n");
+ PART_ERROR(H5Olink_invalid_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Olink_invalid_name);
+
+ PART_BEGIN(H5Olink_invalid_lcpl)
+ {
+ TESTING_2("H5Olink with an invalid LCPL");
+
+ H5E_BEGIN_TRY
+ {
+ status =
+ H5Olink(group_id2, group_id, OBJECT_LINK_TEST_GROUP_NAME2, H5I_INVALID_HID, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (status >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Olink succeeded with an invalid LCPL!\n");
+ PART_ERROR(H5Olink_invalid_lcpl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Olink_invalid_lcpl);
+
+ PART_BEGIN(H5Olink_invalid_lapl)
+ {
+ TESTING_2("H5Olink with an invalid LAPL");
+#ifndef NO_INVALID_PROPERTY_LIST_TESTS
+ H5E_BEGIN_TRY
+ {
+ status =
+ H5Olink(group_id2, group_id, OBJECT_LINK_TEST_GROUP_NAME2, H5P_DEFAULT, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (status >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Olink succeeded with an invalid LAPL!\n");
+ PART_ERROR(H5Olink_invalid_lapl);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Olink_invalid_lapl);
+#endif
+ }
+ PART_END(H5Olink_invalid_lapl);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id2) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup; failures here are suppressed by H5E_BEGIN_TRY */
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(group_id2);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test for H5Oincr_refcount/H5Odecr_refcount.
+ *
+ * For each object kind (group, dataset, committed datatype): create
+ * the object, increment its reference count and verify via
+ * H5Oget_info_by_name3 that the count is 2, then decrement it and
+ * verify the count is back to 1.
+ *
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_incr_decr_object_refcount(void)
+{
+ H5O_info2_t oinfo; /* Object info struct */
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t group_id2 = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ hid_t dset_dtype = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("increment/decrement the reference count of object");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, dataset, stored datatype, basic or more object "
+ "aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, OBJECT_REF_COUNT_TEST_SUBGROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n", OBJECT_REF_COUNT_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = generate_random_dataspace(OBJECT_REF_COUNT_TEST_DSET_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Oincr_decr_refcount_group)
+ {
+ TESTING_2("H5Oincr_refcount/H5Odecr_refcount on a group");
+
+ if ((group_id2 = H5Gcreate2(group_id, OBJECT_REF_COUNT_TEST_GRP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", OBJECT_REF_COUNT_TEST_GRP_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_group);
+ }
+
+ /* Increment the reference count */
+ if (H5Oincr_refcount(group_id2) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't increment reference count for the group '%s' \n",
+ OBJECT_REF_COUNT_TEST_GRP_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_group);
+ }
+
+ /* Verify that reference count is 2 now */
+ if (H5Oget_info_by_name3(group_id, OBJECT_REF_COUNT_TEST_GRP_NAME, &oinfo, H5O_INFO_BASIC,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get reference count for the group '%s' \n",
+ OBJECT_REF_COUNT_TEST_GRP_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_group);
+ }
+
+ if (oinfo.rc != 2) {
+ H5_FAILED();
+ HDprintf(" the reference count for the group '%s' isn't 2: %d\n",
+ OBJECT_REF_COUNT_TEST_GRP_NAME, oinfo.rc);
+ PART_ERROR(H5Oincr_decr_refcount_group);
+ }
+
+ /* Decrement the reference count */
+ if (H5Odecr_refcount(group_id2) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't decrement reference count for the group '%s' \n",
+ OBJECT_REF_COUNT_TEST_GRP_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_group);
+ }
+
+ /* Verify that reference count is 1 now */
+ if (H5Oget_info_by_name3(group_id, OBJECT_REF_COUNT_TEST_GRP_NAME, &oinfo, H5O_INFO_BASIC,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get reference count for the group '%s' \n",
+ OBJECT_REF_COUNT_TEST_GRP_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_group);
+ }
+
+ if (oinfo.rc != 1) {
+ H5_FAILED();
+ HDprintf(" the reference count for the group '%s' isn't 1: %d\n",
+ OBJECT_REF_COUNT_TEST_GRP_NAME, oinfo.rc);
+ PART_ERROR(H5Oincr_decr_refcount_group);
+ }
+
+ if (H5Gclose(group_id2) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close group\n");
+ PART_ERROR(H5Oincr_decr_refcount_group);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Oincr_decr_refcount_group);
+
+ PART_BEGIN(H5Oincr_decr_refcount_dset)
+ {
+ TESTING_2("H5Oincr_refcount/H5Odecr_refcount on a dataset");
+
+ if ((dset_id = H5Dcreate2(group_id, OBJECT_REF_COUNT_TEST_DSET_NAME, dset_dtype, fspace_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", OBJECT_REF_COUNT_TEST_DSET_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_dset);
+ }
+
+ /* Increment the reference count */
+ if (H5Oincr_refcount(dset_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't increment reference count for the dataset '%s' \n",
+ OBJECT_REF_COUNT_TEST_DSET_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_dset);
+ }
+
+ /* Verify that reference count is 2 now */
+ if (H5Oget_info_by_name3(group_id, OBJECT_REF_COUNT_TEST_DSET_NAME, &oinfo, H5O_INFO_BASIC,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get reference count for the dataset '%s' \n",
+ OBJECT_REF_COUNT_TEST_DSET_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_dset);
+ }
+
+ if (oinfo.rc != 2) {
+ H5_FAILED();
+ HDprintf(" the reference count for the dataset '%s' isn't 2: %d\n",
+ OBJECT_REF_COUNT_TEST_DSET_NAME, oinfo.rc);
+ PART_ERROR(H5Oincr_decr_refcount_dset);
+ }
+
+ /* Decrement the reference count */
+ if (H5Odecr_refcount(dset_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't decrement reference count for the dataset '%s' \n",
+ OBJECT_REF_COUNT_TEST_DSET_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_dset);
+ }
+
+ /* Verify that reference count is 1 now */
+ if (H5Oget_info_by_name3(group_id, OBJECT_REF_COUNT_TEST_DSET_NAME, &oinfo, H5O_INFO_BASIC,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get reference count for the dataset '%s' \n",
+ OBJECT_REF_COUNT_TEST_DSET_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_dset);
+ }
+
+ if (oinfo.rc != 1) {
+ H5_FAILED();
+ HDprintf(" the reference count for the dataset '%s' isn't 1: %d\n",
+ OBJECT_REF_COUNT_TEST_DSET_NAME, oinfo.rc);
+ PART_ERROR(H5Oincr_decr_refcount_dset);
+ }
+
+ if (H5Dclose(dset_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close dataset\n");
+ PART_ERROR(H5Oincr_decr_refcount_dset);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Oincr_decr_refcount_dset);
+
+ /* Part name must match the PART_ERROR/PART_END name below */
+ PART_BEGIN(H5Oincr_decr_refcount_dtype)
+ {
+ TESTING_2("H5Oincr_refcount/H5Odecr_refcount on a committed datatype");
+
+ if (H5Tcommit2(group_id, OBJECT_REF_COUNT_TEST_TYPE_NAME, dset_dtype, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't commit datatype '%s'\n", OBJECT_REF_COUNT_TEST_TYPE_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_dtype);
+ }
+
+ /* Increment the reference count */
+ if (H5Oincr_refcount(dset_dtype) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't increment reference count for the datatype '%s' \n",
+ OBJECT_REF_COUNT_TEST_TYPE_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_dtype);
+ }
+
+ /* Verify that reference count is 2 now */
+ if (H5Oget_info_by_name3(group_id, OBJECT_REF_COUNT_TEST_TYPE_NAME, &oinfo, H5O_INFO_BASIC,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get reference count for the datatype '%s' \n",
+ OBJECT_REF_COUNT_TEST_TYPE_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_dtype);
+ }
+
+ if (oinfo.rc != 2) {
+ H5_FAILED();
+ HDprintf(" the reference count for the datatype '%s' isn't 2: %d\n",
+ OBJECT_REF_COUNT_TEST_TYPE_NAME, oinfo.rc);
+ PART_ERROR(H5Oincr_decr_refcount_dtype);
+ }
+
+ /* Decrement the reference count */
+ if (H5Odecr_refcount(dset_dtype) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't decrement reference count for the datatype '%s' \n",
+ OBJECT_REF_COUNT_TEST_TYPE_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_dtype);
+ }
+
+ /* Verify that reference count is 1 now */
+ if (H5Oget_info_by_name3(group_id, OBJECT_REF_COUNT_TEST_TYPE_NAME, &oinfo, H5O_INFO_BASIC,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get reference count for the datatype '%s' \n",
+ OBJECT_REF_COUNT_TEST_TYPE_NAME);
+ PART_ERROR(H5Oincr_decr_refcount_dtype);
+ }
+
+ if (oinfo.rc != 1) {
+ H5_FAILED();
+ HDprintf(" the reference count for the datatype '%s' isn't 1: %d\n",
+ OBJECT_REF_COUNT_TEST_TYPE_NAME, oinfo.rc);
+ PART_ERROR(H5Oincr_decr_refcount_dtype);
+ }
+
+ if (H5Tclose(dset_dtype) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close datatype\n");
+ PART_ERROR(H5Oincr_decr_refcount_dtype);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Oincr_decr_refcount_dtype);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup; failures here are suppressed by H5E_BEGIN_TRY */
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ H5Tclose(dset_dtype);
+ H5Dclose(dset_id);
+ H5Gclose(group_id2);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+} /* test_incr_decr_object_refcount */
+
+/*
+ * A test to check that H5Oincr_refcount/H5Odecr_refcount
+ * fail when passed invalid parameters.
+ *
+ * Each call is made with H5I_INVALID_HID inside an
+ * H5E_BEGIN_TRY/H5E_END_TRY pair so the expected failure doesn't
+ * print an error stack; a non-negative return from either call is
+ * reported as a test failure.
+ */
+static int
+test_incr_decr_object_refcount_invalid_params(void)
+{
+ herr_t status;
+
+ TESTING_MULTIPART("object reference count incr./decr. with an invalid parameter");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE)) {
+ SKIPPED();
+ HDprintf(" API functions for more object aren't supported with this connector\n");
+ return 0;
+ }
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Oincr_refcount_invalid_param)
+ {
+ TESTING_2("H5Oincr_refcount with invalid object ID");
+
+ /* Expected to fail: the object ID is invalid */
+ H5E_BEGIN_TRY
+ {
+ status = H5Oincr_refcount(H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (status >= 0) {
+ H5_FAILED();
+ HDprintf(" incremented the reference count for an invalid object ID\n");
+ PART_ERROR(H5Oincr_refcount_invalid_param);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Oincr_refcount_invalid_param);
+
+ PART_BEGIN(H5Odecr_refcount_invalid_param)
+ {
+ TESTING_2("H5Odecr_refcount with invalid object ID");
+
+ /* Expected to fail: the object ID is invalid */
+ H5E_BEGIN_TRY
+ {
+ status = H5Odecr_refcount(H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (status >= 0) {
+ H5_FAILED();
+ HDprintf(" decremented the reference count for an invalid object ID\n");
+ PART_ERROR(H5Odecr_refcount_invalid_param);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Odecr_refcount_invalid_param);
+ }
+ END_MULTIPART;
+
+ return 0;
+
+error:
+ /* Reached via the PART_*/MULTIPART error-handling macros */
+ return 1;
+}
+
+/*
+ * Basic tests for H5Ocopy.
+ */
+static int
+test_object_copy_basic(void)
+{
+ H5O_info2_t object_info;
+ H5G_info_t group_info;
+ htri_t object_link_exists;
+ size_t i;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t group_id2 = H5I_INVALID_HID;
+ hid_t tmp_group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t tmp_dset_id = H5I_INVALID_HID;
+ hid_t dtype_id = H5I_INVALID_HID;
+ hid_t tmp_dtype_id = H5I_INVALID_HID;
+ hid_t tmp_attr_id = H5I_INVALID_HID;
+ hid_t dset_dtype = H5I_INVALID_HID;
+ hid_t attr_space_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("basic object copying");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, object, link, dataset, attribute, iterate, or "
+ "stored datatype aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, OBJECT_COPY_BASIC_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n", OBJECT_COPY_BASIC_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id = generate_random_dataspace(OBJECT_COPY_BASIC_TEST_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+ TEST_ERROR;
+ if ((attr_space_id = generate_random_dataspace(OBJECT_COPY_BASIC_TEST_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+ TEST_ERROR;
+
+ /* Create the test group object, along with its nested members and the attributes attached to it. */
+ if ((group_id2 = H5Gcreate2(group_id, OBJECT_COPY_BASIC_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", OBJECT_COPY_BASIC_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < (size_t)OBJECT_COPY_BASIC_TEST_NUM_NESTED_OBJS; i++) {
+ char grp_name[OBJECT_COPY_BASIC_TEST_BUF_SIZE];
+
+ snprintf(grp_name, OBJECT_COPY_BASIC_TEST_BUF_SIZE, "grp%d", (int)i);
+
+ if ((tmp_group_id = H5Gcreate2(group_id2, grp_name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s' under group '%s'\n", grp_name,
+ OBJECT_COPY_BASIC_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ /* Create a further nested group under the last group added */
+ if (i == (OBJECT_COPY_BASIC_TEST_NUM_NESTED_OBJS - 1)) {
+ if (H5Gclose(H5Gcreate2(tmp_group_id, OBJECT_COPY_BASIC_TEST_DEEP_NESTED_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create nested group '%s' under group '%s'\n",
+ OBJECT_COPY_BASIC_TEST_DEEP_NESTED_GROUP_NAME, grp_name);
+ goto error;
+ }
+ }
+
+ if (H5Gclose(tmp_group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close group '%s'\n", grp_name);
+ goto error;
+ }
+ }
+
+ for (i = 0; i < (size_t)OBJECT_COPY_BASIC_TEST_NUM_ATTRS; i++) {
+ char attr_name[OBJECT_COPY_BASIC_TEST_BUF_SIZE];
+
+ snprintf(attr_name, OBJECT_COPY_BASIC_TEST_BUF_SIZE, "attr%d", (int)i);
+
+ if ((tmp_attr_id = H5Acreate2(group_id2, attr_name, H5T_NATIVE_INT, attr_space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s' on group '%s'\n", attr_name,
+ OBJECT_COPY_BASIC_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if (H5Aclose(tmp_attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close attribute '%s'\n", attr_name);
+ goto error;
+ }
+ }
+
+ /* Create the test dataset object, along with the attributes attached to it. */
+ if ((dset_id = H5Dcreate2(group_id, OBJECT_COPY_BASIC_TEST_DSET_NAME, dset_dtype, space_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", OBJECT_COPY_BASIC_TEST_DSET_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < (size_t)OBJECT_COPY_BASIC_TEST_NUM_ATTRS; i++) {
+ char attr_name[OBJECT_COPY_BASIC_TEST_BUF_SIZE];
+
+ snprintf(attr_name, OBJECT_COPY_BASIC_TEST_BUF_SIZE, "attr%d", (int)i);
+
+ if ((tmp_attr_id = H5Acreate2(dset_id, attr_name, H5T_NATIVE_INT, attr_space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s' on dataset '%s'\n", attr_name,
+ OBJECT_COPY_BASIC_TEST_DSET_NAME);
+ goto error;
+ }
+
+ if (H5Aclose(tmp_attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close attribute '%s'\n", attr_name);
+ goto error;
+ }
+ }
+
+ /* Create the test committed datatype object, along with the attributes attached to it. */
+ if ((dtype_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create datatype\n");
+ goto error;
+ }
+
+ if (H5Tcommit2(group_id, OBJECT_COPY_BASIC_TEST_DTYPE_NAME, dtype_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't commit datatype '%s'\n", OBJECT_COPY_BASIC_TEST_DTYPE_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < (size_t)OBJECT_COPY_BASIC_TEST_NUM_ATTRS; i++) {
+ char attr_name[OBJECT_COPY_BASIC_TEST_BUF_SIZE];
+
+ snprintf(attr_name, OBJECT_COPY_BASIC_TEST_BUF_SIZE, "attr%d", (int)i);
+
+ if ((tmp_attr_id = H5Acreate2(dtype_id, attr_name, H5T_NATIVE_INT, attr_space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s' on committed datatype '%s'\n", attr_name,
+ OBJECT_COPY_BASIC_TEST_DTYPE_NAME);
+ goto error;
+ }
+
+ if (H5Aclose(tmp_attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close attribute '%s'\n", attr_name);
+ goto error;
+ }
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Ocopy_group)
+ {
+ TESTING_2("H5Ocopy on a group (default copy options)");
+
+ if (H5Ocopy(group_id, OBJECT_COPY_BASIC_TEST_GROUP_NAME, group_id,
+ OBJECT_COPY_BASIC_TEST_NEW_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy group '%s' to '%s'\n", OBJECT_COPY_BASIC_TEST_GROUP_NAME,
+ OBJECT_COPY_BASIC_TEST_NEW_GROUP_NAME);
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ if ((object_link_exists =
+ H5Lexists(group_id, OBJECT_COPY_BASIC_TEST_NEW_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' to copied group exists\n",
+ OBJECT_COPY_BASIC_TEST_NEW_GROUP_NAME);
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ if (!object_link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' to copied group didn't exist!\n",
+ OBJECT_COPY_BASIC_TEST_NEW_GROUP_NAME);
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ /* Ensure that the new group has all the members of the copied group, and all its attributes */
+ if ((tmp_group_id = H5Gopen2(group_id, OBJECT_COPY_BASIC_TEST_NEW_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to open group copy '%s'\n", OBJECT_COPY_BASIC_TEST_NEW_GROUP_NAME);
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ memset(&group_info, 0, sizeof(group_info));
+
+ /*
+ * Set link count to zero in case the connector doesn't support
+ * retrieval of group info.
+ */
+ group_info.nlinks = 0;
+
+ if (H5Gget_info(tmp_group_id, &group_info) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve group info\n");
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ if (group_info.nlinks != OBJECT_COPY_BASIC_TEST_NUM_NESTED_OBJS) {
+ H5_FAILED();
+ HDprintf(" copied group contained %d members instead of %d members after a deep copy!\n",
+ (int)group_info.nlinks, OBJECT_COPY_BASIC_TEST_NUM_NESTED_OBJS);
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ memset(&object_info, 0, sizeof(object_info));
+
+ /*
+ * Set attribute count to zero in case the connector doesn't
+ * support retrieval of object info.
+ */
+ object_info.num_attrs = 0;
+
+ if (H5Oget_info3(tmp_group_id, &object_info, H5O_INFO_ALL) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve object info\n");
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ if (object_info.num_attrs == 0) {
+ H5_FAILED();
+ HDprintf(" copied group didn't contain any attributes after copy operation!\n");
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ /* Check the attribute names, types, etc. */
+ i = 0;
+ if (H5Aiterate2(tmp_group_id, H5_INDEX_NAME, H5_ITER_INC, NULL,
+ object_copy_attribute_iter_callback, &i) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to iterate over copied group's attributes\n");
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ if (i != OBJECT_COPY_BASIC_TEST_NUM_ATTRS) {
+ H5_FAILED();
+ HDprintf(
+ " number of attributes on copied group (%llu) didn't match expected number (%llu)!\n",
+ (unsigned long long)i, (unsigned long long)OBJECT_COPY_BASIC_TEST_NUM_ATTRS);
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ if (H5Gclose(tmp_group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group copy\n");
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ /*
+ * Ensure that the last immediate member of the copied group
+ * contains its single member after the deep copy.
+ */
+ {
+ char grp_name[OBJECT_COPY_BASIC_TEST_BUF_SIZE];
+
+ snprintf(grp_name, OBJECT_COPY_BASIC_TEST_BUF_SIZE,
+ OBJECT_COPY_BASIC_TEST_NEW_GROUP_NAME "/grp%d",
+ OBJECT_COPY_BASIC_TEST_NUM_NESTED_OBJS - 1);
+
+ if ((tmp_group_id = H5Gopen2(group_id, grp_name, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to open group '%s'\n",
+ OBJECT_COPY_BASIC_TEST_DEEP_NESTED_GROUP_NAME);
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ memset(&group_info, 0, sizeof(group_info));
+
+ /*
+ * Set link count to zero in case the connector doesn't support
+ * retrieval of group info.
+ */
+ group_info.nlinks = 0;
+
+ if (H5Gget_info(tmp_group_id, &group_info) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve group info\n");
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ if (group_info.nlinks != 1) {
+ H5_FAILED();
+ HDprintf(" copied group's immediate members didn't contain nested members after a "
+ "deep copy!\n");
+ PART_ERROR(H5Ocopy_group);
+ }
+
+ if (H5Gclose(tmp_group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n",
+ OBJECT_COPY_BASIC_TEST_DEEP_NESTED_GROUP_NAME);
+ PART_ERROR(H5Ocopy_group);
+ }
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ocopy_group);
+
+ if (tmp_group_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(tmp_group_id);
+ }
+ H5E_END_TRY;
+ tmp_group_id = H5I_INVALID_HID;
+ }
+
+ PART_BEGIN(H5Ocopy_dset)
+ {
+ TESTING_2("H5Ocopy on a dataset (default copy options)");
+
+ if (H5Ocopy(group_id, OBJECT_COPY_BASIC_TEST_DSET_NAME, group_id,
+ OBJECT_COPY_BASIC_TEST_NEW_DSET_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy dataset '%s' to '%s'\n", OBJECT_COPY_BASIC_TEST_DSET_NAME,
+ OBJECT_COPY_BASIC_TEST_NEW_DSET_NAME);
+ PART_ERROR(H5Ocopy_dset);
+ }
+
+ if ((object_link_exists =
+ H5Lexists(group_id, OBJECT_COPY_BASIC_TEST_NEW_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' to copied dataset exists\n",
+ OBJECT_COPY_BASIC_TEST_NEW_DSET_NAME);
+ PART_ERROR(H5Ocopy_dset);
+ }
+
+ if (!object_link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' to copied dataset didn't exist!\n",
+ OBJECT_COPY_BASIC_TEST_NEW_DSET_NAME);
+ PART_ERROR(H5Ocopy_dset);
+ }
+
+ /* Ensure that the new dataset has all of the attributes of the copied dataset */
+ if ((tmp_dset_id = H5Dopen2(group_id, OBJECT_COPY_BASIC_TEST_NEW_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to open dataset copy '%s'\n", OBJECT_COPY_BASIC_TEST_NEW_DSET_NAME);
+ PART_ERROR(H5Ocopy_dset);
+ }
+
+ memset(&object_info, 0, sizeof(object_info));
+
+ /*
+ * Set attribute count to zero in case the connector doesn't
+ * support retrieval of object info.
+ */
+ object_info.num_attrs = 0;
+
+ if (H5Oget_info3(tmp_dset_id, &object_info, H5O_INFO_ALL) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve object info\n");
+ PART_ERROR(H5Ocopy_dset);
+ }
+
+ if (object_info.num_attrs == 0) {
+ H5_FAILED();
+ HDprintf(" copied dataset didn't contain any attributes after copy operation!\n");
+ PART_ERROR(H5Ocopy_dset);
+ }
+
+ /* Check the attribute names, types, etc. */
+ i = 0;
+ if (H5Aiterate2(tmp_dset_id, H5_INDEX_NAME, H5_ITER_INC, NULL,
+ object_copy_attribute_iter_callback, &i) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to iterate over copied dataset's attributes\n");
+ PART_ERROR(H5Ocopy_dset);
+ }
+
+ if (i != OBJECT_COPY_BASIC_TEST_NUM_ATTRS) {
+ H5_FAILED();
+ HDprintf(" number of attributes on copied dataset (%llu) didn't match expected number "
+ "(%llu)!\n",
+ (unsigned long long)i, (unsigned long long)OBJECT_COPY_BASIC_TEST_NUM_ATTRS);
+ PART_ERROR(H5Ocopy_dset);
+ }
+
+ if (H5Dclose(tmp_dset_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close dataset copy\n");
+ PART_ERROR(H5Ocopy_dset);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ocopy_dset);
+
+ if (tmp_dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(tmp_dset_id);
+ }
+ H5E_END_TRY;
+ tmp_dset_id = H5I_INVALID_HID;
+ }
+
+ PART_BEGIN(H5Ocopy_dtype)
+ {
+ TESTING_2("H5Ocopy on a committed datatype (default copy options)");
+
+ if (H5Ocopy(group_id, OBJECT_COPY_BASIC_TEST_DTYPE_NAME, group_id,
+ OBJECT_COPY_BASIC_TEST_NEW_DTYPE_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy datatype '%s' to '%s'\n", OBJECT_COPY_BASIC_TEST_DTYPE_NAME,
+ OBJECT_COPY_BASIC_TEST_NEW_DTYPE_NAME);
+ PART_ERROR(H5Ocopy_dtype);
+ }
+
+ if ((object_link_exists =
+ H5Lexists(group_id, OBJECT_COPY_BASIC_TEST_NEW_DTYPE_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' to copied datatype exists\n",
+ OBJECT_COPY_BASIC_TEST_NEW_DTYPE_NAME);
+ PART_ERROR(H5Ocopy_dtype);
+ }
+
+ if (!object_link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' to copied datatype didn't exist!\n",
+ OBJECT_COPY_BASIC_TEST_NEW_DTYPE_NAME);
+ PART_ERROR(H5Ocopy_dtype);
+ }
+
+ /* Ensure that the new committed datatype has all the attributes of the copied datatype */
+ if ((tmp_dtype_id = H5Topen2(group_id, OBJECT_COPY_BASIC_TEST_NEW_DTYPE_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to open datatype copy '%s'\n", OBJECT_COPY_BASIC_TEST_NEW_DTYPE_NAME);
+ PART_ERROR(H5Ocopy_dtype);
+ }
+
+ memset(&object_info, 0, sizeof(object_info));
+
+ /*
+ * Set attribute count to zero in case the connector doesn't
+ * support retrieval of object info.
+ */
+ object_info.num_attrs = 0;
+
+ if (H5Oget_info3(tmp_dtype_id, &object_info, H5O_INFO_ALL) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve object info\n");
+ PART_ERROR(H5Ocopy_dtype);
+ }
+
+ if (object_info.num_attrs == 0) {
+ H5_FAILED();
+ HDprintf(
+ " copied committed datatype didn't contain any attributes after copy operation!\n");
+ PART_ERROR(H5Ocopy_dtype);
+ }
+
+ /* Check the attribute names, types, etc. */
+ i = 0;
+ if (H5Aiterate2(tmp_dtype_id, H5_INDEX_NAME, H5_ITER_INC, NULL,
+ object_copy_attribute_iter_callback, &i) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to iterate over copied datatype's attributes\n");
+ PART_ERROR(H5Ocopy_dtype);
+ }
+
+ if (i != OBJECT_COPY_BASIC_TEST_NUM_ATTRS) {
+ H5_FAILED();
+ HDprintf(" number of attributes on copied datatype (%llu) didn't match expected number "
+ "(%llu)!\n",
+ (unsigned long long)i, (unsigned long long)OBJECT_COPY_BASIC_TEST_NUM_ATTRS);
+ PART_ERROR(H5Ocopy_dtype);
+ }
+
+ if (H5Tclose(tmp_dtype_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close datatype copy\n");
+ PART_ERROR(H5Ocopy_dtype);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ocopy_dtype);
+
+ if (tmp_dtype_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Tclose(tmp_dtype_id);
+ }
+ H5E_END_TRY;
+ tmp_dtype_id = H5I_INVALID_HID;
+ }
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(attr_space_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dset_dtype) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dtype_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id2) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(attr_space_id);
+ H5Sclose(space_id);
+ H5Aclose(tmp_attr_id);
+ H5Tclose(dset_dtype);
+ H5Tclose(tmp_dtype_id);
+ H5Tclose(dtype_id);
+ H5Dclose(tmp_dset_id);
+ H5Dclose(dset_id);
+ H5Gclose(tmp_group_id);
+ H5Gclose(group_id2);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
/*
 * Tests to ensure that H5Ocopy fails when attempting to copy
 * an object to a destination where the object already exists.
 *
 * Setup creates a container subgroup holding a group, a dataset, and a
 * committed datatype.  Each test part then calls H5Ocopy with identical
 * source and destination names (so the destination link already exists)
 * and verifies that the copy fails.
 *
 * Returns 0 on success (or when the connector lacks required support and
 * the test is skipped), 1 on failure.
 */
static int
test_object_copy_already_existing(void)
{
    herr_t err_ret;
    hid_t  file_id         = H5I_INVALID_HID;
    hid_t  container_group = H5I_INVALID_HID;
    hid_t  group_id        = H5I_INVALID_HID;
    hid_t  group_id2       = H5I_INVALID_HID;
    hid_t  dset_id         = H5I_INVALID_HID;
    hid_t  dtype_id        = H5I_INVALID_HID;
    hid_t  dset_dtype      = H5I_INVALID_HID;
    hid_t  space_id        = H5I_INVALID_HID;

    TESTING_MULTIPART("object copying to location where objects already exist");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
        SKIPPED();
        HDprintf("    API functions for basic file, group, object, dataset, or stored datatype aren't "
                 "supported with this connector\n");
        return 0;
    }

    TESTING_2("test setup");

    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
        goto error;
    }

    if ((group_id = H5Gcreate2(container_group, OBJECT_COPY_ALREADY_EXISTING_TEST_SUBGROUP_NAME, H5P_DEFAULT,
                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create container subgroup '%s'\n",
                 OBJECT_COPY_ALREADY_EXISTING_TEST_SUBGROUP_NAME);
        goto error;
    }

    /* Dataspace and datatype for the test dataset; actual contents are irrelevant here */
    if ((space_id =
             generate_random_dataspace(OBJECT_COPY_ALREADY_EXISTING_TEST_SPACE_RANK, NULL, NULL, FALSE)) < 0)
        TEST_ERROR;
    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
        TEST_ERROR;

    /* Create the test group object */
    if ((group_id2 = H5Gcreate2(group_id, OBJECT_COPY_ALREADY_EXISTING_TEST_GROUP_NAME, H5P_DEFAULT,
                                H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create group '%s'\n", OBJECT_COPY_ALREADY_EXISTING_TEST_GROUP_NAME);
        goto error;
    }

    /* Create the test dataset object */
    if ((dset_id = H5Dcreate2(group_id, OBJECT_COPY_ALREADY_EXISTING_TEST_DSET_NAME, dset_dtype, space_id,
                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create dataset '%s'\n", OBJECT_COPY_ALREADY_EXISTING_TEST_DSET_NAME);
        goto error;
    }

    /* Create the test committed datatype object */
    if ((dtype_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create datatype\n");
        goto error;
    }

    if (H5Tcommit2(group_id, OBJECT_COPY_ALREADY_EXISTING_TEST_DTYPE_NAME, dtype_id, H5P_DEFAULT, H5P_DEFAULT,
                   H5P_DEFAULT) < 0) {
        H5_FAILED();
        HDprintf("    couldn't commit datatype '%s'\n", OBJECT_COPY_ALREADY_EXISTING_TEST_DTYPE_NAME);
        goto error;
    }

    PASSED();

    BEGIN_MULTIPART
    {
        PART_BEGIN(H5Ocopy_already_existing_group)
        {
            TESTING_2("H5Ocopy group to location where group already exists");

            /* Copying onto an existing name must fail; suppress the expected error stack */
            H5E_BEGIN_TRY
            {
                err_ret = H5Ocopy(group_id, OBJECT_COPY_ALREADY_EXISTING_TEST_GROUP_NAME, group_id,
                                  OBJECT_COPY_ALREADY_EXISTING_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (err_ret >= 0) {
                H5_FAILED();
                HDprintf("    group copy succeeded in location where group already exists!\n");
                PART_ERROR(H5Ocopy_already_existing_group);
            }

            PASSED();
        }
        PART_END(H5Ocopy_already_existing_group);

        PART_BEGIN(H5Ocopy_already_existing_dset)
        {
            TESTING_2("H5Ocopy dataset to location where dataset already exists");

            /* Copying onto an existing name must fail; suppress the expected error stack */
            H5E_BEGIN_TRY
            {
                err_ret = H5Ocopy(group_id, OBJECT_COPY_ALREADY_EXISTING_TEST_DSET_NAME, group_id,
                                  OBJECT_COPY_ALREADY_EXISTING_TEST_DSET_NAME, H5P_DEFAULT, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (err_ret >= 0) {
                H5_FAILED();
                HDprintf("    dataset copy succeeded in location where dataset already exists!\n");
                PART_ERROR(H5Ocopy_already_existing_dset);
            }

            PASSED();
        }
        PART_END(H5Ocopy_already_existing_dset);

        PART_BEGIN(H5Ocopy_already_existing_dtype)
        {
            TESTING_2("H5Ocopy committed datatype to location where committed datatype already exists");

            /* Copying onto an existing name must fail; suppress the expected error stack */
            H5E_BEGIN_TRY
            {
                err_ret = H5Ocopy(group_id, OBJECT_COPY_ALREADY_EXISTING_TEST_DTYPE_NAME, group_id,
                                  OBJECT_COPY_ALREADY_EXISTING_TEST_DTYPE_NAME, H5P_DEFAULT, H5P_DEFAULT);
            }
            H5E_END_TRY;

            if (err_ret >= 0) {
                H5_FAILED();
                HDprintf("    committed datatype copy succeeded in location where committed datatype already "
                         "exists!\n");
                PART_ERROR(H5Ocopy_already_existing_dtype);
            }

            PASSED();
        }
        PART_END(H5Ocopy_already_existing_dtype);
    }
    END_MULTIPART;

    TESTING_2("test cleanup");

    if (H5Sclose(space_id) < 0)
        TEST_ERROR;
    if (H5Tclose(dset_dtype) < 0)
        TEST_ERROR;
    if (H5Tclose(dtype_id) < 0)
        TEST_ERROR;
    if (H5Dclose(dset_id) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id2) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup of every ID this test may have opened */
    H5E_BEGIN_TRY
    {
        H5Sclose(space_id);
        H5Tclose(dset_dtype);
        H5Tclose(dtype_id);
        H5Dclose(dset_id);
        H5Gclose(group_id2);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
/*
 * A test to exercise the H5O_COPY_SHALLOW_HIERARCHY_FLAG flag
 * for H5Ocopy.
 *
 * Setup creates a group containing OBJECT_COPY_SHALLOW_TEST_NUM_NESTED_OBJS
 * immediate subgroups, with one additional group nested under the last of
 * them.  The group is then copied with the shallow-hierarchy flag set, and
 * the test verifies that only the immediate members were copied (the
 * doubly-nested group must NOT appear in the copy).
 *
 * Returns 0 on success (or when the connector lacks required support and
 * the test is skipped), 1 on failure.
 */
static int
test_object_copy_shallow_group_copy(void)
{
    H5G_info_t group_info;
    htri_t     object_link_exists;
    size_t     i;
    hid_t      file_id         = H5I_INVALID_HID;
    hid_t      container_group = H5I_INVALID_HID;
    hid_t      group_id        = H5I_INVALID_HID;
    hid_t      group_id2       = H5I_INVALID_HID;
    hid_t      tmp_group_id    = H5I_INVALID_HID;
    hid_t      ocpypl_id       = H5I_INVALID_HID;

    TESTING("object copying with H5O_COPY_SHALLOW_HIERARCHY_FLAG flag");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC)) {
        SKIPPED();
        HDprintf("    API functions for basic file, group, object, or link aren't supported with this "
                 "connector\n");
        return 0;
    }

    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
        goto error;
    }

    if ((group_id = H5Gcreate2(container_group, OBJECT_COPY_SHALLOW_TEST_SUBGROUP_NAME, H5P_DEFAULT,
                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create container subgroup '%s'\n", OBJECT_COPY_SHALLOW_TEST_SUBGROUP_NAME);
        goto error;
    }

    /* Create the test group object, along with its nested members. */
    if ((group_id2 = H5Gcreate2(group_id, OBJECT_COPY_SHALLOW_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
                                H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create group '%s'\n", OBJECT_COPY_SHALLOW_TEST_GROUP_NAME);
        goto error;
    }

    for (i = 0; i < (size_t)OBJECT_COPY_SHALLOW_TEST_NUM_NESTED_OBJS; i++) {
        char grp_name[OBJECT_COPY_SHALLOW_TEST_BUF_SIZE];

        snprintf(grp_name, OBJECT_COPY_SHALLOW_TEST_BUF_SIZE, "grp%d", (int)i);

        if ((tmp_group_id = H5Gcreate2(group_id2, grp_name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
            H5_FAILED();
            HDprintf("    couldn't create group '%s' under group '%s'\n", grp_name,
                     OBJECT_COPY_SHALLOW_TEST_GROUP_NAME);
            goto error;
        }

        /* Create a further nested group under the last group added */
        /* (this doubly-nested group is what a shallow copy should NOT duplicate) */
        if (i == (OBJECT_COPY_SHALLOW_TEST_NUM_NESTED_OBJS - 1)) {
            /* Create and immediately close the nested group in one expression */
            if (H5Gclose(H5Gcreate2(tmp_group_id, OBJECT_COPY_SHALLOW_TEST_DEEP_NESTED_GROUP_NAME,
                                    H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf("    couldn't create nested group '%s' under group '%s'\n",
                         OBJECT_COPY_SHALLOW_TEST_DEEP_NESTED_GROUP_NAME, grp_name);
                goto error;
            }
        }

        if (H5Gclose(tmp_group_id) < 0) {
            H5_FAILED();
            HDprintf("    couldn't close group '%s'\n", grp_name);
            goto error;
        }
    }

    if ((ocpypl_id = H5Pcreate(H5P_OBJECT_COPY)) < 0) {
        H5_FAILED();
        HDprintf("    couldn't create OCopyPL\n");
        goto error;
    }

    if (H5Pset_copy_object(ocpypl_id, H5O_COPY_SHALLOW_HIERARCHY_FLAG) < 0) {
        H5_FAILED();
        HDprintf("    couldn't set object copying options\n");
        goto error;
    }

    if (H5Ocopy(group_id, OBJECT_COPY_SHALLOW_TEST_GROUP_NAME, group_id,
                OBJECT_COPY_SHALLOW_TEST_NEW_GROUP_NAME, ocpypl_id, H5P_DEFAULT) < 0) {
        H5_FAILED();
        HDprintf("    failed to copy group '%s' to '%s'\n", OBJECT_COPY_SHALLOW_TEST_GROUP_NAME,
                 OBJECT_COPY_SHALLOW_TEST_NEW_GROUP_NAME);
        goto error;
    }

    if ((object_link_exists = H5Lexists(group_id, OBJECT_COPY_SHALLOW_TEST_NEW_GROUP_NAME, H5P_DEFAULT)) <
        0) {
        H5_FAILED();
        HDprintf("    couldn't determine if link '%s' to copied group exists\n",
                 OBJECT_COPY_SHALLOW_TEST_NEW_GROUP_NAME);
        goto error;
    }

    if (!object_link_exists) {
        H5_FAILED();
        HDprintf("    link '%s' to copied group didn't exist!\n", OBJECT_COPY_SHALLOW_TEST_NEW_GROUP_NAME);
        goto error;
    }

    /*
     * Ensure that the new group has only the immediate members of the copied group.
     */
    if ((tmp_group_id = H5Gopen2(group_id, OBJECT_COPY_SHALLOW_TEST_NEW_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf("    failed to open group copy '%s'\n", OBJECT_COPY_SHALLOW_TEST_NEW_GROUP_NAME);
        goto error;
    }

    memset(&group_info, 0, sizeof(group_info));

    /*
     * Set link count to zero in case the connector doesn't support
     * retrieval of group info.
     */
    group_info.nlinks = 0;

    if (H5Gget_info(tmp_group_id, &group_info) < 0) {
        H5_FAILED();
        HDprintf("    failed to retrieve group info\n");
        goto error;
    }

    if (group_info.nlinks != OBJECT_COPY_SHALLOW_TEST_NUM_NESTED_OBJS) {
        H5_FAILED();
        HDprintf("    copied group contained %d members instead of %d members after a shallow copy!\n",
                 (int)group_info.nlinks, OBJECT_COPY_SHALLOW_TEST_NUM_NESTED_OBJS);
        goto error;
    }

    if (H5Gclose(tmp_group_id) < 0)
        TEST_ERROR;

    /*
     * Ensure that the last immediate member of the copied group doesn't
     * contain any members after the shallow copy.
     */
    {
        char grp_name[OBJECT_COPY_SHALLOW_TEST_BUF_SIZE];

        /* Path to the copy of the last immediate subgroup (the one that had a nested child) */
        snprintf(grp_name, OBJECT_COPY_SHALLOW_TEST_BUF_SIZE,
                 OBJECT_COPY_SHALLOW_TEST_NEW_GROUP_NAME "/grp%d",
                 OBJECT_COPY_SHALLOW_TEST_NUM_NESTED_OBJS - 1);

        if ((tmp_group_id = H5Gopen2(group_id, grp_name, H5P_DEFAULT)) < 0) {
            H5_FAILED();
            HDprintf("    failed to open group '%s'\n", grp_name);
            goto error;
        }

        memset(&group_info, 0, sizeof(group_info));

        /*
         * Set link count to non-zero in case the connector doesn't support
         * retrieval of group info.
         */
        group_info.nlinks = 1;

        if (H5Gget_info(tmp_group_id, &group_info) < 0) {
            H5_FAILED();
            HDprintf("    failed to retrieve group info\n");
            goto error;
        }

        if (group_info.nlinks != 0) {
            H5_FAILED();
            HDprintf("    copied group's immediate members contained nested members after a shallow copy!\n");
            goto error;
        }

        if (H5Gclose(tmp_group_id) < 0) {
            H5_FAILED();
            HDprintf("    failed to close group '%s'\n", grp_name);
            goto error;
        }
    }

    if (H5Pclose(ocpypl_id) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id2) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup of every ID this test may have opened */
    H5E_BEGIN_TRY
    {
        H5Pclose(ocpypl_id);
        H5Gclose(tmp_group_id);
        H5Gclose(group_id2);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
+/*
+ * Tests to exercise the H5O_COPY_WITHOUT_ATTR_FLAG flag
+ * of H5Ocopy.
+ */
+static int
+test_object_copy_no_attributes(void)
+{
+ H5O_info2_t object_info;
+ htri_t object_link_exists;
+ size_t i;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t group_id2 = H5I_INVALID_HID;
+ hid_t tmp_group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t tmp_dset_id = H5I_INVALID_HID;
+ hid_t dset_dtype = H5I_INVALID_HID;
+ hid_t dtype_id = H5I_INVALID_HID;
+ hid_t tmp_dtype_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t attr_space_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t ocpypl_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("object copying with H5O_COPY_WITHOUT_ATTR_FLAG flag");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, object, link, dataset, attribute, or stored "
+ "datatype aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, OBJECT_COPY_NO_ATTRS_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n", OBJECT_COPY_NO_ATTRS_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id = generate_random_dataspace(OBJECT_COPY_NO_ATTRS_TEST_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+ TEST_ERROR;
+ if ((attr_space_id = generate_random_dataspace(OBJECT_COPY_NO_ATTRS_TEST_SPACE_RANK, NULL, NULL, TRUE)) <
+ 0)
+ TEST_ERROR;
+
+ if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+ TEST_ERROR;
+
+ /* Create the test group object, along with the attributes attached to it. */
+ if ((group_id2 = H5Gcreate2(group_id, OBJECT_COPY_NO_ATTRS_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", OBJECT_COPY_NO_ATTRS_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < (size_t)OBJECT_COPY_NO_ATTRS_TEST_NUM_ATTRS; i++) {
+ char attr_name[OBJECT_COPY_NO_ATTRS_TEST_BUF_SIZE];
+
+ snprintf(attr_name, OBJECT_COPY_NO_ATTRS_TEST_BUF_SIZE, "attr%d", (int)i);
+
+ if ((attr_id = H5Acreate2(group_id2, attr_name, H5T_NATIVE_INT, attr_space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s' on group '%s'\n", attr_name,
+ OBJECT_COPY_NO_ATTRS_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close attribute '%s'\n", attr_name);
+ goto error;
+ }
+ }
+
+ /* Create the test dataset object, along with the attributes attached to it. */
+ if ((dset_id = H5Dcreate2(group_id, OBJECT_COPY_NO_ATTRS_TEST_DSET_NAME, dset_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", OBJECT_COPY_NO_ATTRS_TEST_DSET_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < (size_t)OBJECT_COPY_NO_ATTRS_TEST_NUM_ATTRS; i++) {
+ char attr_name[OBJECT_COPY_NO_ATTRS_TEST_BUF_SIZE];
+
+ snprintf(attr_name, OBJECT_COPY_NO_ATTRS_TEST_BUF_SIZE, "attr%d", (int)i);
+
+ if ((attr_id = H5Acreate2(dset_id, attr_name, H5T_NATIVE_INT, attr_space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s' on dataset '%s'\n", attr_name,
+ OBJECT_COPY_NO_ATTRS_TEST_DSET_NAME);
+ goto error;
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close attribute '%s'\n", attr_name);
+ goto error;
+ }
+ }
+
+ /* Create the test committed datatype object, along with the attributes attached to it. */
+ if ((dtype_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create datatype\n");
+ goto error;
+ }
+
+ if (H5Tcommit2(group_id, OBJECT_COPY_NO_ATTRS_TEST_DTYPE_NAME, dtype_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't commit datatype '%s'\n", OBJECT_COPY_NO_ATTRS_TEST_DTYPE_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < (size_t)OBJECT_COPY_NO_ATTRS_TEST_NUM_ATTRS; i++) {
+ char attr_name[OBJECT_COPY_NO_ATTRS_TEST_BUF_SIZE];
+
+ snprintf(attr_name, OBJECT_COPY_NO_ATTRS_TEST_BUF_SIZE, "attr%d", (int)i);
+
+ if ((attr_id = H5Acreate2(dtype_id, attr_name, H5T_NATIVE_INT, attr_space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s' on committed datatype '%s'\n", attr_name,
+ OBJECT_COPY_NO_ATTRS_TEST_DTYPE_NAME);
+ goto error;
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close attribute '%s'\n", attr_name);
+ goto error;
+ }
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Ocopy_group_no_attributes)
+ {
+ TESTING_2("H5Ocopy on a group (without attributes)");
+
+ if ((ocpypl_id = H5Pcreate(H5P_OBJECT_COPY)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create OCopyPL\n");
+ PART_ERROR(H5Ocopy_group_no_attributes);
+ }
+
+ if (H5Pset_copy_object(ocpypl_id, H5O_COPY_WITHOUT_ATTR_FLAG) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set object copying options\n");
+ PART_ERROR(H5Ocopy_group_no_attributes);
+ }
+
+ if (H5Ocopy(group_id, OBJECT_COPY_NO_ATTRS_TEST_GROUP_NAME, group_id,
+ OBJECT_COPY_NO_ATTRS_TEST_NEW_GROUP_NAME, ocpypl_id, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy group '%s' to '%s'\n", OBJECT_COPY_NO_ATTRS_TEST_GROUP_NAME,
+ OBJECT_COPY_NO_ATTRS_TEST_NEW_GROUP_NAME);
+ PART_ERROR(H5Ocopy_group_no_attributes);
+ }
+
+ if ((object_link_exists =
+ H5Lexists(group_id, OBJECT_COPY_NO_ATTRS_TEST_NEW_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' to copied group exists\n",
+ OBJECT_COPY_NO_ATTRS_TEST_NEW_GROUP_NAME);
+ PART_ERROR(H5Ocopy_group_no_attributes);
+ }
+
+ if (!object_link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' to copied group didn't exist!\n",
+ OBJECT_COPY_NO_ATTRS_TEST_NEW_GROUP_NAME);
+ PART_ERROR(H5Ocopy_group_no_attributes);
+ }
+
+ /* Ensure that the new group has no attributes */
+ if ((tmp_group_id = H5Gopen2(group_id, OBJECT_COPY_NO_ATTRS_TEST_NEW_GROUP_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" failed to open group copy '%s'\n", OBJECT_COPY_NO_ATTRS_TEST_NEW_GROUP_NAME);
+ PART_ERROR(H5Ocopy_group_no_attributes);
+ }
+
+ memset(&object_info, 0, sizeof(object_info));
+
+ /*
+ * Set attribute count to non-zero in case the connector doesn't
+ * support retrieval of object info.
+ */
+ object_info.num_attrs = 1;
+
+ if (H5Oget_info3(tmp_group_id, &object_info, H5O_INFO_ALL) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve object info\n");
+ PART_ERROR(H5Ocopy_group_no_attributes);
+ }
+
+ if (object_info.num_attrs != 0) {
+ H5_FAILED();
+ HDprintf(" copied group contained attributes after a non-attribute copy!\n");
+ PART_ERROR(H5Ocopy_group_no_attributes);
+ }
+
+ if (H5Pclose(ocpypl_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close OCopyPL\n");
+ PART_ERROR(H5Ocopy_group_no_attributes);
+ }
+
+ if (H5Gclose(tmp_group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group copy\n");
+ PART_ERROR(H5Ocopy_group_no_attributes);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ocopy_group_no_attributes);
+
+ if (ocpypl_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(ocpypl_id);
+ }
+ H5E_END_TRY;
+ ocpypl_id = H5I_INVALID_HID;
+ }
+ if (tmp_group_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(tmp_group_id);
+ }
+ H5E_END_TRY;
+ tmp_group_id = H5I_INVALID_HID;
+ }
+
+ PART_BEGIN(H5Ocopy_dset_no_attributes)
+ {
+ TESTING_2("H5Ocopy on a dataset (without attributes)");
+
+ if ((ocpypl_id = H5Pcreate(H5P_OBJECT_COPY)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create OCopyPL\n");
+ PART_ERROR(H5Ocopy_dset_no_attributes);
+ }
+
+ if (H5Pset_copy_object(ocpypl_id, H5O_COPY_WITHOUT_ATTR_FLAG) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set object copying options\n");
+ PART_ERROR(H5Ocopy_dset_no_attributes);
+ }
+
+ if (H5Ocopy(group_id, OBJECT_COPY_NO_ATTRS_TEST_DSET_NAME, group_id,
+ OBJECT_COPY_NO_ATTRS_TEST_NEW_DSET_NAME, ocpypl_id, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy dataset '%s' to '%s'\n", OBJECT_COPY_NO_ATTRS_TEST_DSET_NAME,
+ OBJECT_COPY_NO_ATTRS_TEST_NEW_DSET_NAME);
+ PART_ERROR(H5Ocopy_dset_no_attributes);
+ }
+
+ if ((object_link_exists =
+ H5Lexists(group_id, OBJECT_COPY_NO_ATTRS_TEST_NEW_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' to copied dataset exists\n",
+ OBJECT_COPY_NO_ATTRS_TEST_NEW_DSET_NAME);
+ PART_ERROR(H5Ocopy_dset_no_attributes);
+ }
+
+ if (!object_link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' to copied dataset didn't exist!\n",
+ OBJECT_COPY_NO_ATTRS_TEST_NEW_DSET_NAME);
+ PART_ERROR(H5Ocopy_dset_no_attributes);
+ }
+
+ /* Ensure that the new dataset doesn't have any attributes */
+ if ((tmp_dset_id = H5Dopen2(group_id, OBJECT_COPY_NO_ATTRS_TEST_NEW_DSET_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" failed to open dataset copy '%s'\n", OBJECT_COPY_NO_ATTRS_TEST_NEW_DSET_NAME);
+ PART_ERROR(H5Ocopy_dset_no_attributes);
+ }
+
+ memset(&object_info, 0, sizeof(object_info));
+
+ /*
+ * Set attribute count to non-zero in case the connector doesn't
+ * support retrieval of object info.
+ */
+ object_info.num_attrs = 1;
+
+ if (H5Oget_info3(tmp_dset_id, &object_info, H5O_INFO_ALL) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve object info\n");
+ PART_ERROR(H5Ocopy_dset_no_attributes);
+ }
+
+ if (object_info.num_attrs != 0) {
+ H5_FAILED();
+ HDprintf(" copied dataset contained attributes after a non-attribute copy!\n");
+ PART_ERROR(H5Ocopy_dset_no_attributes);
+ }
+
+ if (H5Pclose(ocpypl_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close OCopyPL\n");
+ PART_ERROR(H5Ocopy_dset_no_attributes);
+ }
+
+ if (H5Dclose(tmp_dset_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close dataset copy\n");
+ PART_ERROR(H5Ocopy_dset_no_attributes);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ocopy_dset_no_attributes);
+
+ if (ocpypl_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(ocpypl_id);
+ }
+ H5E_END_TRY;
+ ocpypl_id = H5I_INVALID_HID;
+ }
+ if (tmp_dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(tmp_dset_id);
+ }
+ H5E_END_TRY;
+ tmp_dset_id = H5I_INVALID_HID;
+ }
+
+ PART_BEGIN(H5Ocopy_dtype_no_attributes)
+ {
+ TESTING_2("H5Ocopy on a committed datatype (without attributes)");
+
+ if ((ocpypl_id = H5Pcreate(H5P_OBJECT_COPY)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create OCopyPL\n");
+ PART_ERROR(H5Ocopy_dtype_no_attributes);
+ }
+
+ if (H5Pset_copy_object(ocpypl_id, H5O_COPY_WITHOUT_ATTR_FLAG) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set object copying options\n");
+ PART_ERROR(H5Ocopy_dtype_no_attributes);
+ }
+
+ if (H5Ocopy(group_id, OBJECT_COPY_NO_ATTRS_TEST_DTYPE_NAME, group_id,
+ OBJECT_COPY_NO_ATTRS_TEST_NEW_DTYPE_NAME, ocpypl_id, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy datatype '%s' to '%s'\n", OBJECT_COPY_NO_ATTRS_TEST_DTYPE_NAME,
+ OBJECT_COPY_NO_ATTRS_TEST_NEW_DTYPE_NAME);
+ PART_ERROR(H5Ocopy_dtype_no_attributes);
+ }
+
+ if ((object_link_exists =
+ H5Lexists(group_id, OBJECT_COPY_NO_ATTRS_TEST_NEW_DTYPE_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' to copied datatype exists\n",
+ OBJECT_COPY_NO_ATTRS_TEST_NEW_DTYPE_NAME);
+ PART_ERROR(H5Ocopy_dtype_no_attributes);
+ }
+
+ if (!object_link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' to copied datatype didn't exist!\n",
+ OBJECT_COPY_NO_ATTRS_TEST_NEW_DTYPE_NAME);
+ PART_ERROR(H5Ocopy_dtype_no_attributes);
+ }
+
+ /* Ensure that the new committed datatype doesn't have any attributes */
+ if ((tmp_dtype_id = H5Topen2(group_id, OBJECT_COPY_NO_ATTRS_TEST_NEW_DTYPE_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" failed to open dataset copy '%s'\n", OBJECT_COPY_NO_ATTRS_TEST_NEW_DTYPE_NAME);
+ PART_ERROR(H5Ocopy_dtype_no_attributes);
+ }
+
+ memset(&object_info, 0, sizeof(object_info));
+
+ /*
+ * Set attribute count to non-zero in case the connector doesn't
+ * support retrieval of object info.
+ */
+ object_info.num_attrs = 1;
+
+ if (H5Oget_info3(tmp_dtype_id, &object_info, H5O_INFO_ALL) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve object info\n");
+ PART_ERROR(H5Ocopy_dtype_no_attributes);
+ }
+
+ if (object_info.num_attrs != 0) {
+ H5_FAILED();
+ HDprintf(" copied committed datatype contained attributes after a non-attribute copy!\n");
+ PART_ERROR(H5Ocopy_dtype_no_attributes);
+ }
+
+ if (H5Pclose(ocpypl_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close OCopyPL\n");
+ PART_ERROR(H5Ocopy_dtype_no_attributes);
+ }
+
+ if (H5Tclose(tmp_dtype_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close datatype copy\n");
+ PART_ERROR(H5Ocopy_dtype_no_attributes);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ocopy_dtype_no_attributes);
+
+ if (ocpypl_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(ocpypl_id);
+ }
+ H5E_END_TRY;
+ ocpypl_id = H5I_INVALID_HID;
+ }
+ if (tmp_dtype_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Tclose(tmp_dtype_id);
+ }
+ H5E_END_TRY;
+ tmp_dtype_id = H5I_INVALID_HID;
+ }
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(attr_space_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dset_dtype) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dtype_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id2) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(ocpypl_id);
+ H5Sclose(attr_space_id);
+ H5Sclose(space_id);
+ H5Aclose(attr_id);
+ H5Tclose(dset_dtype);
+ H5Tclose(tmp_dtype_id);
+ H5Tclose(dtype_id);
+ H5Dclose(tmp_dset_id);
+ H5Dclose(dset_id);
+ H5Gclose(tmp_group_id);
+ H5Gclose(group_id2);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * Tests to exercise the behavior of H5Ocopy when the source
+ * object specified is a soft link or dangling soft link.
+ */
+static int
+test_object_copy_by_soft_link(void)
+{
+    H5O_info2_t object_info;
+    H5G_info_t  group_info;
+    H5L_info2_t link_info;
+    htri_t      object_link_exists;
+    size_t      i;
+    hid_t       file_id         = H5I_INVALID_HID;
+    hid_t       container_group = H5I_INVALID_HID;
+    hid_t       group_id        = H5I_INVALID_HID;
+    hid_t       group_id2       = H5I_INVALID_HID;
+    hid_t       tmp_group_id    = H5I_INVALID_HID;
+    hid_t       attr_id         = H5I_INVALID_HID;
+    hid_t       attr_space_id   = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("object copying through use of soft links");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+        SKIPPED();
+        HDprintf(" API functions for basic file, group, object, link, dataset, attribute, iterate, or "
+                 "soft link aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, OBJECT_COPY_SOFT_LINK_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create container subgroup '%s'\n", OBJECT_COPY_SOFT_LINK_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    if ((attr_space_id = generate_random_dataspace(OBJECT_COPY_SOFT_LINK_TEST_SPACE_RANK, NULL, NULL, TRUE)) <
+        0)
+        TEST_ERROR;
+
+    /* Create the test group object, along with its nested members and the attributes attached to it. */
+    if ((group_id2 = H5Gcreate2(group_id, OBJECT_COPY_SOFT_LINK_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+                                H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create group '%s'\n", OBJECT_COPY_SOFT_LINK_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Populate the group with immediate member groups; these are what a shallow copy should carry over. */
+    for (i = 0; i < (size_t)OBJECT_COPY_SOFT_LINK_TEST_NUM_NESTED_OBJS; i++) {
+        char grp_name[OBJECT_COPY_SOFT_LINK_TEST_BUF_SIZE];
+
+        snprintf(grp_name, OBJECT_COPY_SOFT_LINK_TEST_BUF_SIZE, "grp%d", (int)i);
+
+        if ((tmp_group_id = H5Gcreate2(group_id2, grp_name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf(" couldn't create group '%s' under group '%s'\n", grp_name,
+                     OBJECT_COPY_SOFT_LINK_TEST_GROUP_NAME);
+            goto error;
+        }
+
+        /* Create a further nested group under the last group added */
+        if (i == (OBJECT_COPY_SOFT_LINK_TEST_NUM_NESTED_OBJS - 1)) {
+            if (H5Gclose(H5Gcreate2(tmp_group_id, OBJECT_COPY_SOFT_LINK_TEST_DEEP_NESTED_GROUP_NAME,
+                                    H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't create nested group '%s' under group '%s'\n",
+                         OBJECT_COPY_SOFT_LINK_TEST_DEEP_NESTED_GROUP_NAME, grp_name);
+                goto error;
+            }
+        }
+
+        if (H5Gclose(tmp_group_id) < 0) {
+            H5_FAILED();
+            HDprintf(" couldn't close group '%s'\n", grp_name);
+            goto error;
+        }
+    }
+
+    /* Attach attributes to the source group so the copy's attribute count can be verified. */
+    for (i = 0; i < (size_t)OBJECT_COPY_SOFT_LINK_TEST_NUM_ATTRS; i++) {
+        char attr_name[OBJECT_COPY_SOFT_LINK_TEST_BUF_SIZE];
+
+        snprintf(attr_name, OBJECT_COPY_SOFT_LINK_TEST_BUF_SIZE, "attr%d", (int)i);
+
+        if ((attr_id = H5Acreate2(group_id2, attr_name, H5T_NATIVE_INT, attr_space_id, H5P_DEFAULT,
+                                  H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf(" couldn't create attribute '%s' on group '%s'\n", attr_name,
+                     OBJECT_COPY_SOFT_LINK_TEST_GROUP_NAME);
+            goto error;
+        }
+
+        if (H5Aclose(attr_id) < 0) {
+            H5_FAILED();
+            HDprintf(" couldn't close attribute '%s'\n", attr_name);
+            goto error;
+        }
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Ocopy_through_soft_link)
+        {
+            TESTING_2("H5Ocopy through use of a soft link");
+
+            /* Create a soft link pointing at the source group; H5Ocopy is then given the
+             * link name and should resolve it to the underlying group object. */
+            if (H5Lcreate_soft("/" OBJECT_TEST_GROUP_NAME "/" OBJECT_COPY_SOFT_LINK_TEST_SUBGROUP_NAME
+                               "/" OBJECT_COPY_SOFT_LINK_TEST_GROUP_NAME,
+                               group_id, OBJECT_COPY_SOFT_LINK_TEST_SOFT_LINK_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to create soft link '%s' to group for copying\n",
+                         OBJECT_COPY_SOFT_LINK_TEST_SOFT_LINK_NAME);
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            if (H5Ocopy(group_id, OBJECT_COPY_SOFT_LINK_TEST_SOFT_LINK_NAME, group_id,
+                        OBJECT_COPY_SOFT_LINK_TEST_NEW_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to copy group '%s' to '%s'\n", OBJECT_COPY_SOFT_LINK_TEST_GROUP_NAME,
+                         OBJECT_COPY_SOFT_LINK_TEST_NEW_GROUP_NAME);
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            if ((object_link_exists =
+                     H5Lexists(group_id, OBJECT_COPY_SOFT_LINK_TEST_NEW_GROUP_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't determine if link '%s' to copied group exists\n",
+                         OBJECT_COPY_SOFT_LINK_TEST_NEW_GROUP_NAME);
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            if (!object_link_exists) {
+                H5_FAILED();
+                HDprintf(" link '%s' to copied group didn't exist!\n",
+                         OBJECT_COPY_SOFT_LINK_TEST_NEW_GROUP_NAME);
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            /* Make sure the new object is an actual group and not another soft link */
+            memset(&link_info, 0, sizeof(link_info));
+            if (H5Lget_info2(group_id, OBJECT_COPY_SOFT_LINK_TEST_NEW_GROUP_NAME, &link_info, H5P_DEFAULT) <
+                0) {
+                H5_FAILED();
+                HDprintf(" failed to retrieve info for link '%s'\n",
+                         OBJECT_COPY_SOFT_LINK_TEST_NEW_GROUP_NAME);
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            if (link_info.type != H5L_TYPE_HARD) {
+                H5_FAILED();
+                HDprintf(
+                    " after group copy through soft link, group's new link type wasn't H5L_TYPE_HARD!\n");
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            /*
+             * Ensure that the new group doesn't have any attributes and only the
+             * immediate members of the copied group.
+             */
+            if ((tmp_group_id = H5Gopen2(group_id, OBJECT_COPY_SOFT_LINK_TEST_NEW_GROUP_NAME, H5P_DEFAULT)) <
+                0) {
+                H5_FAILED();
+                HDprintf(" failed to open group copy '%s'\n", OBJECT_COPY_SOFT_LINK_TEST_NEW_GROUP_NAME);
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            memset(&group_info, 0, sizeof(group_info));
+
+            /*
+             * Set link count to zero in case the connector doesn't support
+             * retrieval of group info.
+             */
+            group_info.nlinks = 0;
+
+            if (H5Gget_info(tmp_group_id, &group_info) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to retrieve group info\n");
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            if (group_info.nlinks != OBJECT_COPY_SOFT_LINK_TEST_NUM_NESTED_OBJS) {
+                H5_FAILED();
+                HDprintf(
+                    " copied group contained %d members instead of %d members after a shallow copy!\n",
+                    (int)group_info.nlinks, OBJECT_COPY_SOFT_LINK_TEST_NUM_NESTED_OBJS);
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            memset(&object_info, 0, sizeof(object_info));
+
+            /*
+             * Set attribute count to zero in case the connector doesn't
+             * support retrieval of object info.
+             */
+            object_info.num_attrs = 0;
+
+            if (H5Oget_info3(tmp_group_id, &object_info, H5O_INFO_ALL) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to retrieve object info\n");
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            if (object_info.num_attrs == 0) {
+                H5_FAILED();
+                HDprintf(" copied group didn't contain any attributes after copy operation!\n");
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            /* Check the attribute names, types, etc. */
+            i = 0;
+            if (H5Aiterate2(tmp_group_id, H5_INDEX_NAME, H5_ITER_INC, NULL,
+                            object_copy_attribute_iter_callback, &i) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to iterate over copied group's attributes\n");
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            if (i != OBJECT_COPY_SOFT_LINK_TEST_NUM_ATTRS) {
+                H5_FAILED();
+                HDprintf(
+                    " number of attributes on copied group (%llu) didn't match expected number (%llu)!\n",
+                    (unsigned long long)i, (unsigned long long)OBJECT_COPY_SOFT_LINK_TEST_NUM_ATTRS);
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            if (H5Gclose(tmp_group_id) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to close group copy\n");
+                PART_ERROR(H5Ocopy_through_soft_link);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ocopy_through_soft_link);
+
+        /* Best-effort cleanup between parts; errors are suppressed so a failed part
+         * doesn't leak an ID into the next part. */
+        if (tmp_group_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Gclose(tmp_group_id);
+            }
+            H5E_END_TRY;
+            tmp_group_id = H5I_INVALID_HID;
+        }
+
+        PART_BEGIN(H5Ocopy_through_dangling_soft_link)
+        {
+            herr_t err_ret;
+
+            TESTING_2("H5Ocopy through use of a dangling soft link");
+
+            if (H5Lcreate_soft("/" OBJECT_TEST_GROUP_NAME "/" OBJECT_COPY_SOFT_LINK_TEST_SUBGROUP_NAME
+                               "/nonexistent_object",
+                               group_id, OBJECT_COPY_SOFT_LINK_TEST_DANGLING_LINK_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to create dangling soft link '%s'\n",
+                         OBJECT_COPY_SOFT_LINK_TEST_DANGLING_LINK_NAME);
+                PART_ERROR(H5Ocopy_through_dangling_soft_link);
+            }
+
+            /* Copying through a link whose target doesn't exist must fail */
+            H5E_BEGIN_TRY
+            {
+                err_ret =
+                    H5Ocopy(group_id, OBJECT_COPY_SOFT_LINK_TEST_DANGLING_LINK_NAME, group_id,
+                            OBJECT_COPY_SOFT_LINK_TEST_DANGLING_LINK_NAME "2", H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf(" copied non-existent object through use of a dangling soft link!\n");
+                PART_ERROR(H5Ocopy_through_dangling_soft_link);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ocopy_through_dangling_soft_link);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(attr_space_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Suppress errors while releasing whatever IDs are still open */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(attr_space_id);
+        H5Aclose(attr_id);
+        H5Gclose(tmp_group_id);
+        H5Gclose(group_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * Tests for copying groups that contain soft links with
+ * H5Ocopy. Also tested is the H5O_COPY_EXPAND_SOFT_LINK_FLAG
+ * flag.
+ */
+static int
+test_object_copy_group_with_soft_links(void)
+{
+    H5G_info_t group_info;
+    htri_t     object_link_exists;
+    size_t     i;
+    hid_t      file_id         = H5I_INVALID_HID;
+    hid_t      container_group = H5I_INVALID_HID;
+    hid_t      group_id        = H5I_INVALID_HID;
+    hid_t      group_id2       = H5I_INVALID_HID;
+    hid_t      tmp_group_id    = H5I_INVALID_HID;
+    hid_t      ocpypl_id       = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("group copying when group contains soft links");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+        SKIPPED();
+        HDprintf(" API functions for basic file, group, object, link, or soft link aren't supported with "
+                 "this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_SUBGROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create container subgroup '%s'\n",
+                 OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* Create the test group object. */
+    if ((group_id2 = H5Gcreate2(group_id, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_GROUP_NAME, H5P_DEFAULT,
+                                H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't create group '%s'\n", OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /* Create several groups at the root level and add soft links pointing to them inside
+     * the test group object.
+     */
+    for (i = 0; i < (size_t)OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NUM_NESTED_OBJS; i++) {
+        char grp_name[OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_BUF_SIZE];
+        char lnk_name[OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_BUF_SIZE];
+        char lnk_target[2 * OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_BUF_SIZE];
+
+        snprintf(grp_name, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_BUF_SIZE, "grp%d", (int)i);
+        snprintf(lnk_name, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_BUF_SIZE, "link%d", (int)i);
+        snprintf(lnk_target, 2 * OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_BUF_SIZE,
+                 "/" OBJECT_TEST_GROUP_NAME "/" OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_SUBGROUP_NAME "/%s",
+                 grp_name);
+
+        if ((tmp_group_id = H5Gcreate2(group_id, grp_name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf(" couldn't create group '%s' under group '%s'\n", grp_name,
+                     OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_SUBGROUP_NAME);
+            goto error;
+        }
+
+        if (H5Lcreate_soft(lnk_target, group_id2, lnk_name, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+            H5_FAILED();
+            HDprintf(" failed to create soft link '%s'\n", lnk_name);
+            goto error;
+        }
+
+        if (H5Gclose(tmp_group_id) < 0) {
+            H5_FAILED();
+            HDprintf(" couldn't close group '%s'\n", grp_name);
+            goto error;
+        }
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Ocopy_dont_expand_soft_links)
+        {
+            TESTING_2("H5Ocopy on group with soft links (soft links not expanded)");
+
+            if (H5Ocopy(group_id, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_GROUP_NAME, group_id,
+                        OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NON_EXPAND_GROUP_NAME, H5P_DEFAULT,
+                        H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to copy group '%s' to '%s'\n",
+                         OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_GROUP_NAME,
+                         OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NON_EXPAND_GROUP_NAME);
+                PART_ERROR(H5Ocopy_dont_expand_soft_links);
+            }
+
+            if ((object_link_exists =
+                     H5Lexists(group_id, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NON_EXPAND_GROUP_NAME,
+                               H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't determine if link '%s' to copied group exists\n",
+                         OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NON_EXPAND_GROUP_NAME);
+                PART_ERROR(H5Ocopy_dont_expand_soft_links);
+            }
+
+            if (!object_link_exists) {
+                H5_FAILED();
+                HDprintf(" link '%s' to copied group didn't exist!\n",
+                         OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NON_EXPAND_GROUP_NAME);
+                PART_ERROR(H5Ocopy_dont_expand_soft_links);
+            }
+
+            /* Ensure that the number of links is the same */
+            if ((tmp_group_id =
+                     H5Gopen2(group_id, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NON_EXPAND_GROUP_NAME,
+                              H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to open group copy '%s'\n",
+                         OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NON_EXPAND_GROUP_NAME);
+                PART_ERROR(H5Ocopy_dont_expand_soft_links);
+            }
+
+            memset(&group_info, 0, sizeof(group_info));
+
+            /*
+             * Set link count to zero in case the connector doesn't support
+             * retrieval of group info.
+             */
+            group_info.nlinks = 0;
+
+            if (H5Gget_info(tmp_group_id, &group_info) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to retrieve group info\n");
+                PART_ERROR(H5Ocopy_dont_expand_soft_links);
+            }
+
+            if (group_info.nlinks != OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NUM_NESTED_OBJS) {
+                H5_FAILED();
+                HDprintf(" copied group contained %d members instead of %d members after copy!\n",
+                         (int)group_info.nlinks, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NUM_NESTED_OBJS);
+                PART_ERROR(H5Ocopy_dont_expand_soft_links);
+            }
+
+            /*
+             * Iterate over the links in the copied group and ensure that they're all
+             * still soft links with their original values.
+             */
+            i = 0;
+            if (H5Literate2(tmp_group_id, H5_INDEX_NAME, H5_ITER_INC, NULL,
+                            object_copy_soft_link_non_expand_callback, &i) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to iterate over links in group '%s'\n",
+                         OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NON_EXPAND_GROUP_NAME);
+                PART_ERROR(H5Ocopy_dont_expand_soft_links);
+            }
+
+            if (i != OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NUM_NESTED_OBJS) {
+                H5_FAILED();
+                HDprintf(" number of links in copied group (%llu) didn't match expected number (%llu)!\n",
+                         (unsigned long long)i,
+                         (unsigned long long)OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NUM_NESTED_OBJS);
+                PART_ERROR(H5Ocopy_dont_expand_soft_links);
+            }
+
+            if (H5Gclose(tmp_group_id) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to close group copy\n");
+                PART_ERROR(H5Ocopy_dont_expand_soft_links);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ocopy_dont_expand_soft_links);
+
+        if (tmp_group_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Gclose(tmp_group_id);
+            }
+            H5E_END_TRY;
+            tmp_group_id = H5I_INVALID_HID;
+        }
+
+        PART_BEGIN(H5Ocopy_expand_soft_links)
+        {
+            TESTING_2("H5Ocopy on group with soft links (soft links expanded)");
+
+            if ((ocpypl_id = H5Pcreate(H5P_OBJECT_COPY)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't create OCopyPL\n");
+                PART_ERROR(H5Ocopy_expand_soft_links);
+            }
+
+            if (H5Pset_copy_object(ocpypl_id, H5O_COPY_EXPAND_SOFT_LINK_FLAG) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't set object copying options\n");
+                PART_ERROR(H5Ocopy_expand_soft_links);
+            }
+
+            if (H5Ocopy(group_id, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_GROUP_NAME, group_id,
+                        OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_EXPAND_GROUP_NAME, ocpypl_id,
+                        H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to copy group '%s' to '%s'\n",
+                         OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_GROUP_NAME,
+                         OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_EXPAND_GROUP_NAME);
+                PART_ERROR(H5Ocopy_expand_soft_links);
+            }
+
+            if ((object_link_exists = H5Lexists(
+                     group_id, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_EXPAND_GROUP_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't determine if link '%s' to copied group exists\n",
+                         OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_EXPAND_GROUP_NAME);
+                PART_ERROR(H5Ocopy_expand_soft_links);
+            }
+
+            if (!object_link_exists) {
+                H5_FAILED();
+                HDprintf(" link '%s' to copied group didn't exist!\n",
+                         OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_EXPAND_GROUP_NAME);
+                PART_ERROR(H5Ocopy_expand_soft_links);
+            }
+
+            /* Ensure that the number of links is the same */
+            if ((tmp_group_id = H5Gopen2(group_id, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_EXPAND_GROUP_NAME,
+                                         H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to open group copy '%s'\n",
+                         OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_EXPAND_GROUP_NAME);
+                PART_ERROR(H5Ocopy_expand_soft_links);
+            }
+
+            memset(&group_info, 0, sizeof(group_info));
+
+            /*
+             * Set link count to zero in case the connector doesn't support
+             * retrieval of group info.
+             */
+            group_info.nlinks = 0;
+
+            if (H5Gget_info(tmp_group_id, &group_info) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to retrieve group info\n");
+                PART_ERROR(H5Ocopy_expand_soft_links);
+            }
+
+            if (group_info.nlinks != OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NUM_NESTED_OBJS) {
+                H5_FAILED();
+                HDprintf(" copied group contained %d members instead of %d members after copy!\n",
+                         (int)group_info.nlinks, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NUM_NESTED_OBJS);
+                PART_ERROR(H5Ocopy_expand_soft_links);
+            }
+
+            /*
+             * Iterate over the links in the copied group and ensure that they've all
+             * been expanded into hard links corresponding to the top-level groups
+             * created.
+             */
+            i = 0;
+            if (H5Literate2(tmp_group_id, H5_INDEX_NAME, H5_ITER_INC, NULL,
+                            object_copy_soft_link_expand_callback, &i) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to iterate over links in group '%s'\n",
+                         OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_EXPAND_GROUP_NAME);
+                PART_ERROR(H5Ocopy_expand_soft_links);
+            }
+
+            if (i != OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NUM_NESTED_OBJS) {
+                H5_FAILED();
+                HDprintf(" number of links in copied group (%llu) didn't match expected number (%llu)!\n",
+                         (unsigned long long)i,
+                         (unsigned long long)OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NUM_NESTED_OBJS);
+                PART_ERROR(H5Ocopy_expand_soft_links);
+            }
+
+            if (H5Pclose(ocpypl_id) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to close OCopyPL\n");
+                PART_ERROR(H5Ocopy_expand_soft_links);
+            }
+
+            if (H5Gclose(tmp_group_id) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to close group copy\n");
+                PART_ERROR(H5Ocopy_expand_soft_links);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ocopy_expand_soft_links);
+
+        /* ocpypl_id is a property list ID, so it must be released with H5Pclose
+         * (it was previously closed with H5Gclose here, which would fail and
+         * leak the property list if the part above exited early).
+         */
+        if (ocpypl_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Pclose(ocpypl_id);
+            }
+            H5E_END_TRY;
+            ocpypl_id = H5I_INVALID_HID;
+        }
+        if (tmp_group_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Gclose(tmp_group_id);
+            }
+            H5E_END_TRY;
+            tmp_group_id = H5I_INVALID_HID;
+        }
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Gclose(group_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Suppress errors while releasing whatever IDs are still open */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(ocpypl_id);
+        H5Gclose(tmp_group_id);
+        H5Gclose(group_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * Tests for copying objects between two different files using
+ * H5Ocopy.
+ */
+static int
+test_object_copy_between_files(void)
+{
+ H5O_info2_t object_info;
+ H5G_info_t group_info;
+ htri_t object_link_exists;
+ size_t i;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t file_id2 = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID;
+ hid_t group_id = H5I_INVALID_HID;
+ hid_t group_id2 = H5I_INVALID_HID;
+ hid_t tmp_group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t tmp_dset_id = H5I_INVALID_HID;
+ hid_t dset_dtype = H5I_INVALID_HID;
+ hid_t dtype_id = H5I_INVALID_HID;
+ hid_t tmp_dtype_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t attr_space_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t ocpypl_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("object copying between files");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, object, link, dataset, attribute, stored "
+ "datatype, or iterate aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ /*
+ * Create the second file for the between file copying tests.
+ */
+ if ((file_id2 = H5Fcreate(OBJECT_COPY_BETWEEN_FILES_TEST_FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, OBJECT_COPY_BETWEEN_FILES_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container subgroup '%s'\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ if ((space_id = generate_random_dataspace(OBJECT_COPY_BETWEEN_FILES_TEST_SPACE_RANK, NULL, NULL, FALSE)) <
+ 0)
+ TEST_ERROR;
+ if ((attr_space_id =
+ generate_random_dataspace(OBJECT_COPY_BETWEEN_FILES_TEST_SPACE_RANK, NULL, NULL, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+ TEST_ERROR;
+
+ /* Create the test group object, along with its nested members and the attributes attached to it. */
+ if ((group_id2 = H5Gcreate2(group_id, OBJECT_COPY_BETWEEN_FILES_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", OBJECT_COPY_BETWEEN_FILES_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < (size_t)OBJECT_COPY_BETWEEN_FILES_TEST_NUM_NESTED_OBJS; i++) {
+ char grp_name[OBJECT_COPY_BETWEEN_FILES_TEST_BUF_SIZE];
+
+ snprintf(grp_name, OBJECT_COPY_BETWEEN_FILES_TEST_BUF_SIZE, "grp%d", (int)i);
+
+ if ((tmp_group_id = H5Gcreate2(group_id2, grp_name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s' under group '%s'\n", grp_name,
+ OBJECT_COPY_BETWEEN_FILES_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ /* Create a further nested group under the last group added */
+ if (i == (OBJECT_COPY_BETWEEN_FILES_TEST_NUM_NESTED_OBJS - 1)) {
+ if (H5Gclose(H5Gcreate2(tmp_group_id, OBJECT_COPY_BETWEEN_FILES_TEST_DEEP_NESTED_GROUP_NAME,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create nested group '%s' under group '%s'\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_DEEP_NESTED_GROUP_NAME, grp_name);
+ goto error;
+ }
+ }
+
+ if (H5Gclose(tmp_group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close group '%s'\n", grp_name);
+ goto error;
+ }
+ }
+
+ for (i = 0; i < (size_t)OBJECT_COPY_BETWEEN_FILES_TEST_NUM_ATTRS; i++) {
+ char attr_name[OBJECT_COPY_BETWEEN_FILES_TEST_BUF_SIZE];
+
+ snprintf(attr_name, OBJECT_COPY_BETWEEN_FILES_TEST_BUF_SIZE, "attr%d", (int)i);
+
+ if ((attr_id = H5Acreate2(group_id2, attr_name, H5T_NATIVE_INT, attr_space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s' on group '%s'\n", attr_name,
+ OBJECT_COPY_BETWEEN_FILES_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close attribute '%s'\n", attr_name);
+ goto error;
+ }
+ }
+
+ /* Create the test dataset object, along with the attributes attached to it. */
+ if ((dset_id = H5Dcreate2(group_id, OBJECT_COPY_BETWEEN_FILES_TEST_DSET_NAME, dset_dtype, space_id,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", OBJECT_COPY_BETWEEN_FILES_TEST_DSET_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < (size_t)OBJECT_COPY_BETWEEN_FILES_TEST_NUM_ATTRS; i++) {
+ char attr_name[OBJECT_COPY_BETWEEN_FILES_TEST_BUF_SIZE];
+
+ snprintf(attr_name, OBJECT_COPY_BETWEEN_FILES_TEST_BUF_SIZE, "attr%d", (int)i);
+
+ if ((attr_id = H5Acreate2(dset_id, attr_name, H5T_NATIVE_INT, attr_space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s' on dataset '%s'\n", attr_name,
+ OBJECT_COPY_BETWEEN_FILES_TEST_DSET_NAME);
+ goto error;
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close attribute '%s'\n", attr_name);
+ goto error;
+ }
+ }
+
+ /* Create the test committed datatype object, along with the attributes attached to it. */
+ if ((dtype_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create datatype\n");
+ goto error;
+ }
+
+ if (H5Tcommit2(group_id, OBJECT_COPY_BETWEEN_FILES_TEST_DTYPE_NAME, dtype_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't commit datatype '%s'\n", OBJECT_COPY_BETWEEN_FILES_TEST_DTYPE_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < (size_t)OBJECT_COPY_BETWEEN_FILES_TEST_NUM_ATTRS; i++) {
+ char attr_name[OBJECT_COPY_BETWEEN_FILES_TEST_BUF_SIZE];
+
+ snprintf(attr_name, OBJECT_COPY_BETWEEN_FILES_TEST_BUF_SIZE, "attr%d", (int)i);
+
+ if ((attr_id = H5Acreate2(dtype_id, attr_name, H5T_NATIVE_INT, attr_space_id, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create attribute '%s' on committed datatype '%s'\n", attr_name,
+ OBJECT_COPY_BETWEEN_FILES_TEST_DTYPE_NAME);
+ goto error;
+ }
+
+ if (H5Aclose(attr_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close attribute '%s'\n", attr_name);
+ goto error;
+ }
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Ocopy_group_between_files)
+ {
+ TESTING_2("H5Ocopy on group between different files");
+
+ if (H5Ocopy(group_id, OBJECT_COPY_BETWEEN_FILES_TEST_GROUP_NAME, file_id2,
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy group '%s' to second file '%s'\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_GROUP_NAME,
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_GROUP_NAME);
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ if ((object_link_exists =
+ H5Lexists(file_id2, OBJECT_COPY_BETWEEN_FILES_TEST_NEW_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' to copied group exists\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_GROUP_NAME);
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ if (!object_link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' to copied group in second file '%s' didn't exist!\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_GROUP_NAME,
+ OBJECT_COPY_BETWEEN_FILES_TEST_FILE_NAME);
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ /* Ensure that the new group has all the members of the copied group, and all its attributes */
+ if ((tmp_group_id =
+ H5Gopen2(file_id2, OBJECT_COPY_BETWEEN_FILES_TEST_NEW_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to open group copy '%s'\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_GROUP_NAME);
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ memset(&group_info, 0, sizeof(group_info));
+
+ /*
+ * Set link count to zero in case the connector doesn't support
+ * retrieval of group info.
+ */
+ group_info.nlinks = 0;
+
+ if (H5Gget_info(tmp_group_id, &group_info) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve group info\n");
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ if (group_info.nlinks != OBJECT_COPY_BETWEEN_FILES_TEST_NUM_NESTED_OBJS) {
+ H5_FAILED();
+ HDprintf(" copied group contained %d members instead of %d members after a deep copy!\n",
+ (int)group_info.nlinks, OBJECT_COPY_BETWEEN_FILES_TEST_NUM_NESTED_OBJS);
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ memset(&object_info, 0, sizeof(object_info));
+
+ /*
+ * Set attribute count to zero in case the connector doesn't
+ * support retrieval of object info.
+ */
+ object_info.num_attrs = 0;
+
+ if (H5Oget_info3(tmp_group_id, &object_info, H5O_INFO_ALL) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve object info\n");
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ if (object_info.num_attrs == 0) {
+ H5_FAILED();
+ HDprintf(" copied group didn't contain any attributes after copy operation!\n");
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ /* Check the attribute names, types, etc. */
+ i = 0;
+ if (H5Aiterate2(tmp_group_id, H5_INDEX_NAME, H5_ITER_INC, NULL,
+ object_copy_attribute_iter_callback, &i) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to iterate over copied group's attributes\n");
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ if (i != OBJECT_COPY_BETWEEN_FILES_TEST_NUM_ATTRS) {
+ H5_FAILED();
+ HDprintf(
+ " number of attributes on copied group (%llu) didn't match expected number (%llu)!\n",
+ (unsigned long long)i, (unsigned long long)OBJECT_COPY_BETWEEN_FILES_TEST_NUM_ATTRS);
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ if (H5Gclose(tmp_group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group copy\n");
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ /*
+ * Ensure that the last immediate member of the copied group
+ * contains its single member after the deep copy.
+ */
+ {
+ char grp_name[OBJECT_COPY_BETWEEN_FILES_TEST_BUF_SIZE];
+
+ snprintf(grp_name, OBJECT_COPY_BETWEEN_FILES_TEST_BUF_SIZE,
+ "/" OBJECT_COPY_BETWEEN_FILES_TEST_NEW_GROUP_NAME "/grp%d",
+ OBJECT_COPY_BETWEEN_FILES_TEST_NUM_NESTED_OBJS - 1);
+
+ if ((tmp_group_id = H5Gopen2(file_id2, grp_name, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to open group '%s'\n", grp_name);
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ memset(&group_info, 0, sizeof(group_info));
+
+ /*
+ * Set link count to zero in case the connector doesn't support
+ * retrieval of group info.
+ */
+ group_info.nlinks = 0;
+
+ if (H5Gget_info(tmp_group_id, &group_info) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve group info\n");
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ if (group_info.nlinks != 1) {
+ H5_FAILED();
+ HDprintf(" copied group's immediate members didn't contain nested members after a "
+ "deep copy!\n");
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+
+ if (H5Gclose(tmp_group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close group '%s'\n", grp_name);
+ PART_ERROR(H5Ocopy_group_between_files);
+ }
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ocopy_group_between_files);
+
+ if (tmp_group_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(tmp_group_id);
+ }
+ H5E_END_TRY;
+ tmp_group_id = H5I_INVALID_HID;
+ }
+
+ PART_BEGIN(H5Ocopy_dset_between_files)
+ {
+ TESTING_2("H5Ocopy on dataset between different files");
+
+ if (H5Ocopy(group_id, OBJECT_COPY_BETWEEN_FILES_TEST_DSET_NAME, file_id2,
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DSET_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy dataset '%s' to second file '%s'\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_DSET_NAME,
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DSET_NAME);
+ PART_ERROR(H5Ocopy_dset_between_files);
+ }
+
+ if ((object_link_exists =
+ H5Lexists(file_id2, OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' to copied dataset exists\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DSET_NAME);
+ PART_ERROR(H5Ocopy_dset_between_files);
+ }
+
+ if (!object_link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' to copied dataset in second file '%s' didn't exist!\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DSET_NAME,
+ OBJECT_COPY_BETWEEN_FILES_TEST_FILE_NAME);
+ PART_ERROR(H5Ocopy_dset_between_files);
+ }
+
+ /* Ensure that the new dataset has all the attributes of the copied dataset */
+ if ((tmp_dset_id =
+ H5Dopen2(file_id2, OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to open dataset copy '%s'\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DSET_NAME);
+ PART_ERROR(H5Ocopy_dset_between_files);
+ }
+
+ memset(&object_info, 0, sizeof(object_info));
+
+ /*
+ * Set attribute count to zero in case the connector doesn't
+ * support retrieval of object info.
+ */
+ object_info.num_attrs = 0;
+
+ if (H5Oget_info3(tmp_dset_id, &object_info, H5O_INFO_ALL) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve object info\n");
+ PART_ERROR(H5Ocopy_dset_between_files);
+ }
+
+ if (object_info.num_attrs == 0) {
+ H5_FAILED();
+ HDprintf(" copied dataset didn't contain any attributes after copy operation!\n");
+ PART_ERROR(H5Ocopy_dset_between_files);
+ }
+
+ /* Check the attribute names, types, etc. */
+ i = 0;
+ if (H5Aiterate2(tmp_dset_id, H5_INDEX_NAME, H5_ITER_INC, NULL,
+ object_copy_attribute_iter_callback, &i) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to iterate over copied dataset's attributes\n");
+ PART_ERROR(H5Ocopy_dset_between_files);
+ }
+
+ if (i != OBJECT_COPY_BETWEEN_FILES_TEST_NUM_ATTRS) {
+ H5_FAILED();
+ HDprintf(" number of attributes on copied dataset (%llu) didn't match expected number "
+ "(%llu)!\n",
+ (unsigned long long)i, (unsigned long long)OBJECT_COPY_BETWEEN_FILES_TEST_NUM_ATTRS);
+ PART_ERROR(H5Ocopy_dset_between_files);
+ }
+
+ if (H5Dclose(tmp_dset_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close dataset copy\n");
+ PART_ERROR(H5Ocopy_dset_between_files);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ocopy_dset_between_files);
+
+ if (tmp_dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(tmp_dset_id);
+ }
+ H5E_END_TRY;
+ tmp_dset_id = H5I_INVALID_HID;
+ }
+
+ PART_BEGIN(H5Ocopy_dtype_between_files)
+ {
+ TESTING_2("H5Ocopy on committed datatype between different files");
+
+ if (H5Ocopy(group_id, OBJECT_COPY_BETWEEN_FILES_TEST_DTYPE_NAME, file_id2,
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DTYPE_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to copy committed datatype '%s' to second file '%s'\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_DTYPE_NAME,
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DTYPE_NAME);
+ PART_ERROR(H5Ocopy_dtype_between_files);
+ }
+
+ if ((object_link_exists =
+ H5Lexists(file_id2, OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DTYPE_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if link '%s' to copied committed datatype exists\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DTYPE_NAME);
+ PART_ERROR(H5Ocopy_dtype_between_files);
+ }
+
+ if (!object_link_exists) {
+ H5_FAILED();
+ HDprintf(" link '%s' to copied committed datatype in second file '%s' didn't exist!\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DTYPE_NAME,
+ OBJECT_COPY_BETWEEN_FILES_TEST_FILE_NAME);
+ PART_ERROR(H5Ocopy_dtype_between_files);
+ }
+
+ /* Ensure that the new committed datatype has all the attributes of the copied committed datatype
+ */
+ if ((tmp_dtype_id =
+ H5Topen2(file_id2, OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DTYPE_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to open committed datatype copy '%s'\n",
+ OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DTYPE_NAME);
+ PART_ERROR(H5Ocopy_dtype_between_files);
+ }
+
+ memset(&object_info, 0, sizeof(object_info));
+
+ /*
+ * Set attribute count to zero in case the connector doesn't
+ * support retrieval of object info.
+ */
+ object_info.num_attrs = 0;
+
+ if (H5Oget_info3(tmp_dtype_id, &object_info, H5O_INFO_ALL) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve object info\n");
+ PART_ERROR(H5Ocopy_dtype_between_files);
+ }
+
+ if (object_info.num_attrs == 0) {
+ H5_FAILED();
+ HDprintf(
+ " copied committed datatype didn't contain any attributes after copy operation!\n");
+ PART_ERROR(H5Ocopy_dtype_between_files);
+ }
+
+ /* Check the attribute names, types, etc. */
+ i = 0;
+ if (H5Aiterate2(tmp_dtype_id, H5_INDEX_NAME, H5_ITER_INC, NULL,
+ object_copy_attribute_iter_callback, &i) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to iterate over copied datatype's attributes\n");
+ PART_ERROR(H5Ocopy_dtype_between_files);
+ }
+
+ if (i != OBJECT_COPY_BETWEEN_FILES_TEST_NUM_ATTRS) {
+ H5_FAILED();
+ HDprintf(" number of attributes on copied datatype (%llu) didn't match expected number "
+ "(%llu)!\n",
+ (unsigned long long)i, (unsigned long long)OBJECT_COPY_BETWEEN_FILES_TEST_NUM_ATTRS);
+ PART_ERROR(H5Ocopy_dtype_between_files);
+ }
+
+ if (H5Tclose(tmp_dtype_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close committed datatype copy\n");
+ PART_ERROR(H5Ocopy_dtype_between_files);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ocopy_dtype_between_files);
+
+ if (tmp_dtype_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Tclose(tmp_dtype_id);
+ }
+ H5E_END_TRY;
+ tmp_dtype_id = H5I_INVALID_HID;
+ }
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Sclose(attr_space_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dset_dtype) < 0)
+ TEST_ERROR;
+ if (H5Tclose(dtype_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id2) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id2) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(ocpypl_id);
+ H5Sclose(attr_space_id);
+ H5Sclose(space_id);
+ H5Aclose(attr_id);
+ H5Tclose(dset_dtype);
+ H5Tclose(tmp_dtype_id);
+ H5Tclose(dtype_id);
+ H5Dclose(tmp_dset_id);
+ H5Dclose(dset_id);
+ H5Gclose(tmp_group_id);
+ H5Gclose(group_id2);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id2);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that H5Ocopy fails when it
+ * is passed invalid parameters.
+ */
+static int
+test_object_copy_invalid_params(void)
+{
+    /* err_ret is inspected after each H5E_BEGIN_TRY/H5E_END_TRY pair below;
+     * a non-negative value means H5Ocopy wrongly succeeded with bad input. */
+    herr_t err_ret = -1;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t group_id2 = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("object copying with invalid parameters");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, or object aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, OBJECT_COPY_INVALID_PARAMS_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container subgroup '%s'\n",
+                 OBJECT_COPY_INVALID_PARAMS_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    /* group_id2 is the copy-source object that the H5Ocopy calls below refer
+     * to by name; it is never copied successfully since every call uses at
+     * least one invalid parameter. */
+    if ((group_id2 = H5Gcreate2(group_id, OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+                                H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Ocopy_invalid_src_loc_id)
+        {
+            TESTING_2("H5Ocopy with an invalid source location ID");
+
+            /* Each negative test suppresses the expected HDF5 error stack
+             * output with H5E_BEGIN_TRY/H5E_END_TRY. */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Ocopy(H5I_INVALID_HID, OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME, group_id,
+                                  OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME2, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Ocopy succeeded with an invalid source location ID!\n");
+                PART_ERROR(H5Ocopy_invalid_src_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ocopy_invalid_src_loc_id);
+
+        PART_BEGIN(H5Ocopy_invalid_src_obj_name)
+        {
+            TESTING_2("H5Ocopy with an invalid source object name");
+
+            /* Both a NULL name and an empty-string name must be rejected. */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Ocopy(group_id, NULL, group_id, OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME2,
+                                  H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Ocopy succeeded with a NULL source object name!\n");
+                PART_ERROR(H5Ocopy_invalid_src_obj_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Ocopy(group_id, "", group_id, OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME2,
+                                  H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Ocopy succeeded with an invalid source object name of ''!\n");
+                PART_ERROR(H5Ocopy_invalid_src_obj_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ocopy_invalid_src_obj_name);
+
+        PART_BEGIN(H5Ocopy_invalid_dst_loc_id)
+        {
+            TESTING_2("H5Ocopy with an invalid destination location ID");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Ocopy(group_id, OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME, H5I_INVALID_HID,
+                                  OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME2, H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Ocopy succeeded with an invalid destination location ID!\n");
+                PART_ERROR(H5Ocopy_invalid_dst_loc_id);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ocopy_invalid_dst_loc_id);
+
+        PART_BEGIN(H5Ocopy_invalid_dst_obj_name)
+        {
+            TESTING_2("H5Ocopy with an invalid destination object name");
+
+            /* Both a NULL name and an empty-string name must be rejected. */
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Ocopy(group_id, OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME, group_id, NULL,
+                                  H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Ocopy succeeded with a NULL destination object name!\n");
+                PART_ERROR(H5Ocopy_invalid_dst_obj_name);
+            }
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Ocopy(group_id, OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME, group_id, "",
+                                  H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Ocopy succeeded with an invalid destination object name of ''!\n");
+                PART_ERROR(H5Ocopy_invalid_dst_obj_name);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ocopy_invalid_dst_obj_name);
+
+        PART_BEGIN(H5Ocopy_invalid_ocpypl)
+        {
+            TESTING_2("H5Ocopy with an invalid OcpyPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Ocopy(group_id, OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME, group_id,
+                                  OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME2, H5I_INVALID_HID, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Ocopy succeeded with an invalid OcpyPL!\n");
+                PART_ERROR(H5Ocopy_invalid_ocpypl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ocopy_invalid_ocpypl);
+
+        PART_BEGIN(H5Ocopy_invalid_lcpl)
+        {
+            TESTING_2("H5Ocopy with an invalid LCPL");
+
+            H5E_BEGIN_TRY
+            {
+                err_ret = H5Ocopy(group_id, OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME, group_id,
+                                  OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME2, H5P_DEFAULT, H5I_INVALID_HID);
+            }
+            H5E_END_TRY;
+
+            if (err_ret >= 0) {
+                H5_FAILED();
+                HDprintf("    H5Ocopy succeeded with an invalid LCPL!\n");
+                PART_ERROR(H5Ocopy_invalid_lcpl);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ocopy_invalid_lcpl);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Gclose(group_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup: close everything that might still be open while
+     * suppressing the error stack from IDs that were never created. */
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test for H5Oset_comment(_by_name)/H5Oget_comment(_by_name).
+ */
+static int
+test_object_comments(void)
+{
+    /* Placeholder: the object-comment round-trip test has not been written
+     * yet, so this test is reported as skipped unconditionally. */
+    TESTING("object comments");
+
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to check that H5Oset_comment(_by_name)/H5Oget_comment(_by_name)
+ * fail when passed invalid parameters.
+ */
+static int
+test_object_comments_invalid_params(void)
+{
+    /* Placeholder: invalid-parameter checks for
+     * H5Oset_comment(_by_name)/H5Oget_comment(_by_name) are not implemented
+     * yet, so this test is reported as skipped unconditionally.
+     *
+     * Fix: the banner previously read "object comment " (truncated, trailing
+     * space); it now describes the test like the other banners in this file. */
+    TESTING("object comments with invalid parameters");
+
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test for H5Ovisit(_by_name).
+ *
+ * XXX: Should have test for checking nested object's names/paths.
+ */
+static int
+test_object_visit(void)
+{
+    /* Running counter passed to the visit callback so each iteration pass can
+     * verify objects are returned in the expected order (see note below). */
+    size_t i;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t group_id2 = H5I_INVALID_HID;
+    hid_t gcpl_id = H5I_INVALID_HID;
+    hid_t type_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t dset_dtype = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("object visiting");
+
+    /* Make sure the connector supports the API functions being tested.
+     * (Fix: H5VL_CAP_FLAG_STORED_DATATYPES was previously tested twice in
+     * this condition; the redundant duplicate clause has been removed.) */
+    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, object, dataset, attribute, stored datatype, "
+                 "iterate, or creation order aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create a GCPL\n");
+        goto error;
+    }
+
+    /* Creation-order tracking must be enabled for the H5_INDEX_CRT_ORDER
+     * visiting parts below. */
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't enable link creation order tracking and indexing on GCPL\n");
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, OBJECT_VISIT_TEST_SUBGROUP_NAME, H5P_DEFAULT, gcpl_id,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", OBJECT_VISIT_TEST_SUBGROUP_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = generate_random_dataspace(OBJECT_VISIT_TEST_SPACE_RANK, NULL, NULL, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
+        TEST_ERROR;
+
+    if ((type_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create datatype '%s'\n", OBJECT_VISIT_TEST_TYPE_NAME);
+        goto error;
+    }
+
+    /* Create one group, one dataset, and one committed datatype under
+     * group_id so visiting it touches every object type. */
+    if ((group_id2 = H5Gcreate2(group_id, OBJECT_VISIT_TEST_GROUP_NAME, H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't create group '%s'\n", OBJECT_VISIT_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_id = H5Dcreate2(group_id, OBJECT_VISIT_TEST_DSET_NAME, dset_dtype, fspace_id, H5P_DEFAULT,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", OBJECT_VISIT_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if (H5Tcommit2(group_id, OBJECT_VISIT_TEST_TYPE_NAME, type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't commit datatype '%s'\n", OBJECT_VISIT_TEST_TYPE_NAME);
+        goto error;
+    }
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        /*
+         * NOTE: A counter is passed to the iteration callback to try to match up
+         * the expected objects with a given step throughout all of the following
+         * iterations. This is to try and check that the objects are indeed being
+         * returned in the correct order.
+         */
+
+        PART_BEGIN(H5Ovisit_obj_name_increasing)
+        {
+            TESTING_2("H5Ovisit by object name in increasing order");
+
+            i = 0;
+
+            if (H5Ovisit3(group_id, H5_INDEX_NAME, H5_ITER_INC, object_visit_callback, &i, H5O_INFO_ALL) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit by object name in increasing order failed\n");
+                PART_ERROR(H5Ovisit_obj_name_increasing);
+            }
+
+            if (i != OBJECT_VISIT_TEST_NUM_OBJS_VISITED) {
+                H5_FAILED();
+                HDprintf("    some objects were not visited!\n");
+                PART_ERROR(H5Ovisit_obj_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ovisit_obj_name_increasing);
+
+        PART_BEGIN(H5Ovisit_obj_name_decreasing)
+        {
+            TESTING_2("H5Ovisit by object name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
+
+            if (H5Ovisit3(group_id, H5_INDEX_NAME, H5_ITER_DEC, object_visit_callback, &i, H5O_INFO_ALL) <
+                0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit by object name in decreasing order failed\n");
+                PART_ERROR(H5Ovisit_obj_name_decreasing);
+            }
+
+            if (i != 2 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED) {
+                H5_FAILED();
+                HDprintf("    some objects were not visited!\n");
+                PART_ERROR(H5Ovisit_obj_name_decreasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Ovisit_obj_name_decreasing);
+#endif
+        }
+        PART_END(H5Ovisit_obj_name_decreasing);
+
+        PART_BEGIN(H5Ovisit_create_order_increasing)
+        {
+            TESTING_2("H5Ovisit by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
+
+            if (H5Ovisit3(group_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, object_visit_callback, &i,
+                          H5O_INFO_ALL) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit by creation order in increasing order failed\n");
+                PART_ERROR(H5Ovisit_create_order_increasing);
+            }
+
+            if (i != 3 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED) {
+                H5_FAILED();
+                HDprintf("    some objects were not visited!\n");
+                PART_ERROR(H5Ovisit_create_order_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ovisit_create_order_increasing);
+
+        PART_BEGIN(H5Ovisit_create_order_decreasing)
+        {
+            TESTING_2("H5Ovisit by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
+
+            if (H5Ovisit3(group_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, object_visit_callback, &i,
+                          H5O_INFO_ALL) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit by creation order in decreasing order failed\n");
+                PART_ERROR(H5Ovisit_create_order_decreasing);
+            }
+
+            if (i != 4 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED) {
+                H5_FAILED();
+                HDprintf("    some objects were not visited!\n");
+                PART_ERROR(H5Ovisit_create_order_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ovisit_create_order_decreasing);
+
+        PART_BEGIN(H5Ovisit_file)
+        {
+            TESTING_2("H5Ovisit on a file ID");
+
+            /*
+             * XXX:
+             */
+
+            SKIPPED();
+            PART_EMPTY(H5Ovisit_file);
+        }
+        PART_END(H5Ovisit_file);
+
+        PART_BEGIN(H5Ovisit_dset)
+        {
+            TESTING_2("H5Ovisit on a dataset ID");
+
+            if (H5Ovisit3(dset_id, H5_INDEX_NAME, H5_ITER_INC, object_visit_dset_callback, NULL,
+                          H5O_INFO_ALL) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit failed\n");
+                PART_ERROR(H5Ovisit_dset);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ovisit_dset);
+
+        PART_BEGIN(H5Ovisit_dtype)
+        {
+            TESTING_2("H5Ovisit on a committed datatype ID");
+
+            if (H5Ovisit3(type_id, H5_INDEX_NAME, H5_ITER_INC, object_visit_dtype_callback, NULL,
+                          H5O_INFO_ALL) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit failed\n");
+                PART_ERROR(H5Ovisit_dtype);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ovisit_dtype);
+
+        PART_BEGIN(H5Ovisit_by_name_obj_name_increasing)
+        {
+            TESTING_2("H5Ovisit_by_name by object name in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 0;
+
+            /* First, test visiting using "." for the object name */
+            if (H5Ovisit_by_name3(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, object_visit_callback, &i,
+                                  H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit_by_name by object name in increasing order failed\n");
+                PART_ERROR(H5Ovisit_by_name_obj_name_increasing);
+            }
+
+            if (i != OBJECT_VISIT_TEST_NUM_OBJS_VISITED) {
+                H5_FAILED();
+                HDprintf("    some objects were not visited!\n");
+                PART_ERROR(H5Ovisit_by_name_obj_name_increasing);
+            }
+
+            /* Reset the special counter and repeat the test using an indirect object name. */
+            i = 0;
+
+            if (H5Ovisit_by_name3(container_group, OBJECT_VISIT_TEST_SUBGROUP_NAME, H5_INDEX_NAME,
+                                  H5_ITER_INC, object_visit_callback, &i, H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit_by_name by object name in increasing order failed\n");
+                PART_ERROR(H5Ovisit_by_name_obj_name_increasing);
+            }
+
+            if (i != OBJECT_VISIT_TEST_NUM_OBJS_VISITED) {
+                H5_FAILED();
+                HDprintf("    some objects were not visited!\n");
+                PART_ERROR(H5Ovisit_by_name_obj_name_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ovisit_by_name_obj_name_increasing);
+
+        PART_BEGIN(H5Ovisit_by_name_obj_name_decreasing)
+        {
+            TESTING_2("H5Ovisit_by_name by object name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+            /* Reset the counter to the appropriate value for the next test */
+            i = OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
+
+            /* First, test visiting using "." for the object name */
+            if (H5Ovisit_by_name3(group_id, ".", H5_INDEX_NAME, H5_ITER_DEC, object_visit_callback, &i,
+                                  H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit_by_name by object name in decreasing order failed\n");
+                PART_ERROR(H5Ovisit_by_name_obj_name_decreasing);
+            }
+
+            if (i != 2 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED) {
+                H5_FAILED();
+                HDprintf("    some objects were not visited!\n");
+                PART_ERROR(H5Ovisit_by_name_obj_name_decreasing);
+            }
+
+            /* Reset the special counter and repeat the test using an indirect object name. */
+            i = OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
+
+            if (H5Ovisit_by_name3(container_group, OBJECT_VISIT_TEST_SUBGROUP_NAME, H5_INDEX_NAME,
+                                  H5_ITER_DEC, object_visit_callback, &i, H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit_by_name by object name in decreasing order failed\n");
+                PART_ERROR(H5Ovisit_by_name_obj_name_decreasing);
+            }
+
+            if (i != 2 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED) {
+                H5_FAILED();
+                HDprintf("    some objects were not visited!\n");
+                PART_ERROR(H5Ovisit_by_name_obj_name_decreasing);
+            }
+
+            PASSED();
+#else
+            SKIPPED();
+            PART_EMPTY(H5Ovisit_by_name_obj_name_decreasing);
+#endif
+        }
+        PART_END(H5Ovisit_by_name_obj_name_decreasing);
+
+        PART_BEGIN(H5Ovisit_by_name_create_order_increasing)
+        {
+            TESTING_2("H5Ovisit_by_name by creation order in increasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 2 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
+
+            /* First, test visiting using "." for the object name */
+            if (H5Ovisit_by_name3(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, object_visit_callback, &i,
+                                  H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit_by_name by creation order in increasing order failed\n");
+                PART_ERROR(H5Ovisit_by_name_create_order_increasing);
+            }
+
+            if (i != 3 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED) {
+                H5_FAILED();
+                HDprintf("    some objects were not visited!\n");
+                PART_ERROR(H5Ovisit_by_name_create_order_increasing);
+            }
+
+            /* Reset the special counter and repeat the test using an indirect object name. */
+            i = 2 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
+
+            if (H5Ovisit_by_name3(container_group, OBJECT_VISIT_TEST_SUBGROUP_NAME, H5_INDEX_CRT_ORDER,
+                                  H5_ITER_INC, object_visit_callback, &i, H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit_by_name by creation order in increasing order failed\n");
+                PART_ERROR(H5Ovisit_by_name_create_order_increasing);
+            }
+
+            if (i != 3 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED) {
+                H5_FAILED();
+                HDprintf("    some objects were not visited!\n");
+                PART_ERROR(H5Ovisit_by_name_create_order_increasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ovisit_by_name_create_order_increasing);
+
+        PART_BEGIN(H5Ovisit_by_name_create_order_decreasing)
+        {
+            TESTING_2("H5Ovisit_by_name by creation order in decreasing order");
+
+            /* Reset the counter to the appropriate value for the next test */
+            i = 3 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
+
+            /* First, test visiting using "." for the object name */
+            if (H5Ovisit_by_name3(group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, object_visit_callback, &i,
+                                  H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit_by_name by creation order in decreasing order failed\n");
+                PART_ERROR(H5Ovisit_by_name_create_order_decreasing);
+            }
+
+            if (i != 4 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED) {
+                H5_FAILED();
+                HDprintf("    some objects were not visited!\n");
+                PART_ERROR(H5Ovisit_by_name_create_order_decreasing);
+            }
+
+            /* Reset the special counter and repeat the test using an indirect object name. */
+            i = 3 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED;
+
+            if (H5Ovisit_by_name3(container_group, OBJECT_VISIT_TEST_SUBGROUP_NAME, H5_INDEX_CRT_ORDER,
+                                  H5_ITER_DEC, object_visit_callback, &i, H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit_by_name by creation order in decreasing order failed\n");
+                PART_ERROR(H5Ovisit_by_name_create_order_decreasing);
+            }
+
+            if (i != 4 * OBJECT_VISIT_TEST_NUM_OBJS_VISITED) {
+                H5_FAILED();
+                HDprintf("    some objects were not visited!\n");
+                PART_ERROR(H5Ovisit_by_name_create_order_decreasing);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ovisit_by_name_create_order_decreasing);
+
+        PART_BEGIN(H5Ovisit_by_name_file)
+        {
+            TESTING_2("H5Ovisit_by_name on a file ID");
+
+            /*
+             * XXX:
+             */
+
+            SKIPPED();
+            PART_EMPTY(H5Ovisit_by_name_file);
+        }
+        PART_END(H5Ovisit_by_name_file);
+
+        PART_BEGIN(H5Ovisit_by_name_dset)
+        {
+            TESTING_2("H5Ovisit_by_name on a dataset ID");
+
+            if (H5Ovisit_by_name3(group_id, OBJECT_VISIT_TEST_DSET_NAME, H5_INDEX_NAME, H5_ITER_INC,
+                                  object_visit_dset_callback, NULL, H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit_by_name failed\n");
+                PART_ERROR(H5Ovisit_by_name_dset);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ovisit_by_name_dset);
+
+        PART_BEGIN(H5Ovisit_by_name_dtype)
+        {
+            TESTING_2("H5Ovisit_by_name on a committed datatype ID");
+
+            if (H5Ovisit_by_name3(group_id, OBJECT_VISIT_TEST_TYPE_NAME, H5_INDEX_NAME, H5_ITER_INC,
+                                  object_visit_dtype_callback, NULL, H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+                H5_FAILED();
+                HDprintf("    H5Ovisit_by_name failed\n");
+                PART_ERROR(H5Ovisit_by_name_dtype);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Ovisit_by_name_dtype);
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(dset_dtype) < 0)
+        TEST_ERROR;
+    if (H5Tclose(type_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup: close everything that might still be open while
+     * suppressing the error stack from IDs that were never created. */
+    H5E_BEGIN_TRY
+    {
+        H5Sclose(fspace_id);
+        H5Tclose(dset_dtype);
+        H5Tclose(type_id);
+        H5Dclose(dset_id);
+        H5Pclose(gcpl_id);
+        H5Gclose(group_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test for H5Ovisit(_by_name) on soft links. Since
+ * H5Ovisit(_by_name) ignores soft links, this test is
+ * meant to verify that behavior by placing objects and
+ * the soft links pointing to those objects in separate
+ * groups. Visiting is done only on the group containing
+ * the links to ensure that the objects in the other group
+ * do not get visited.
+ */
+static int
+test_object_visit_soft_link(void)
+{
+ size_t i;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t subgroup_id = H5I_INVALID_HID, subgroup_id2 = H5I_INVALID_HID;
+ hid_t linked_group_id = H5I_INVALID_HID;
+ hid_t gcpl_id = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("object visiting with soft links");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, object, soft link, iterate, or creation order "
+ "aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create a GCPL\n");
+ goto error;
+ }
+
+ if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't enable link creation order tracking and indexing on GCPL\n");
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, OBJECT_VISIT_SOFT_LINK_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n", OBJECT_VISIT_SOFT_LINK_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ /* Create group to hold soft links */
+ if ((subgroup_id = H5Gcreate2(group_id, OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME1, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME1);
+ goto error;
+ }
+
+ /* Create group to hold objects pointed to by soft links */
+ if ((subgroup_id2 = H5Gcreate2(group_id, OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME2, H5P_DEFAULT, gcpl_id,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME2);
+ goto error;
+ }
+
+ /* Create objects under subgroup 2 */
+ if ((linked_group_id = H5Gcreate2(subgroup_id2, OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME1, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME1);
+ goto error;
+ }
+
+ if (H5Gclose(linked_group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close group '%s'\n", OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME1);
+ goto error;
+ }
+
+ if ((linked_group_id = H5Gcreate2(subgroup_id2, OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME2, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME2);
+ goto error;
+ }
+
+ if (H5Gclose(linked_group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close group '%s'\n", OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME2);
+ goto error;
+ }
+
+ if ((linked_group_id = H5Gcreate2(subgroup_id2, OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME3, H5P_DEFAULT,
+ gcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME3);
+ goto error;
+ }
+
+ if (H5Gclose(linked_group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close group '%s'\n", OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME3);
+ goto error;
+ }
+
+ if (H5Gclose(subgroup_id2) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't close group '%s'\n", OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME2);
+ goto error;
+ }
+
+ /* Create soft links under subgroup 1 to point to the previously-created objects */
+ if (H5Lcreate_soft("/" OBJECT_TEST_GROUP_NAME "/" OBJECT_VISIT_SOFT_LINK_TEST_SUBGROUP_NAME
+ "/" OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME2 "/" OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME1,
+ subgroup_id, OBJECT_VISIT_SOFT_LINK_TEST_LINK_NAME1, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", OBJECT_VISIT_SOFT_LINK_TEST_LINK_NAME1);
+ goto error;
+ }
+
+ if (H5Lcreate_soft("/" OBJECT_TEST_GROUP_NAME "/" OBJECT_VISIT_SOFT_LINK_TEST_SUBGROUP_NAME
+ "/" OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME2 "/" OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME2,
+ subgroup_id, OBJECT_VISIT_SOFT_LINK_TEST_LINK_NAME2, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", OBJECT_VISIT_SOFT_LINK_TEST_LINK_NAME2);
+ goto error;
+ }
+
+ if (H5Lcreate_soft("/" OBJECT_TEST_GROUP_NAME "/" OBJECT_VISIT_SOFT_LINK_TEST_SUBGROUP_NAME
+ "/" OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME2 "/" OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME3,
+ subgroup_id, OBJECT_VISIT_SOFT_LINK_TEST_LINK_NAME3, H5P_DEFAULT, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create soft link '%s'\n", OBJECT_VISIT_SOFT_LINK_TEST_LINK_NAME3);
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ /*
+ * NOTE: A counter is passed to the iteration callback to try to match up
+ * the expected objects with a given step throughout all of the following
+ * iterations. This is to try and check that the objects are indeed being
+ * returned in the correct order.
+ */
+
+ PART_BEGIN(H5Ovisit_obj_name_increasing)
+ {
+ TESTING_2("H5Ovisit by object name in increasing order");
+
+ i = 0;
+
+ if (H5Ovisit3(subgroup_id, H5_INDEX_NAME, H5_ITER_INC, object_visit_soft_link_callback, &i,
+ H5O_INFO_ALL) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit by object name in increasing order failed\n");
+ PART_ERROR(H5Ovisit_obj_name_increasing);
+ }
+
+ if (i != OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED) {
+ H5_FAILED();
+ HDprintf(" some objects were not visited!\n");
+ PART_ERROR(H5Ovisit_obj_name_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_obj_name_increasing);
+
+ PART_BEGIN(H5Ovisit_obj_name_decreasing)
+ {
+ TESTING_2("H5Ovisit by object name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Reset the counter to the appropriate value for the next test */
+ i = OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
+
+ if (H5Ovisit3(subgroup_id, H5_INDEX_NAME, H5_ITER_DEC, object_visit_soft_link_callback, &i,
+ H5O_INFO_ALL) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit by object name in decreasing order failed\n");
+ PART_ERROR(H5Ovisit_obj_name_decreasing);
+ }
+
+ if (i != 2 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED) {
+ H5_FAILED();
+ HDprintf(" some objects were not visited!\n");
+ PART_ERROR(H5Ovisit_obj_name_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Ovisit_obj_name_decreasing);
+#endif
+ }
+ PART_END(H5Ovisit_obj_name_decreasing);
+
+ PART_BEGIN(H5Ovisit_create_order_increasing)
+ {
+ TESTING_2("H5Ovisit by creation order in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 2 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
+
+ if (H5Ovisit3(subgroup_id, H5_INDEX_CRT_ORDER, H5_ITER_INC, object_visit_soft_link_callback, &i,
+ H5O_INFO_ALL) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit by creation order in increasing order failed\n");
+ PART_ERROR(H5Ovisit_create_order_increasing);
+ }
+
+ if (i != 3 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED) {
+ H5_FAILED();
+ HDprintf(" some objects were not visited!\n");
+ PART_ERROR(H5Ovisit_create_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_create_order_increasing);
+
+ PART_BEGIN(H5Ovisit_create_order_decreasing)
+ {
+ TESTING_2("H5Ovisit by creation order in decreasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 3 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
+
+ if (H5Ovisit3(subgroup_id, H5_INDEX_CRT_ORDER, H5_ITER_DEC, object_visit_soft_link_callback, &i,
+ H5O_INFO_ALL) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit by creation order in decreasing order failed\n");
+ PART_ERROR(H5Ovisit_create_order_decreasing);
+ }
+
+ if (i != 4 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED) {
+ H5_FAILED();
+ HDprintf(" some objects were not visited!\n");
+ PART_ERROR(H5Ovisit_create_order_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_create_order_decreasing);
+
+ PART_BEGIN(H5Ovisit_by_name_obj_name_increasing)
+ {
+ TESTING_2("H5Ovisit_by_name by object name in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 0;
+
+ /* First, test visiting using "." for the object name */
+ if (H5Ovisit_by_name3(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_INC,
+ object_visit_soft_link_callback, &i, H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name by object name in increasing order failed\n");
+ PART_ERROR(H5Ovisit_by_name_obj_name_increasing);
+ }
+
+ if (i != OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED) {
+ H5_FAILED();
+ HDprintf(" some objects were not visited!\n");
+ PART_ERROR(H5Ovisit_by_name_obj_name_increasing);
+ }
+
+ /* Reset the special counter and repeat the test using an indirect object name. */
+ i = 0;
+
+ /* Repeat the test using an indirect object name */
+ if (H5Ovisit_by_name3(group_id, OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME1, H5_INDEX_NAME,
+ H5_ITER_INC, object_visit_soft_link_callback, &i, H5O_INFO_ALL,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name by object name in increasing order failed\n");
+ PART_ERROR(H5Ovisit_by_name_obj_name_increasing);
+ }
+
+ if (i != OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED) {
+ H5_FAILED();
+ HDprintf(" some objects were not visited!\n");
+ PART_ERROR(H5Ovisit_by_name_obj_name_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_by_name_obj_name_increasing);
+
+ PART_BEGIN(H5Ovisit_by_name_obj_name_decreasing)
+ {
+ TESTING_2("H5Ovisit_by_name by object name in decreasing order");
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ /* Reset the counter to the appropriate value for the next test */
+ i = OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
+
+ /* First, test visiting using "." for the object name */
+ if (H5Ovisit_by_name3(subgroup_id, ".", H5_INDEX_NAME, H5_ITER_DEC,
+ object_visit_soft_link_callback, &i, H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name by object name in decreasing order failed\n");
+ PART_ERROR(H5Ovisit_by_name_obj_name_decreasing);
+ }
+
+ if (i != 2 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED) {
+ H5_FAILED();
+ HDprintf(" some objects were not visited!\n");
+ PART_ERROR(H5Ovisit_by_name_obj_name_decreasing);
+ }
+
+ /* Reset the special counter and repeat the test using an indirect object name. */
+ i = OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
+
+ /* Repeat the test using an indirect object name */
+ if (H5Ovisit_by_name3(group_id, OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME1, H5_INDEX_NAME,
+ H5_ITER_DEC, object_visit_soft_link_callback, &i, H5O_INFO_ALL,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name by object name in decreasing order failed\n");
+ PART_ERROR(H5Ovisit_by_name_obj_name_decreasing);
+ }
+
+ if (i != 2 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED) {
+ H5_FAILED();
+ HDprintf(" some objects were not visited!\n");
+ PART_ERROR(H5Ovisit_by_name_obj_name_decreasing);
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ PART_EMPTY(H5Ovisit_by_name_obj_name_decreasing);
+#endif
+ }
+ PART_END(H5Ovisit_by_name_obj_name_decreasing);
+
+ PART_BEGIN(H5Ovisit_by_name_create_order_increasing)
+ {
+ TESTING_2("H5Ovisit_by_name by creation order in increasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 2 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
+
+ /* First, test visiting using "." for the object name */
+ if (H5Ovisit_by_name3(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC,
+ object_visit_soft_link_callback, &i, H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name by creation order in increasing order failed\n");
+ PART_ERROR(H5Ovisit_by_name_create_order_increasing);
+ }
+
+ if (i != 3 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED) {
+ H5_FAILED();
+ HDprintf(" some objects were not visited!\n");
+ PART_ERROR(H5Ovisit_by_name_create_order_increasing);
+ }
+
+ /* Reset the special counter and repeat the test using an indirect object name. */
+ i = 2 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
+
+ /* Repeat the test using an indirect object name */
+ if (H5Ovisit_by_name3(group_id, OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME1, H5_INDEX_CRT_ORDER,
+ H5_ITER_INC, object_visit_soft_link_callback, &i, H5O_INFO_ALL,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name by creation order in increasing order failed\n");
+ PART_ERROR(H5Ovisit_by_name_create_order_increasing);
+ }
+
+ if (i != 3 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED) {
+ H5_FAILED();
+ HDprintf(" some objects were not visited!\n");
+ PART_ERROR(H5Ovisit_by_name_create_order_increasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_by_name_create_order_increasing);
+
+ PART_BEGIN(H5Ovisit_by_name_create_order_decreasing)
+ {
+ TESTING_2("H5Ovisit_by_name by creation order in decreasing order");
+
+ /* Reset the counter to the appropriate value for the next test */
+ i = 3 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
+
+ /* First, test visiting using "." for the object name */
+ if (H5Ovisit_by_name3(subgroup_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC,
+ object_visit_soft_link_callback, &i, H5O_INFO_ALL, H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name by creation order in decreasing order failed\n");
+ PART_ERROR(H5Ovisit_by_name_create_order_decreasing);
+ }
+
+ if (i != 4 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED) {
+ H5_FAILED();
+ HDprintf(" some objects were not visited!\n");
+ PART_ERROR(H5Ovisit_by_name_create_order_decreasing);
+ }
+
+ /* Reset the special counter and repeat the test using an indirect object name. */
+ i = 3 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED;
+
+ /* Repeat the test using an indirect object name */
+ if (H5Ovisit_by_name3(group_id, OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME1, H5_INDEX_CRT_ORDER,
+ H5_ITER_DEC, object_visit_soft_link_callback, &i, H5O_INFO_ALL,
+ H5P_DEFAULT) < 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name by creation order in decreasing order failed\n");
+ PART_ERROR(H5Ovisit_by_name_create_order_decreasing);
+ }
+
+ if (i != 4 * OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED) {
+ H5_FAILED();
+ HDprintf(" some objects were not visited!\n");
+ PART_ERROR(H5Ovisit_by_name_create_order_decreasing);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_by_name_create_order_decreasing);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Pclose(gcpl_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(subgroup_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(gcpl_id);
+ H5Gclose(linked_group_id);
+ H5Gclose(subgroup_id);
+ H5Gclose(subgroup_id2);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that H5Ovisit(_by_name) fails when
+ * it is passed invalid parameters.
+ */
+static int
+test_object_visit_invalid_params(void)
+{
+ herr_t err_ret = -1;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t group_id2 = H5I_INVALID_HID;
+
+ TESTING_MULTIPART("object visiting with invalid parameters");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ITERATE)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or iterate aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, OBJECT_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n",
+ OBJECT_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id2 = H5Gcreate2(group_id, OBJECT_VISIT_INVALID_PARAMS_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create group '%s'\n", OBJECT_VISIT_INVALID_PARAMS_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Ovisit_invalid_obj_id)
+ {
+ TESTING_2("H5Ovisit with an invalid object ID");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ovisit3(H5I_INVALID_HID, H5_INDEX_NAME, H5_ITER_INC, object_visit_noop_callback,
+ NULL, H5O_INFO_ALL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit succeeded with an invalid object ID!\n");
+ PART_ERROR(H5Ovisit_invalid_obj_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_invalid_obj_id);
+
+ PART_BEGIN(H5Ovisit_invalid_index_type)
+ {
+ TESTING_2("H5Ovisit with an invalid index type");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ovisit3(group_id, H5_INDEX_UNKNOWN, H5_ITER_INC, object_visit_noop_callback, NULL,
+ H5O_INFO_ALL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit succeeded with invalid index type H5_INDEX_UNKNOWN!\n");
+ PART_ERROR(H5Ovisit_invalid_index_type);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ovisit3(group_id, H5_INDEX_N, H5_ITER_INC, object_visit_noop_callback, NULL,
+ H5O_INFO_ALL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit succeeded with invalid index type H5_INDEX_N!\n");
+ PART_ERROR(H5Ovisit_invalid_index_type);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_invalid_index_type);
+
+ PART_BEGIN(H5Ovisit_invalid_iter_order)
+ {
+ TESTING_2("H5Ovisit with an invalid iteration ordering");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ovisit3(group_id, H5_INDEX_NAME, H5_ITER_UNKNOWN, object_visit_noop_callback,
+ NULL, H5O_INFO_ALL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit succeeded with invalid iteration ordering H5_ITER_UNKNOWN!\n");
+ PART_ERROR(H5Ovisit_invalid_iter_order);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ovisit3(group_id, H5_INDEX_NAME, H5_ITER_N, object_visit_noop_callback, NULL,
+ H5O_INFO_ALL);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit succeeded with invalid iteration ordering H5_ITER_N!\n");
+ PART_ERROR(H5Ovisit_invalid_iter_order);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_invalid_iter_order);
+
+ PART_BEGIN(H5Ovisit_by_name_invalid_loc_id)
+ {
+ TESTING_2("H5Ovisit_by_name with an invalid location ID");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ovisit_by_name3(H5I_INVALID_HID, ".", H5_INDEX_NAME, H5_ITER_N,
+ object_visit_noop_callback, NULL, H5O_INFO_ALL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name succeeded with an invalid location ID!\n");
+ PART_ERROR(H5Ovisit_by_name_invalid_loc_id);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_by_name_invalid_loc_id);
+
+ PART_BEGIN(H5Ovisit_by_name_invalid_obj_name)
+ {
+ TESTING_2("H5Ovisit_by_name with an invalid object name");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ovisit_by_name3(group_id, NULL, H5_INDEX_NAME, H5_ITER_N,
+ object_visit_noop_callback, NULL, H5O_INFO_ALL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name succeeded with a NULL object name!\n");
+ PART_ERROR(H5Ovisit_by_name_invalid_obj_name);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ovisit_by_name3(group_id, "", H5_INDEX_NAME, H5_ITER_N,
+ object_visit_noop_callback, NULL, H5O_INFO_ALL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name succeeded with an invalid object name of ''!\n");
+ PART_ERROR(H5Ovisit_by_name_invalid_obj_name);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_by_name_invalid_obj_name);
+
+ PART_BEGIN(H5Ovisit_by_name_invalid_index_type)
+ {
+ TESTING_2("H5Ovisit_by_name with an invalid index type");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ovisit_by_name3(group_id, ".", H5_INDEX_UNKNOWN, H5_ITER_N,
+ object_visit_noop_callback, NULL, H5O_INFO_ALL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name succeeded with invalid index type H5_INDEX_UNKNOWN!\n");
+ PART_ERROR(H5Ovisit_by_name_invalid_index_type);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ovisit_by_name3(group_id, ".", H5_INDEX_N, H5_ITER_N, object_visit_noop_callback,
+ NULL, H5O_INFO_ALL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name succeeded with invalid index type H5_INDEX_N!\n");
+ PART_ERROR(H5Ovisit_by_name_invalid_index_type);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_by_name_invalid_index_type);
+
+ PART_BEGIN(H5Ovisit_by_name_invalid_iter_order)
+ {
+ TESTING_2("H5Ovisit_by_name with an invalid iteration ordering");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ovisit_by_name3(group_id, ".", H5_INDEX_NAME, H5_ITER_UNKNOWN,
+ object_visit_noop_callback, NULL, H5O_INFO_ALL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name succeeded with invalid iteration ordering H5_ITER_UNKNOWN!\n");
+ PART_ERROR(H5Ovisit_by_name_invalid_iter_order);
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ovisit_by_name3(group_id, ".", H5_INDEX_NAME, H5_ITER_N,
+ object_visit_noop_callback, NULL, H5O_INFO_ALL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name succeeded with invalid iteration ordering H5_ITER_N!\n");
+ PART_ERROR(H5Ovisit_by_name_invalid_iter_order);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_by_name_invalid_iter_order);
+
+ PART_BEGIN(H5Ovisit_by_name_invalid_lapl)
+ {
+ TESTING_2("H5Ovisit_by_name with an invalid LAPL");
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Ovisit_by_name3(group_id, ".", H5_INDEX_NAME, H5_ITER_INC,
+ object_visit_noop_callback, NULL, H5O_INFO_ALL, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Ovisit_by_name succeeded with an invalid LAPL!\n");
+ PART_ERROR(H5Ovisit_by_name_invalid_lapl);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Ovisit_by_name_invalid_lapl);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Gclose(group_id2) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Gclose(group_id2);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
/*
 * A test for H5Oclose.
 *
 * For each object type (group, dataset, committed datatype), the object
 * is created, closed with its type-specific close routine, re-opened
 * with the generic H5Oopen, and finally closed with H5Oclose.
 */
static int
test_close_object(void)
{
    hid_t file_id = H5I_INVALID_HID;
    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
    hid_t group_id2 = H5I_INVALID_HID;
    hid_t dtype_id = H5I_INVALID_HID;
    hid_t dset_id = H5I_INVALID_HID;
    hid_t dset_dtype = H5I_INVALID_HID;
    hid_t fspace_id = H5I_INVALID_HID;

    TESTING_MULTIPART("H5Oclose");

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_STORED_DATATYPES)) {
        SKIPPED();
        HDprintf(" API functions for basic file, group, object, dataset, attribute, or stored datatype "
                 "aren't supported with this connector\n");
        return 0;
    }

    TESTING_2("test setup");

    if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
        goto error;
    }

    if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
        goto error;
    }

    if ((group_id = H5Gcreate2(container_group, OBJECT_CLOSE_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT,
                               H5P_DEFAULT)) < 0) {
        H5_FAILED();
        HDprintf(" couldn't create container subgroup '%s'\n", OBJECT_CLOSE_TEST_GROUP_NAME);
        goto error;
    }

    /* Random dataspace/datatype used when creating the test dataset below */
    if ((fspace_id = generate_random_dataspace(OBJECT_CLOSE_TEST_SPACE_RANK, NULL, NULL, FALSE)) < 0)
        TEST_ERROR;

    if ((dset_dtype = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0)
        TEST_ERROR;

    PASSED();

    BEGIN_MULTIPART
    {
        PART_BEGIN(H5Oclose_group)
        {
            TESTING_2("H5Oclose on a group");

            if ((group_id2 = H5Gcreate2(group_id, OBJECT_CLOSE_TEST_GRP_NAME, H5P_DEFAULT, H5P_DEFAULT,
                                        H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't create group '%s'\n", OBJECT_CLOSE_TEST_GRP_NAME);
                PART_ERROR(H5Oclose_group);
            }

            /* Close with the type-specific routine; errors suppressed since the
             * outcome of this close is not what this part is testing */
            H5E_BEGIN_TRY
            {
                H5Gclose(group_id2);
            }
            H5E_END_TRY;

            /* Re-open generically, then verify H5Oclose can close it */
            if ((group_id2 = H5Oopen(group_id, OBJECT_CLOSE_TEST_GRP_NAME, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't open group '%s' with H5Oopen\n", OBJECT_CLOSE_TEST_GRP_NAME);
                PART_ERROR(H5Oclose_group);
            }

            if (H5Oclose(group_id2) < 0) {
                H5_FAILED();
                HDprintf(" couldn't close group '%s' with H5Oclose\n", OBJECT_CLOSE_TEST_GRP_NAME);
                PART_ERROR(H5Oclose_group);
            }

            PASSED();
        }
        PART_END(H5Oclose_group);

        PART_BEGIN(H5Oclose_dset)
        {
            TESTING_2("H5Oclose on a dataset");

            if ((dset_id = H5Dcreate2(group_id, OBJECT_CLOSE_TEST_DSET_NAME, dset_dtype, fspace_id,
                                      H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't create dataset '%s'\n", OBJECT_CLOSE_TEST_DSET_NAME);
                PART_ERROR(H5Oclose_dset);
            }

            /* Close with the type-specific routine (errors suppressed) */
            H5E_BEGIN_TRY
            {
                H5Dclose(dset_id);
            }
            H5E_END_TRY;

            /* Re-open generically, then verify H5Oclose can close it */
            if ((dset_id = H5Oopen(group_id, OBJECT_CLOSE_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't open dataset '%s' with H5Oopen\n", OBJECT_CLOSE_TEST_DSET_NAME);
                PART_ERROR(H5Oclose_dset);
            }

            if (H5Oclose(dset_id) < 0) {
                H5_FAILED();
                HDprintf(" couldn't close dataset '%s' with H5Oclose\n", OBJECT_CLOSE_TEST_DSET_NAME);
                PART_ERROR(H5Oclose_dset);
            }

            PASSED();
        }
        PART_END(H5Oclose_dset);

        PART_BEGIN(H5Oclose_dtype)
        {
            TESTING_2("H5Oclose on a committed datatype");

            if ((dtype_id = generate_random_datatype(H5T_NO_CLASS, FALSE)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't create datatype '%s'\n", OBJECT_CLOSE_TEST_TYPE_NAME);
                PART_ERROR(H5Oclose_dtype);
            }

            /* Commit the datatype so it becomes an addressable object */
            if (H5Tcommit2(group_id, OBJECT_CLOSE_TEST_TYPE_NAME, dtype_id, H5P_DEFAULT, H5P_DEFAULT,
                           H5P_DEFAULT) < 0) {
                H5_FAILED();
                HDprintf(" couldn't commit datatype '%s'\n", OBJECT_CLOSE_TEST_TYPE_NAME);
                PART_ERROR(H5Oclose_dtype);
            }

            /* Close with the type-specific routine (errors suppressed) */
            H5E_BEGIN_TRY
            {
                H5Tclose(dtype_id);
            }
            H5E_END_TRY;

            /* Re-open generically, then verify H5Oclose can close it */
            if ((dtype_id = H5Oopen(group_id, OBJECT_CLOSE_TEST_TYPE_NAME, H5P_DEFAULT)) < 0) {
                H5_FAILED();
                HDprintf(" couldn't open datatype '%s' with H5Oopen\n", OBJECT_CLOSE_TEST_TYPE_NAME);
                PART_ERROR(H5Oclose_dtype);
            }

            if (H5Oclose(dtype_id) < 0) {
                H5_FAILED();
                HDprintf(" couldn't close datatype '%s' with H5Oclose\n", OBJECT_CLOSE_TEST_TYPE_NAME);
                PART_ERROR(H5Oclose_dtype);
            }

            PASSED();
        }
        PART_END(H5Oclose_dtype);
    }
    END_MULTIPART;

    TESTING_2("test cleanup");

    if (H5Sclose(fspace_id) < 0)
        TEST_ERROR;
    if (H5Tclose(dset_dtype) < 0)
        TEST_ERROR;
    if (H5Gclose(group_id) < 0)
        TEST_ERROR;
    if (H5Gclose(container_group) < 0)
        TEST_ERROR;
    if (H5Fclose(file_id) < 0)
        TEST_ERROR;

    PASSED();

    return 0;

error:
    /* Best-effort cleanup of every ID that may still be open */
    H5E_BEGIN_TRY
    {
        H5Sclose(fspace_id);
        H5Tclose(dset_dtype);
        H5Tclose(dtype_id);
        H5Dclose(dset_id);
        H5Gclose(group_id2);
        H5Gclose(group_id);
        H5Gclose(container_group);
        H5Fclose(file_id);
    }
    H5E_END_TRY;

    return 1;
}
+
+/*
+ * A test to check that H5Oclose fails when it
+ * is passed invalid parameters.
+ */
+static int
+test_close_object_invalid_params(void)
+{
+ herr_t err_ret = -1;
+ hid_t file_id = H5I_INVALID_HID;
+
+ TESTING("H5Oclose with an invalid object ID");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file or object aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ H5E_BEGIN_TRY
+ {
+ err_ret = H5Oclose(H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+
+ if (err_ret >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Oclose succeeded with an invalid object ID!\n");
+ goto error;
+ }
+
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that various objects (file, dataspace, property list,
+ * and attribute) can't be closed with H5Oclose.
+ */
+static int
+test_close_invalid_objects(void)
+{
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t attr_dtype = H5I_INVALID_HID;
+ hid_t attr_space_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ herr_t status;
+
+ TESTING_MULTIPART("H5Oclose invalid objects");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & (H5VL_CAP_FLAG_FILE_BASIC)) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_BASIC)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, object, dataset, attribute, or stored datatype "
+ "aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0)
+ TEST_ERROR;
+
+ if ((file_id = H5Fopen(H5_api_test_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, OBJECT_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", OBJECT_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, OBJECT_CLOSE_INVALID_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n", OBJECT_OPEN_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((attr_space_id = generate_random_dataspace(OBJECT_CLOSE_INVALID_TEST_SPACE_RANK, NULL, NULL, TRUE)) <
+ 0)
+ TEST_ERROR;
+
+ if ((attr_dtype = generate_random_datatype(H5T_NO_CLASS, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((attr_id = H5Acreate2(group_id, OBJECT_CLOSE_INVALID_TEST_ATTRIBUTE_NAME, attr_dtype, attr_space_id,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Oclose_file)
+ {
+ TESTING_2("H5Oclose with an invalid object - file");
+
+ H5E_BEGIN_TRY
+ {
+ status = H5Oclose(file_id);
+ }
+ H5E_END_TRY;
+
+ if (status >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Oclose succeeded with an invalid object (file)!\n");
+ PART_ERROR(H5Oclose_file);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Oclose_file);
+
+ PART_BEGIN(H5Oclose_plist)
+ {
+ TESTING_2("H5Oclose with an invalid object - property list");
+
+ H5E_BEGIN_TRY
+ {
+ status = H5Oclose(fapl_id);
+ }
+ H5E_END_TRY;
+
+ if (status >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Oclose succeeded with an invalid object (property list)!\n");
+ PART_ERROR(H5Oclose_plist);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Oclose_plist);
+
+ PART_BEGIN(H5Oclose_dspace)
+ {
+ TESTING_2("H5Oclose with an invalid object - data space");
+
+ H5E_BEGIN_TRY
+ {
+ status = H5Oclose(attr_space_id);
+ }
+ H5E_END_TRY;
+
+ if (status >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Oclose succeeded with an invalid object (data space)!\n");
+ PART_ERROR(H5Oclose_dspace);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Oclose_dspace);
+
+ PART_BEGIN(H5Oclose_attribute)
+ {
+ TESTING_2("H5Oclose with an invalid object - attribute");
+
+ H5E_BEGIN_TRY
+ {
+ status = H5Oclose(attr_id);
+ }
+ H5E_END_TRY;
+
+ if (status >= 0) {
+ H5_FAILED();
+ HDprintf(" H5Oclose succeeded with an invalid object (attribute)!\n");
+ PART_ERROR(H5Oclose_attribute);
+ }
+
+ PASSED();
+ }
+ PART_END(H5Oclose_attribute);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (H5Tclose(attr_dtype) < 0)
+ TEST_ERROR;
+ if (H5Aclose(attr_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(attr_space_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ H5Tclose(attr_dtype);
+ H5Sclose(attr_space_id);
+ H5Aclose(attr_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Fclose(file_id);
+ H5Pclose(fapl_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+} /* test_close_invalid_objects */
+
/*
 * A test for H5Oflush.
 *
 * Currently a placeholder: the test is unconditionally skipped
 * and reports success without exercising H5Oflush.
 */
static int
test_flush_object(void)
{
    TESTING("H5Oflush");

    /* Not yet implemented */
    SKIPPED();

    return 0;
}
+
/*
 * A test to check that H5Oflush fails when
 * it is passed invalid parameters.
 *
 * Currently a placeholder: the test is unconditionally skipped
 * and reports success without exercising H5Oflush.
 */
static int
test_flush_object_invalid_params(void)
{
    TESTING("H5Oflush with invalid parameters");

    /* Not yet implemented */
    SKIPPED();

    return 0;
}
+
/*
 * A test for H5Orefresh.
 *
 * Currently a placeholder: the test is unconditionally skipped
 * and reports success without exercising H5Orefresh.
 */
static int
test_refresh_object(void)
{
    TESTING("H5Orefresh");

    /* Not yet implemented */
    SKIPPED();

    return 0;
}
+
/*
 * A test to check that H5Orefresh fails when
 * it is passed invalid parameters.
 *
 * Currently a placeholder: the test is unconditionally skipped
 * and reports success without exercising H5Orefresh.
 */
static int
test_refresh_object_invalid_params(void)
{
    TESTING("H5Orefresh with invalid parameters");

    /* Not yet implemented */
    SKIPPED();

    return 0;
}
+
+/*
+ * H5Ocopy test callback to check that an object's attributes got copied
+ * over successfully to the new object.
+ *
+ * op_data points to a size_t counter of attributes visited so far; the
+ * attribute at visit index N is expected to be named "attrN" and to have
+ * a datatype equal to H5T_NATIVE_INT.  The counter is incremented on
+ * every invocation, including failing ones, so visit order is validated.
+ *
+ * Returns H5_ITER_CONT on success, H5_ITER_ERROR on failure.
+ *
+ * NOTE: op_data is genuinely used here (it is the counter), so it must
+ * not be marked UNUSED; only ainfo is ignored.
+ */
+static herr_t
+object_copy_attribute_iter_callback(hid_t location_id, const char *attr_name, const H5A_info_t *ainfo,
+                                    void *op_data)
+{
+    size_t *counter = (size_t *)op_data;
+    htri_t  types_equal;
+    char    expected_name[256];
+    hid_t   attr_id   = H5I_INVALID_HID;
+    hid_t   attr_type = H5I_INVALID_HID;
+    herr_t  ret_value = H5_ITER_CONT;
+
+    UNUSED(ainfo);
+
+    /* Use the buffer's size instead of repeating a magic number */
+    snprintf(expected_name, sizeof(expected_name), "attr%d", (int)(*counter));
+
+    if (HDstrncmp(attr_name, expected_name, sizeof(expected_name))) {
+        HDprintf(" attribute name '%s' did not match expected name '%s'\n", attr_name, expected_name);
+        ret_value = H5_ITER_ERROR;
+        goto done;
+    }
+
+    if ((attr_id = H5Aopen(location_id, attr_name, H5P_DEFAULT)) < 0) {
+        HDprintf(" failed to open attribute '%s'\n", attr_name);
+        ret_value = H5_ITER_ERROR;
+        goto done;
+    }
+
+    if ((attr_type = H5Aget_type(attr_id)) < 0) {
+        HDprintf(" failed to retrieve attribute's datatype\n");
+        ret_value = H5_ITER_ERROR;
+        goto done;
+    }
+
+    if ((types_equal = H5Tequal(attr_type, H5T_NATIVE_INT)) < 0) {
+        HDprintf(" failed to determine if attribute's datatype matched what is expected\n");
+        ret_value = H5_ITER_ERROR;
+        goto done;
+    }
+
+    if (!types_equal) {
+        HDprintf(" attribute datatype did not match expected H5T_NATIVE_INT\n");
+        ret_value = H5_ITER_ERROR;
+        goto done;
+    }
+
+done:
+    /* Close only the handles that were actually opened above */
+    if (attr_type >= 0)
+        H5Tclose(attr_type);
+    if (attr_id >= 0)
+        H5Aclose(attr_id);
+
+    (*counter)++;
+
+    return ret_value;
+}
+
+/*
+ * H5Ocopy callback used to verify that a copied group's soft links were
+ * NOT expanded when the default copy options were used.
+ *
+ * op_data points to a size_t visit counter; the link at visit index N
+ * is expected to be a soft link whose value is the path ".../grpN".
+ * The counter is incremented on every invocation, pass or fail.
+ */
+static herr_t
+object_copy_soft_link_non_expand_callback(hid_t group, const char *name, const H5L_info2_t *info,
+                                          void *op_data)
+{
+    size_t *visit_idx = (size_t *)op_data;
+    char    expected_val[OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_BUF_SIZE];
+    void   *val_buf = NULL;
+    herr_t  ret     = H5_ITER_CONT;
+
+    /* A non-expanded link must still be of soft type */
+    if (info->type != H5L_TYPE_SOFT) {
+        HDprintf(" link type was not H5L_TYPE_SOFT; link must have been expanded!\n");
+        ret = H5_ITER_ERROR;
+        goto done;
+    }
+
+    /* Build the target path this link is expected to point at */
+    snprintf(expected_val, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_BUF_SIZE,
+             "/" OBJECT_TEST_GROUP_NAME "/" OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_SUBGROUP_NAME "/grp%d",
+             (int)(*visit_idx));
+
+    if (NULL == (val_buf = calloc(1, info->u.val_size))) {
+        HDprintf(" failed to allocate buffer for link value\n");
+        ret = H5_ITER_ERROR;
+        goto done;
+    }
+
+    /* Fetch the link's actual value and compare it against the expectation */
+    if (H5Lget_val(group, name, val_buf, info->u.val_size, H5P_DEFAULT) < 0) {
+        HDprintf(" failed to retrieve value of link '%s'\n", name);
+        ret = H5_ITER_ERROR;
+        goto done;
+    }
+
+    if (strncmp(val_buf, expected_val, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_BUF_SIZE) != 0) {
+        HDprintf(" value '%s' for link '%s' did not match expected value '%s'\n", (char *)val_buf,
+                 name, expected_val);
+        ret = H5_ITER_ERROR;
+        goto done;
+    }
+
+done:
+    /* free(NULL) is a no-op, so no guard is needed */
+    free(val_buf);
+
+    (*visit_idx)++;
+
+    return ret;
+}
+
+/*
+ * H5Ocopy callback used to verify that a copied group's soft links WERE
+ * expanded (i.e. became hard links) when the
+ * H5O_COPY_EXPAND_SOFT_LINK_FLAG flag was specified.
+ *
+ * op_data points to a size_t visit counter; the link at visit index N
+ * is expected to be named "linkN".  The counter is incremented on
+ * every invocation, pass or fail.
+ */
+static herr_t
+object_copy_soft_link_expand_callback(hid_t group, const char *name, const H5L_info2_t *info, void *op_data)
+{
+    size_t *visit_idx = (size_t *)op_data;
+    char    expected_name[OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_BUF_SIZE];
+    herr_t  ret = H5_ITER_CONT;
+
+    UNUSED(group);
+
+    /* An expanded soft link shows up as a hard link */
+    if (info->type != H5L_TYPE_HARD) {
+        HDprintf(" link type was not H5L_TYPE_HARD; link must not have been expanded!\n");
+        ret = H5_ITER_ERROR;
+        goto done;
+    }
+
+    /* The link names must still follow the 'link%d' pattern keyed by visit index */
+    snprintf(expected_name, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_BUF_SIZE, "link%d", (int)(*visit_idx));
+
+    if (strncmp(name, expected_name, OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_BUF_SIZE) != 0) {
+        HDprintf(" link name '%s' did not match expected name '%s'\n", name, expected_name);
+        ret = H5_ITER_ERROR;
+        goto done;
+    }
+
+done:
+    (*visit_idx)++;
+
+    return ret;
+}
+
+/*
+ * H5Ovisit callback to simply iterate recursively through all of the objects in a
+ * group and check to make sure their names match what is expected.
+ *
+ * op_data points to a size_t visit counter shared across invocations;
+ * it is incremented on every call, whether the check passes or fails.
+ * The hard-coded counter-value sets below encode the positions at which
+ * each object (starting group ".", subgroup, dataset, committed
+ * datatype) is expected to appear -- presumably across four separate
+ * H5Ovisit(_by_name) orderings in the visit test; NOTE(review): confirm
+ * these value sets against the calling test if its iteration orders
+ * ever change.
+ *
+ * Returns 0 to continue iteration, -1 on mismatch.
+ */
+static herr_t
+object_visit_callback(hid_t o_id, const char *name, const H5O_info2_t *object_info, void *op_data)
+{
+    size_t *i           = (size_t *)op_data;
+    size_t  counter_val = *((size_t *)op_data);
+    herr_t  ret_val     = 0;
+
+    UNUSED(o_id);
+
+    /* "." denotes the object on which the visit operation was started */
+    if (!HDstrncmp(name, ".", strlen(".") + 1) &&
+        (counter_val == 0 || counter_val == 4 || counter_val == 8 || counter_val == 12)) {
+        if (H5O_TYPE_GROUP == object_info->type)
+            goto done;
+        else
+            HDprintf(" type for object '%s' was not H5O_TYPE_GROUP\n", name);
+    }
+    else if (!HDstrncmp(name, OBJECT_VISIT_TEST_GROUP_NAME, strlen(OBJECT_VISIT_TEST_GROUP_NAME) + 1) &&
+             (counter_val == 2 || counter_val == 6 || counter_val == 9 || counter_val == 15)) {
+        if (H5O_TYPE_GROUP == object_info->type)
+            goto done;
+        else
+            HDprintf(" type for object '%s' was not H5O_TYPE_GROUP\n", name);
+    }
+    else if (!HDstrncmp(name, OBJECT_VISIT_TEST_DSET_NAME, strlen(OBJECT_VISIT_TEST_DSET_NAME) + 1) &&
+             (counter_val == 1 || counter_val == 7 || counter_val == 10 || counter_val == 14)) {
+        if (H5O_TYPE_DATASET == object_info->type)
+            goto done;
+        else
+            HDprintf(" type for object '%s' was not H5O_TYPE_DATASET\n", name);
+    }
+    else if (!HDstrncmp(name, OBJECT_VISIT_TEST_TYPE_NAME, strlen(OBJECT_VISIT_TEST_TYPE_NAME) + 1) &&
+             (counter_val == 3 || counter_val == 5 || counter_val == 11 || counter_val == 13)) {
+        if (H5O_TYPE_NAMED_DATATYPE == object_info->type)
+            goto done;
+        else
+            HDprintf(" type for object '%s' was not H5O_TYPE_NAMED_DATATYPE\n", name);
+    }
+    else
+        HDprintf(" object '%s' didn't match known names or came in an incorrect order\n", name);
+
+    /* Reached only when one of the checks above failed */
+    ret_val = -1;
+
+done:
+    /* Count this visit regardless of outcome */
+    (*i)++;
+
+    return ret_val;
+}
+
+/*
+ * H5Ovisit callback for visiting a singular dataset.
+ *
+ * Expects exactly one visited object: the dataset itself, reported
+ * under the name "." with type H5O_TYPE_DATASET.  Returns 0 on match,
+ * -1 otherwise.
+ */
+static herr_t
+object_visit_dset_callback(hid_t o_id, const char *name, const H5O_info2_t *object_info, void *op_data)
+{
+    UNUSED(o_id);
+    UNUSED(op_data);
+
+    if (HDstrncmp(name, ".", strlen(".") + 1) != 0) {
+        HDprintf(" object '%s' didn't match known names\n", name);
+        return -1;
+    }
+
+    if (object_info->type != H5O_TYPE_DATASET) {
+        HDprintf(" object type was not H5O_TYPE_DATASET\n");
+        return -1;
+    }
+
+    return 0;
+}
+
+/*
+ * H5Ovisit callback for visiting a singular committed datatype.
+ *
+ * Expects exactly one visited object: the datatype itself, reported
+ * under the name "." with type H5O_TYPE_NAMED_DATATYPE.  Returns 0 on
+ * match, -1 otherwise.
+ */
+static herr_t
+object_visit_dtype_callback(hid_t o_id, const char *name, const H5O_info2_t *object_info, void *op_data)
+{
+    UNUSED(o_id);
+    UNUSED(op_data);
+
+    if (HDstrncmp(name, ".", strlen(".") + 1) != 0) {
+        HDprintf(" object '%s' didn't match known names\n", name);
+        return -1;
+    }
+
+    if (object_info->type != H5O_TYPE_NAMED_DATATYPE) {
+        HDprintf(" object type was not H5O_TYPE_NAMED_DATATYPE\n");
+        return -1;
+    }
+
+    return 0;
+}
+
+/*
+ * H5Ovisit callback for testing ignoring of
+ * soft links during object visiting.
+ *
+ * Only the starting group itself (".") should ever be reported; soft
+ * links must not be followed.  op_data points to a size_t visit
+ * counter incremented on every invocation, pass or fail.
+ */
+static herr_t
+object_visit_soft_link_callback(hid_t o_id, const char *name, const H5O_info2_t *object_info, void *op_data)
+{
+    size_t *visit_count = (size_t *)op_data;
+    herr_t  ret         = -1;
+
+    UNUSED(o_id);
+
+    if (*visit_count <= 5 && !HDstrncmp(name, ".", strlen(".") + 1)) {
+        if (H5O_TYPE_GROUP == object_info->type)
+            ret = 0;
+        else
+            HDprintf(" type for object '%s' was not H5O_TYPE_GROUP\n", name);
+    }
+    else
+        HDprintf(" object '%s' didn't match known names or came in an incorrect order\n", name);
+
+    (*visit_count)++;
+
+    return ret;
+}
+
+/*
+ * H5Ovisit callback to simply iterate through all of the objects in a given
+ * group.
+ *
+ * Used where only the success/failure of the visit operation itself is
+ * of interest: every argument is ignored and iteration always
+ * continues (returns 0 unconditionally).
+ */
+static herr_t
+object_visit_noop_callback(hid_t o_id, const char *name, const H5O_info2_t *object_info, void *op_data)
+{
+    UNUSED(o_id);
+    UNUSED(name);
+    UNUSED(object_info);
+    UNUSED(op_data);
+
+    return 0;
+}
+
+/*
+ * Cleanup temporary test files
+ *
+ * Deletes the extra file created by the copy-between-files test using
+ * the default FAPL.  The H5Fdelete return value is deliberately
+ * ignored: cleanup is best-effort and the file may not exist if the
+ * corresponding test was skipped.
+ */
+static void
+cleanup_files(void)
+{
+    H5Fdelete(OBJECT_COPY_BETWEEN_FILES_TEST_FILE_NAME, H5P_DEFAULT);
+}
+
+/*
+ * Entry point for the object-interface API tests: runs every test in
+ * the object_tests table and returns the number of failures.
+ */
+int
+H5_api_object_test(void)
+{
+    size_t idx;
+    int    failures = 0;
+
+    HDprintf("**********************************************\n");
+    HDprintf("* *\n");
+    HDprintf("* API Object Tests *\n");
+    HDprintf("* *\n");
+    HDprintf("**********************************************\n\n");
+
+    /* Each test function returns non-zero on failure */
+    for (idx = 0; idx < ARRAY_LENGTH(object_tests); idx++)
+        if (object_tests[idx]())
+            failures++;
+
+    HDprintf("\n");
+
+    HDprintf("Cleaning up testing files\n");
+    cleanup_files();
+
+    return failures;
+}
diff --git a/test/API/H5_api_object_test.h b/test/API/H5_api_object_test.h
new file mode 100644
index 0000000..5470843
--- /dev/null
+++ b/test/API/H5_api_object_test.h
@@ -0,0 +1,191 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_OBJECT_TEST_H
+#define H5_API_OBJECT_TEST_H
+
+#include "H5_api_test.h"
+
+int H5_api_object_test(void);
+
+/***********************************************
+ * *
+ * API Object test defines *
+ * *
+ ***********************************************/
+
+#define OBJECT_OPEN_TEST_SPACE_RANK 2
+#define OBJECT_OPEN_TEST_GROUP_NAME "object_open_test"
+#define OBJECT_OPEN_TEST_GRP_NAME "object_open_test_group"
+#define OBJECT_OPEN_TEST_DSET_NAME "object_open_test_dset"
+#define OBJECT_OPEN_TEST_TYPE_NAME "object_open_test_type"
+
+#define OBJECT_OPEN_INVALID_PARAMS_TEST_GROUP_NAME "object_open_invalid_params_test"
+#define OBJECT_OPEN_INVALID_PARAMS_TEST_GRP_NAME "object_open_invalid_params_test_group"
+
+#define OBJECT_CLOSE_INVALID_TEST_GROUP_NAME "object_close_invalid_params_test"
+#define OBJECT_CLOSE_INVALID_TEST_ATTRIBUTE_NAME "object_close_invalid_test_attribute"
+#define OBJECT_CLOSE_INVALID_TEST_SPACE_RANK 2
+
+#define OBJECT_EXISTS_TEST_DSET_SPACE_RANK 2
+#define OBJECT_EXISTS_TEST_SUBGROUP_NAME "object_exists_test"
+#define OBJECT_EXISTS_TEST_DANGLING_LINK_NAME "object_exists_test_dangling_soft_link"
+#define OBJECT_EXISTS_TEST_SOFT_LINK_NAME "object_exists_test_soft_link"
+#define OBJECT_EXISTS_TEST_GRP_NAME "object_exists_test_group"
+#define OBJECT_EXISTS_TEST_TYPE_NAME "object_exists_test_type"
+#define OBJECT_EXISTS_TEST_DSET_NAME "object_exists_test_dset"
+
+#define OBJECT_EXISTS_INVALID_PARAMS_TEST_SUBGROUP_NAME "object_exists_invalid_params_test"
+#define OBJECT_EXISTS_INVALID_PARAMS_TEST_GRP_NAME "object_exists_invalid_params_test_group"
+
+#define OBJECT_COPY_BASIC_TEST_DEEP_NESTED_GROUP_NAME "deep_nested_group"
+#define OBJECT_COPY_BASIC_TEST_NUM_NESTED_OBJS 3
+#define OBJECT_COPY_BASIC_TEST_NEW_GROUP_NAME "copied_group"
+#define OBJECT_COPY_BASIC_TEST_NEW_DSET_NAME "copied_dset"
+#define OBJECT_COPY_BASIC_TEST_NEW_DTYPE_NAME "copied_dtype"
+#define OBJECT_COPY_BASIC_TEST_SUBGROUP_NAME "object_copy_basic_test"
+#define OBJECT_COPY_BASIC_TEST_GROUP_NAME "group_to_copy"
+#define OBJECT_COPY_BASIC_TEST_DSET_NAME "dset_to_copy"
+#define OBJECT_COPY_BASIC_TEST_DTYPE_NAME "dtype_to_copy"
+#define OBJECT_COPY_BASIC_TEST_SPACE_RANK 2
+#define OBJECT_COPY_BASIC_TEST_NUM_ATTRS 3
+#define OBJECT_COPY_BASIC_TEST_BUF_SIZE 256
+
+#define OBJECT_COPY_ALREADY_EXISTING_TEST_SUBGROUP_NAME "object_copy_existing_objects_test"
+#define OBJECT_COPY_ALREADY_EXISTING_TEST_GROUP_NAME "group_to_copy"
+#define OBJECT_COPY_ALREADY_EXISTING_TEST_DSET_NAME "dset_to_copy"
+#define OBJECT_COPY_ALREADY_EXISTING_TEST_DTYPE_NAME "dtype_to_copy"
+#define OBJECT_COPY_ALREADY_EXISTING_TEST_SPACE_RANK 2
+
+#define OBJECT_COPY_SHALLOW_TEST_DEEP_NESTED_GROUP_NAME "deep_nested_group"
+#define OBJECT_COPY_SHALLOW_TEST_NUM_NESTED_OBJS 3
+#define OBJECT_COPY_SHALLOW_TEST_SUBGROUP_NAME "object_copy_shallow_group_copy_test"
+#define OBJECT_COPY_SHALLOW_TEST_NEW_GROUP_NAME "copied_group"
+#define OBJECT_COPY_SHALLOW_TEST_GROUP_NAME "group_to_copy"
+#define OBJECT_COPY_SHALLOW_TEST_BUF_SIZE 256
+
+#define OBJECT_COPY_NO_ATTRS_TEST_SUBGROUP_NAME "object_copy_no_attributes_test"
+#define OBJECT_COPY_NO_ATTRS_TEST_NEW_GROUP_NAME "copied_group"
+#define OBJECT_COPY_NO_ATTRS_TEST_NEW_DSET_NAME "copied_dset"
+#define OBJECT_COPY_NO_ATTRS_TEST_NEW_DTYPE_NAME "copied_dtype"
+#define OBJECT_COPY_NO_ATTRS_TEST_GROUP_NAME "group_to_copy"
+#define OBJECT_COPY_NO_ATTRS_TEST_DSET_NAME "dset_to_copy"
+#define OBJECT_COPY_NO_ATTRS_TEST_DTYPE_NAME "dtype_to_copy"
+#define OBJECT_COPY_NO_ATTRS_TEST_SPACE_RANK 2
+#define OBJECT_COPY_NO_ATTRS_TEST_NUM_ATTRS 3
+#define OBJECT_COPY_NO_ATTRS_TEST_BUF_SIZE 256
+
+#define OBJECT_COPY_SOFT_LINK_TEST_DEEP_NESTED_GROUP_NAME "deep_nested_group"
+#define OBJECT_COPY_SOFT_LINK_TEST_DANGLING_LINK_NAME "dangling_link"
+#define OBJECT_COPY_SOFT_LINK_TEST_NUM_NESTED_OBJS 3
+#define OBJECT_COPY_SOFT_LINK_TEST_SUBGROUP_NAME "object_copy_soft_link_test"
+#define OBJECT_COPY_SOFT_LINK_TEST_SOFT_LINK_NAME "soft_link_to_group_to_copy"
+#define OBJECT_COPY_SOFT_LINK_TEST_NEW_GROUP_NAME "copied_group"
+#define OBJECT_COPY_SOFT_LINK_TEST_GROUP_NAME "group_to_copy"
+#define OBJECT_COPY_SOFT_LINK_TEST_SPACE_RANK 2
+#define OBJECT_COPY_SOFT_LINK_TEST_NUM_ATTRS 3
+#define OBJECT_COPY_SOFT_LINK_TEST_BUF_SIZE 256
+
+#define OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_EXPAND_DANGLING_GROUP_NAME "expanded_dangling_soft_links_group"
+#define OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NON_EXPAND_GROUP_NAME "non_expanded_soft_links_group"
+#define OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_EXPAND_GROUP_NAME "expanded_soft_links_group"
+#define OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_NUM_NESTED_OBJS 3
+#define OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_SUBGROUP_NAME "object_copy_group_with_soft_links_test"
+#define OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_GROUP_NAME "group_to_copy"
+#define OBJECT_COPY_GROUP_WITH_SOFT_LINKS_TEST_BUF_SIZE 256
+
+#define OBJECT_COPY_BETWEEN_FILES_TEST_DEEP_NESTED_GROUP_NAME "deep_nested_group"
+#define OBJECT_COPY_BETWEEN_FILES_TEST_NUM_NESTED_OBJS 3
+#define OBJECT_COPY_BETWEEN_FILES_TEST_SUBGROUP_NAME "object_copy_between_files_test"
+#define OBJECT_COPY_BETWEEN_FILES_TEST_NEW_GROUP_NAME "copied_group"
+#define OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DSET_NAME "copied_dset"
+#define OBJECT_COPY_BETWEEN_FILES_TEST_NEW_DTYPE_NAME "copied_dtype"
+#define OBJECT_COPY_BETWEEN_FILES_TEST_FILE_NAME "object_copy_test_file.h5"
+#define OBJECT_COPY_BETWEEN_FILES_TEST_GROUP_NAME "group_to_copy"
+#define OBJECT_COPY_BETWEEN_FILES_TEST_DSET_NAME "dset_to_copy"
+#define OBJECT_COPY_BETWEEN_FILES_TEST_DTYPE_NAME "dtype_to_copy"
+#define OBJECT_COPY_BETWEEN_FILES_TEST_SPACE_RANK 2
+#define OBJECT_COPY_BETWEEN_FILES_TEST_NUM_ATTRS 3
+#define OBJECT_COPY_BETWEEN_FILES_TEST_BUF_SIZE 256
+
+#define OBJECT_COPY_INVALID_PARAMS_TEST_SUBGROUP_NAME "object_copy_invalid_params_test"
+#define OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME "object_copy_invalid_params_group"
+#define OBJECT_COPY_INVALID_PARAMS_TEST_GROUP_NAME2 "object_copy_invalid_params_group_copy"
+
+#define OBJECT_VISIT_TEST_NUM_OBJS_VISITED 4
+#define OBJECT_VISIT_TEST_SUBGROUP_NAME "object_visit_test"
+#define OBJECT_VISIT_TEST_SPACE_RANK 2
+#define OBJECT_VISIT_TEST_GROUP_NAME "object_visit_test_group"
+#define OBJECT_VISIT_TEST_DSET_NAME "object_visit_test_dset"
+#define OBJECT_VISIT_TEST_TYPE_NAME "object_visit_test_type"
+
+#define OBJECT_VISIT_SOFT_LINK_TEST_NUM_OBJS_VISITED 1
+#define OBJECT_VISIT_SOFT_LINK_TEST_SUBGROUP_NAME "object_visit_soft_link"
+#define OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME1 "links_group"
+#define OBJECT_VISIT_SOFT_LINK_TEST_GROUP_NAME2 "objects_group"
+#define OBJECT_VISIT_SOFT_LINK_TEST_LINK_NAME1 "soft_link1"
+#define OBJECT_VISIT_SOFT_LINK_TEST_LINK_NAME2 "soft_link2"
+#define OBJECT_VISIT_SOFT_LINK_TEST_LINK_NAME3 "soft_link3"
+#define OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME1 "group1"
+#define OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME2 "group2"
+#define OBJECT_VISIT_SOFT_LINK_TEST_OBJ_NAME3 "group3"
+
+#define OBJECT_VISIT_DANGLING_LINK_TEST_SUBGROUP_NAME "object_visit_dangling_link_test"
+#define OBJECT_VISIT_DANGLING_LINK_TEST_LINK_NAME1 "dangling_link1"
+#define OBJECT_VISIT_DANGLING_LINK_TEST_LINK_NAME2 "dangling_link2"
+#define OBJECT_VISIT_DANGLING_LINK_TEST_LINK_NAME3 "dangling_link3"
+
+#define OBJECT_VISIT_INVALID_PARAMS_TEST_SUBGROUP_NAME "object_visit_invalid_params_test"
+#define OBJECT_VISIT_INVALID_PARAMS_TEST_GROUP_NAME "object_visit_invalid_params_group"
+
+#define OBJECT_CLOSE_TEST_SPACE_RANK 2
+#define OBJECT_CLOSE_TEST_GROUP_NAME "object_close_test"
+#define OBJECT_CLOSE_TEST_GRP_NAME "object_close_test_group"
+#define OBJECT_CLOSE_TEST_DSET_NAME "object_close_test_dset"
+#define OBJECT_CLOSE_TEST_TYPE_NAME "object_close_test_type"
+
+#define OBJECT_LINK_TEST_GROUP_NAME "object_link_test_group"
+#define OBJECT_LINK_TEST_GROUP_NAME2 "object_link_test_group_link"
+#define OBJECT_LINK_TEST_DSET_NAME "object_link_test_dataset"
+#define OBJECT_LINK_TEST_DTYPE_NAME "object_link_test_datatype"
+#define OBJECT_LINK_TEST_SPACE_RANK 2
+
+#define OBJECT_LINK_INVALID_PARAMS_TEST_GROUP_NAME "object_link_invalid_params_test_group"
+
+#define OBJ_REF_GET_TYPE_TEST_SUBGROUP_NAME "obj_ref_get_obj_type_test"
+#define OBJ_REF_GET_TYPE_TEST_DSET_NAME "ref_dset"
+#define OBJ_REF_GET_TYPE_TEST_TYPE_NAME "ref_dtype"
+#define OBJ_REF_GET_TYPE_TEST_SPACE_RANK 2
+
+#define OBJ_REF_DATASET_WRITE_TEST_SUBGROUP_NAME "obj_ref_write_test"
+#define OBJ_REF_DATASET_WRITE_TEST_REF_DSET_NAME "ref_dset"
+#define OBJ_REF_DATASET_WRITE_TEST_REF_TYPE_NAME "ref_dtype"
+#define OBJ_REF_DATASET_WRITE_TEST_SPACE_RANK 1
+#define OBJ_REF_DATASET_WRITE_TEST_DSET_NAME "obj_ref_dset"
+
+#define OBJ_REF_DATASET_READ_TEST_SUBGROUP_NAME "obj_ref_read_test"
+#define OBJ_REF_DATASET_READ_TEST_REF_DSET_NAME "ref_dset"
+#define OBJ_REF_DATASET_READ_TEST_REF_TYPE_NAME "ref_dtype"
+#define OBJ_REF_DATASET_READ_TEST_SPACE_RANK 1
+#define OBJ_REF_DATASET_READ_TEST_DSET_NAME "obj_ref_dset"
+
+#define OBJ_REF_DATASET_EMPTY_WRITE_TEST_SUBGROUP_NAME "obj_ref_empty_write_test"
+#define OBJ_REF_DATASET_EMPTY_WRITE_TEST_SPACE_RANK 1
+#define OBJ_REF_DATASET_EMPTY_WRITE_TEST_DSET_NAME "obj_ref_dset"
+
+#define OBJECT_REF_COUNT_TEST_SUBGROUP_NAME "ref_count_test"
+#define OBJECT_REF_COUNT_TEST_GRP_NAME "ref_count_test_group"
+#define OBJECT_REF_COUNT_TEST_DSET_NAME "ref_count_dset"
+#define OBJECT_REF_COUNT_TEST_TYPE_NAME "ref_count_dtype"
+#define OBJECT_REF_COUNT_TEST_DSET_SPACE_RANK 2
+
+#endif
diff --git a/test/API/H5_api_test.c b/test/API/H5_api_test.c
new file mode 100644
index 0000000..6d61b75
--- /dev/null
+++ b/test/API/H5_api_test.c
@@ -0,0 +1,227 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * A test suite which only makes public HDF5 API calls and which is meant
+ * to test the native VOL connector or a specified HDF5 VOL connector (or
+ * set of connectors stacked with each other). This test suite must assume
+ * that a VOL connector could only implement the File interface. Therefore,
+ * the suite should check that a particular piece of functionality is supported
+ * by the VOL connector before actually testing it. If the functionality is
+ * not supported, the test should simply be skipped, perhaps with a note as
+ * to why the test was skipped, if possible.
+ *
+ * If the VOL connector being used supports the creation of groups, this
+ * test suite will attempt to organize the output of these various tests
+ * into groups based on their respective HDF5 interface.
+ */
+
+#include "H5_api_test.h"
+
+#include "H5_api_attribute_test.h"
+#include "H5_api_dataset_test.h"
+#include "H5_api_datatype_test.h"
+#include "H5_api_file_test.h"
+#include "H5_api_group_test.h"
+#include "H5_api_link_test.h"
+#include "H5_api_misc_test.h"
+#include "H5_api_object_test.h"
+#include "H5_api_test_util.h"
+#ifdef H5_API_TEST_HAVE_ASYNC
+#include "H5_api_async_test.h"
+#endif
+
+char H5_api_test_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+
+const char *test_path_prefix;
+
+/* X-macro to define the following for each test:
+ * - enum type
+ * - name
+ * - test function
+ * - enabled by default
+ */
+#ifdef H5_API_TEST_HAVE_ASYNC
+#define H5_API_TESTS \
+ X(H5_API_TEST_NULL, "", NULL, 0) \
+ X(H5_API_TEST_FILE, "file", H5_api_file_test, 1) \
+ X(H5_API_TEST_GROUP, "group", H5_api_group_test, 1) \
+ X(H5_API_TEST_DATASET, "dataset", H5_api_dataset_test, 1) \
+ X(H5_API_TEST_DATATYPE, "datatype", H5_api_datatype_test, 1) \
+ X(H5_API_TEST_ATTRIBUTE, "attribute", H5_api_attribute_test, 1) \
+ X(H5_API_TEST_LINK, "link", H5_api_link_test, 1) \
+ X(H5_API_TEST_OBJECT, "object", H5_api_object_test, 1) \
+ X(H5_API_TEST_MISC, "misc", H5_api_misc_test, 1) \
+ X(H5_API_TEST_ASYNC, "async", H5_api_async_test, 1) \
+ X(H5_API_TEST_MAX, "", NULL, 0)
+#else
+#define H5_API_TESTS \
+ X(H5_API_TEST_NULL, "", NULL, 0) \
+ X(H5_API_TEST_FILE, "file", H5_api_file_test, 1) \
+ X(H5_API_TEST_GROUP, "group", H5_api_group_test, 1) \
+ X(H5_API_TEST_DATASET, "dataset", H5_api_dataset_test, 1) \
+ X(H5_API_TEST_DATATYPE, "datatype", H5_api_datatype_test, 1) \
+ X(H5_API_TEST_ATTRIBUTE, "attribute", H5_api_attribute_test, 1) \
+ X(H5_API_TEST_LINK, "link", H5_api_link_test, 1) \
+ X(H5_API_TEST_OBJECT, "object", H5_api_object_test, 1) \
+ X(H5_API_TEST_MISC, "misc", H5_api_misc_test, 1) \
+ X(H5_API_TEST_MAX, "", NULL, 0)
+#endif
+
+#define X(a, b, c, d) a,
+enum H5_api_test_type { H5_API_TESTS };
+#undef X
+#define X(a, b, c, d) b,
+static const char *const H5_api_test_name[] = {H5_API_TESTS};
+#undef X
+#define X(a, b, c, d) c,
+static int (*H5_api_test_func[])(void) = {H5_API_TESTS};
+#undef X
+#define X(a, b, c, d) d,
+static int H5_api_test_enabled[] = {H5_API_TESTS};
+#undef X
+
+/*
+ * Maps a test-set name string (e.g. "file", "dataset") to its
+ * enum H5_api_test_type value.  Returns H5_API_TEST_NULL when the name
+ * matches no known test set.
+ */
+static enum H5_api_test_type
+H5_api_test_name_to_type(const char *test_name)
+{
+    enum H5_api_test_type i = 0;
+
+    /* Check the index bound BEFORE touching the name table so the loop
+     * never depends on the sentinel "" entry at H5_API_TEST_MAX being
+     * present; results are identical for all inputs. */
+    while (i != H5_API_TEST_MAX && strcmp(H5_api_test_name[i], test_name))
+        i++;
+
+    return ((i == H5_API_TEST_MAX) ? H5_API_TEST_NULL : i);
+}
+
+/*
+ * Executes every API test set whose entry in H5_api_test_enabled is
+ * set, skipping the others.  Individual test results are intentionally
+ * discarded; the tests update the global pass/fail counters themselves.
+ */
+static void
+H5_api_test_run(void)
+{
+    enum H5_api_test_type test;
+
+    for (test = H5_API_TEST_FILE; test < H5_API_TEST_MAX; test++) {
+        if (!H5_api_test_enabled[test])
+            continue;
+
+        (void)H5_api_test_func[test]();
+    }
+}
+
+/******************************************************************************/
+
+/*
+ * Driver for the serial API test suite.
+ *
+ * Optionally restricts the run to one test set named by argv[1],
+ * creates the shared container file, runs all enabled test sets and
+ * prints a pass/fail/skip summary.  Exits with EXIT_FAILURE if setup
+ * failed or any test failed.
+ */
+int
+main(int argc, char **argv)
+{
+    const char *vol_connector_name;
+    unsigned    seed;
+    hid_t       fapl_id      = H5I_INVALID_HID;
+    hbool_t     err_occurred = FALSE;
+
+    /* Simple argument checking, TODO can improve that later */
+    if (argc > 1) {
+        enum H5_api_test_type i = H5_api_test_name_to_type(argv[1]);
+        if (i != H5_API_TEST_NULL) {
+            /* Run only specific API test */
+            memset(H5_api_test_enabled, 0, sizeof(H5_api_test_enabled));
+            H5_api_test_enabled[i] = 1;
+        }
+    }
+
+#ifdef H5_HAVE_PARALLEL
+    /* If HDF5 was built with parallel enabled, go ahead and call MPI_Init before
+     * running these tests. Even though these are meant to be serial tests, they will
+     * likely be run using mpirun (or similar) and we cannot necessarily expect HDF5 or
+     * an HDF5 VOL connector to call MPI_Init.
+     */
+    MPI_Init(&argc, &argv);
+#endif
+
+    /* h5_reset(); */
+
+    /* Reset the global pass/fail/skip counters before any test runs */
+    n_tests_run_g     = 0;
+    n_tests_passed_g  = 0;
+    n_tests_failed_g  = 0;
+    n_tests_skipped_g = 0;
+
+    /* Seed rand(); the seed is printed below so failing runs can be reproduced */
+    seed = (unsigned)HDtime(NULL);
+    srand(seed);
+
+    /* An optional path prefix lets the test file be placed outside the CWD */
+    if (NULL == (test_path_prefix = HDgetenv(HDF5_API_TEST_PATH_PREFIX)))
+        test_path_prefix = "";
+
+    HDsnprintf(H5_api_test_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s%s", test_path_prefix,
+               TEST_FILE_NAME);
+
+    if (NULL == (vol_connector_name = HDgetenv(HDF5_VOL_CONNECTOR))) {
+        HDprintf("No VOL connector selected; using native VOL connector\n");
+        vol_connector_name = "native";
+    }
+
+    HDprintf("Running API tests with VOL connector '%s'\n\n", vol_connector_name);
+    HDprintf("Test parameters:\n");
+    HDprintf(" - Test file name: '%s'\n", H5_api_test_filename);
+    HDprintf(" - Test seed: %u\n", seed);
+    HDprintf("\n\n");
+
+    /* Retrieve the VOL cap flags - work around an HDF5
+     * library issue by creating a FAPL
+     */
+    if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) {
+        HDfprintf(stderr, "Unable to create FAPL\n");
+        err_occurred = TRUE;
+        goto done;
+    }
+
+    vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
+    if (H5Pget_vol_cap_flags(fapl_id, &vol_cap_flags_g) < 0) {
+        HDfprintf(stderr, "Unable to retrieve VOL connector capability flags\n");
+        err_occurred = TRUE;
+        goto done;
+    }
+
+    /*
+     * Create the file that will be used for all of the tests,
+     * except for those which test file creation.
+     */
+    if (create_test_container(H5_api_test_filename, vol_cap_flags_g) < 0) {
+        HDfprintf(stderr, "Unable to create testing container file '%s'\n", H5_api_test_filename);
+        err_occurred = TRUE;
+        goto done;
+    }
+
+    /* Run all the tests that are enabled */
+    H5_api_test_run();
+
+    HDprintf("Cleaning up testing files\n");
+    H5Fdelete(H5_api_test_filename, fapl_id);
+
+    /* Only print percentages if at least one test actually ran */
+    if (n_tests_run_g > 0) {
+        HDprintf("%zu/%zu (%.2f%%) API tests passed with VOL connector '%s'\n", n_tests_passed_g,
+                 n_tests_run_g, ((double)n_tests_passed_g / (double)n_tests_run_g * 100.0),
+                 vol_connector_name);
+        HDprintf("%zu/%zu (%.2f%%) API tests did not pass with VOL connector '%s'\n", n_tests_failed_g,
+                 n_tests_run_g, ((double)n_tests_failed_g / (double)n_tests_run_g * 100.0),
+                 vol_connector_name);
+        HDprintf("%zu/%zu (%.2f%%) API tests were skipped with VOL connector '%s'\n", n_tests_skipped_g,
+                 n_tests_run_g, ((double)n_tests_skipped_g / (double)n_tests_run_g * 100.0),
+                 vol_connector_name);
+    }
+
+done:
+    /* Best-effort teardown: close the FAPL (if created) and shut HDF5 down */
+    if (fapl_id >= 0 && H5Pclose(fapl_id) < 0) {
+        HDfprintf(stderr, "Unable to close FAPL\n");
+        err_occurred = TRUE;
+    }
+
+    H5close();
+
+#ifdef H5_HAVE_PARALLEL
+    MPI_Finalize();
+#endif
+
+    HDexit(((err_occurred || n_tests_failed_g > 0) ? EXIT_FAILURE : EXIT_SUCCESS));
+}
diff --git a/test/API/H5_api_test.h b/test/API/H5_api_test.h
new file mode 100644
index 0000000..296d296
--- /dev/null
+++ b/test/API/H5_api_test.h
@@ -0,0 +1,73 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_TEST_H
+#define H5_API_TEST_H
+
+#include <hdf5.h>
+#include <H5private.h>
+
+#include "h5test.h"
+
+#include "H5_api_test_config.h"
+#include "H5_api_test_util.h"
+#include "H5_api_tests_disabled.h"
+
+/* Define H5VL_VERSION if not already defined */
+#ifndef H5VL_VERSION
+#define H5VL_VERSION 0
+#endif
+
+/* Define macro to wait forever depending on version */
+#if H5VL_VERSION >= 2
+#define H5_API_TEST_WAIT_FOREVER H5ES_WAIT_FOREVER
+#else
+#define H5_API_TEST_WAIT_FOREVER UINT64_MAX
+#endif
+
+/******************************************************************************/
+
+/* The name of the file that all of the tests will operate on */
+#define TEST_FILE_NAME "H5_api_test.h5"
+extern char H5_api_test_filename[];
+
+extern const char *test_path_prefix;
+
+/*
+ * Environment variable specifying a prefix string to add to
+ * filenames generated by the API tests
+ */
+#define HDF5_API_TEST_PATH_PREFIX "HDF5_API_TEST_PATH_PREFIX"
+
+/* The names of a set of container groups which hold objects
+ * created by each of the different types of tests.
+ */
+#define GROUP_TEST_GROUP_NAME "group_tests"
+#define ATTRIBUTE_TEST_GROUP_NAME "attribute_tests"
+#define DATASET_TEST_GROUP_NAME "dataset_tests"
+#define DATATYPE_TEST_GROUP_NAME "datatype_tests"
+#define LINK_TEST_GROUP_NAME "link_tests"
+#define OBJECT_TEST_GROUP_NAME "object_tests"
+#define MISCELLANEOUS_TEST_GROUP_NAME "miscellaneous_tests"
+
+/* Number of elements in a true array (invalid on pointers/parameters).
+ * The expansion is fully parenthesized so it composes safely inside
+ * larger expressions (e.g. `x / ARRAY_LENGTH(a)` previously misparsed
+ * as `x / sizeof(a) / sizeof(a[0])`). */
+#define ARRAY_LENGTH(array) (sizeof(array) / sizeof((array)[0]))
+
+/* Suppress unused-parameter warnings.  The expansion deliberately has
+ * no trailing semicolon: callers already write `UNUSED(x);`, and the
+ * old embedded semicolon produced an empty statement that broke
+ * unbraced `if (c) UNUSED(x); else ...` constructs. */
+#define UNUSED(o) (void)(o)
+
+#define H5_API_TEST_FILENAME_MAX_LENGTH 1024
+
+/* The maximum size of a dimension in an HDF5 dataspace as allowed
+ * for this testing suite so as not to try to create too large
+ * of a dataspace/datatype. */
+#define MAX_DIM_SIZE 16
+
+#endif
diff --git a/test/API/H5_api_test_config.h.in b/test/API/H5_api_test_config.h.in
new file mode 100644
index 0000000..c1833fa
--- /dev/null
+++ b/test/API/H5_api_test_config.h.in
@@ -0,0 +1,66 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_TEST_CONFIG_H
+#define H5_API_TEST_CONFIG_H
+
+#include "hdf5.h"
+
+#cmakedefine H5_API_TEST_HAVE_ASYNC
+
+#ifdef H5_HAVE_PARALLEL
+#cmakedefine MPIEXEC_EXECUTABLE "@MPIEXEC_EXECUTABLE@"
+#cmakedefine MPIEXEC "@MPIEXEC@" /* For compatibility */
+#ifndef MPIEXEC_EXECUTABLE
+# define MPIEXEC_EXECUTABLE MPIEXEC
+#endif
+#cmakedefine MPIEXEC_NUMPROC_FLAG "@MPIEXEC_NUMPROC_FLAG@"
+#cmakedefine MPIEXEC_PREFLAGS "@MPIEXEC_PREFLAGS@"
+#cmakedefine MPIEXEC_POSTFLAGS "@MPIEXEC_POSTFLAGS@"
+/* Server-specific flags if different */
+#cmakedefine MPIEXEC_SERVER_PREFLAGS "@MPIEXEC_SERVER_PREFLAGS@"
+#cmakedefine MPIEXEC_SERVER_POSTFLAGS "@MPIEXEC_SERVER_POSTFLAGS@"
+#cmakedefine MPIEXEC_MAX_NUMPROCS @MPIEXEC_MAX_NUMPROCS@
+#endif /* H5_HAVE_PARALLEL */
+
+#cmakedefine DART_TESTING_TIMEOUT @DART_TESTING_TIMEOUT@
+#ifndef DART_TESTING_TIMEOUT
+# define DART_TESTING_TIMEOUT 1500
+#endif
+
+#cmakedefine H5_API_TEST_ENV_VARS "@H5_API_TEST_ENV_VARS@"
+
+#cmakedefine H5_API_TEST_INIT_COMMAND "@H5_API_TEST_INIT_COMMAND@"
+
+#cmakedefine H5_API_TEST_SERVER_START_MSG "@H5_API_TEST_SERVER_START_MSG@"
+#ifndef H5_API_TEST_SERVER_START_MSG
+# define H5_API_TEST_SERVER_START_MSG "Waiting"
+#endif
+#cmakedefine H5_API_TEST_SERVER_EXIT_COMMAND "@H5_API_TEST_SERVER_EXIT_COMMAND@"
+
+#cmakedefine H5_API_TEST_CLIENT_HELPER_START_MSG "@H5_API_TEST_CLIENT_HELPER_START_MSG@"
+#ifndef H5_API_TEST_CLIENT_HELPER_START_MSG
+# define H5_API_TEST_CLIENT_HELPER_START_MSG "Waiting"
+#endif
+#cmakedefine H5_API_TEST_CLIENT_HELPER_EXIT_COMMAND "@H5_API_TEST_CLIENT_HELPER_EXIT_COMMAND@"
+
+#cmakedefine H5_API_TEST_CLIENT_INIT_TOKEN_REGEX "@H5_API_TEST_CLIENT_INIT_TOKEN_REGEX@"
+#ifndef H5_API_TEST_CLIENT_INIT_TOKEN_REGEX
+# define H5_API_TEST_CLIENT_INIT_TOKEN_REGEX "^token"
+#endif
+#cmakedefine H5_API_TEST_CLIENT_INIT_TOKEN_VAR "@H5_API_TEST_CLIENT_INIT_TOKEN_VAR@"
+#ifndef H5_API_TEST_CLIENT_INIT_TOKEN_VAR
+# define H5_API_TEST_CLIENT_INIT_TOKEN_VAR "TOKEN"
+#endif
+
+
+#endif /* H5_API_TEST_CONFIG_H */
diff --git a/test/API/H5_api_test_util.c b/test/API/H5_api_test_util.c
new file mode 100644
index 0000000..7fec2b6
--- /dev/null
+++ b/test/API/H5_api_test_util.c
@@ -0,0 +1,819 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_test.h"
+#include "H5_api_test_util.h"
+
+/*
+ * The maximum allowable size of a generated datatype.
+ *
+ * NOTE: HDF5 currently has limits on the maximum size of
+ * a datatype of an object, as this information is stored
+ * in the object header. In order to provide maximum
+ * compatibility between the native VOL connector and others
+ * for this test suite, we limit the size of a datatype here.
+ * This value should be adjusted as future HDF5 development
+ * allows.
+ */
+#define GENERATED_DATATYPE_MAX_SIZE 65536
+
+/*
+ * The maximum size of a datatype for compact objects that
+ * must fit within the size of a native HDF5 object header message.
+ * This is typically used for attributes and compact datasets.
+ */
+#define COMPACT_DATATYPE_MAX_SIZE 1024
+
+/* The maximum level of recursion that the generate_random_datatype()
+ * function should go down to, before being forced to choose a base type
+ * in order to not cause a stack overflow.
+ */
+#define TYPE_GEN_RECURSION_MAX_DEPTH 3
+
+/* The maximum number of members allowed in an HDF5 compound type, as
+ * generated by the generate_random_datatype() function, for ease of
+ * development.
+ */
+#define COMPOUND_TYPE_MAX_MEMBERS 4
+
+/* The maximum number and size of the dimensions of an HDF5 array
+ * datatype, as generated by the generate_random_datatype() function.
+ */
+#define ARRAY_TYPE_MAX_DIMS 4
+
+/* The maximum number of members and the maximum size of those
+ * members' names for an HDF5 enum type, as generated by the
+ * generate_random_datatype() function.
+ */
+#define ENUM_TYPE_MAX_MEMBER_NAME_LENGTH 256
+#define ENUM_TYPE_MAX_MEMBERS 16
+
+/* The maximum size of an HDF5 string datatype, as created by the
+ * generate_random_datatype() function.
+ */
+#define STRING_TYPE_MAX_SIZE 1024
+
+/*
+ * The maximum dimensionality and dimension size of a dataspace
+ * generated for an attribute or compact dataset.
+ */
+#define COMPACT_SPACE_MAX_DIM_SIZE 4
+#define COMPACT_SPACE_MAX_DIMS 3
+
+/*
+ * Helper function to generate a random HDF5 datatype in order to thoroughly
+ * test support for datatypes. The parent_class parameter is to support
+ * recursive generation of datatypes (compound members and array base types);
+ * in most cases, this function should be called with H5T_NO_CLASS for the
+ * parent_class parameter. When is_compact is TRUE, the outermost call keeps
+ * re-rolling until the generated type fits in COMPACT_DATATYPE_MAX_SIZE.
+ *
+ * Returns a new datatype ID on success, H5I_INVALID_HID on failure.
+ */
+/*
+ * XXX: limit size of datatype generated (GENERATED_DATATYPE_MAX_SIZE does
+ * not appear to be enforced in this function -- confirm)
+ */
+hid_t
+generate_random_datatype(H5T_class_t parent_class, hbool_t is_compact)
+{
+    /* Recursion depth shared across nested calls (not thread-safe) */
+    static int depth = 0;
+    hsize_t *array_dims = NULL;
+    size_t i;
+    hid_t compound_members[COMPOUND_TYPE_MAX_MEMBERS];
+    hid_t datatype = H5I_INVALID_HID;
+
+    depth++;
+
+    for (i = 0; i < COMPOUND_TYPE_MAX_MEMBERS; i++)
+        compound_members[i] = H5I_INVALID_HID;
+
+    /* Pick a random datatype class; classes that are unsupported (or invalid
+     * in the current context) jump to 'reroll' to pick a different class. */
+    switch (rand() % H5T_NCLASSES) {
+case_integer:
+        case H5T_INTEGER: {
+            /* Pick one of the 16 predefined fixed-width integer types */
+            switch (rand() % 16) {
+                case 0:
+                    if ((datatype = H5Tcopy(H5T_STD_I8BE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 1:
+                    if ((datatype = H5Tcopy(H5T_STD_I8LE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 2:
+                    if ((datatype = H5Tcopy(H5T_STD_I16BE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 3:
+                    if ((datatype = H5Tcopy(H5T_STD_I16LE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 4:
+                    if ((datatype = H5Tcopy(H5T_STD_I32BE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 5:
+                    if ((datatype = H5Tcopy(H5T_STD_I32LE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 6:
+                    if ((datatype = H5Tcopy(H5T_STD_I64BE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 7:
+                    if ((datatype = H5Tcopy(H5T_STD_I64LE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 8:
+                    if ((datatype = H5Tcopy(H5T_STD_U8BE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 9:
+                    if ((datatype = H5Tcopy(H5T_STD_U8LE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 10:
+                    if ((datatype = H5Tcopy(H5T_STD_U16BE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 11:
+                    if ((datatype = H5Tcopy(H5T_STD_U16LE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 12:
+                    if ((datatype = H5Tcopy(H5T_STD_U32BE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 13:
+                    if ((datatype = H5Tcopy(H5T_STD_U32LE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 14:
+                    if ((datatype = H5Tcopy(H5T_STD_U64BE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 15:
+                    if ((datatype = H5Tcopy(H5T_STD_U64LE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined integer type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                default:
+                    H5_FAILED();
+                    HDprintf(" invalid value for predefined integer type; should not happen\n");
+                    goto done;
+            }
+
+            break;
+        }
+
+case_float:
+        case H5T_FLOAT: {
+            /* Pick one of the 4 predefined IEEE floating-point types */
+            switch (rand() % 4) {
+                case 0:
+                    if ((datatype = H5Tcopy(H5T_IEEE_F32BE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined floating-point type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 1:
+                    if ((datatype = H5Tcopy(H5T_IEEE_F32LE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined floating-point type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 2:
+                    if ((datatype = H5Tcopy(H5T_IEEE_F64BE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined floating-point type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                case 3:
+                    if ((datatype = H5Tcopy(H5T_IEEE_F64LE)) < 0) {
+                        H5_FAILED();
+                        HDprintf(" couldn't copy predefined floating-point type\n");
+                        goto done;
+                    }
+
+                    break;
+
+                default:
+                    H5_FAILED();
+                    HDprintf(" invalid value for floating point type; should not happen\n");
+                    goto done;
+            }
+
+            break;
+        }
+
+case_time:
+        case H5T_TIME: {
+            /* Time datatype is unsupported, try again */
+            goto reroll;
+            break;
+        }
+
+case_string:
+        case H5T_STRING: {
+            /* Note: currently only H5T_CSET_ASCII is supported for the character set and
+             * only H5T_STR_NULLTERM is supported for string padding for variable-length
+             * strings and only H5T_STR_NULLPAD is supported for string padding for
+             * fixed-length strings, but these may change in the future.
+             */
+            if (0 == (rand() % 2)) {
+                /* Fixed-length string of 1..STRING_TYPE_MAX_SIZE bytes */
+                if ((datatype = H5Tcreate(H5T_STRING, (size_t)(rand() % STRING_TYPE_MAX_SIZE) + 1)) < 0) {
+                    H5_FAILED();
+                    HDprintf(" couldn't create fixed-length string datatype\n");
+                    goto done;
+                }
+
+                if (H5Tset_strpad(datatype, H5T_STR_NULLPAD) < 0) {
+                    H5_FAILED();
+                    HDprintf(" couldn't set H5T_STR_NULLPAD for fixed-length string type\n");
+                    goto done;
+                }
+            }
+            else {
+                /*
+                 * Currently, all VL datatypes are disabled.
+                 */
+                goto reroll;
+
+#if 0
+                if ((datatype = H5Tcreate(H5T_STRING, H5T_VARIABLE)) < 0) {
+                    H5_FAILED();
+                    HDprintf(" couldn't create variable-length string datatype\n");
+                    goto done;
+                }
+
+                if (H5Tset_strpad(datatype, H5T_STR_NULLTERM) < 0) {
+                    H5_FAILED();
+                    HDprintf(" couldn't set H5T_STR_NULLTERM for variable-length string type\n");
+                    goto done;
+                }
+#endif
+            }
+
+            if (H5Tset_cset(datatype, H5T_CSET_ASCII) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't set string datatype character set\n");
+                goto done;
+            }
+
+            break;
+        }
+
+case_bitfield:
+        case H5T_BITFIELD: {
+            /* Bitfield datatype is unsupported, try again */
+            goto reroll;
+            break;
+        }
+
+case_opaque:
+        case H5T_OPAQUE: {
+            /* Opaque datatype is unsupported, try again */
+            goto reroll;
+            break;
+        }
+
+case_compound:
+        case H5T_COMPOUND: {
+            size_t num_members;
+            size_t next_offset = 0;
+            size_t compound_size = 0;
+
+            /* Arrays of compound types are not supported here; pick another
+             * type if we are generating an array's base type. Also limit
+             * recursion depth by refusing to nest containers too deeply. */
+            if (H5T_ARRAY == parent_class || depth > TYPE_GEN_RECURSION_MAX_DEPTH)
+                goto reroll;
+
+            /* Created with a placeholder size of 1; grown via H5Tset_size as
+             * members are inserted below */
+            if ((datatype = H5Tcreate(H5T_COMPOUND, 1)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't create compound datatype\n");
+                goto done;
+            }
+
+            num_members = (size_t)(rand() % COMPOUND_TYPE_MAX_MEMBERS + 1);
+
+            for (i = 0; i < num_members; i++) {
+                size_t member_size;
+                char member_name[256];
+
+                HDsnprintf(member_name, 256, "compound_member%zu", i);
+
+                if ((compound_members[i] = generate_random_datatype(H5T_NO_CLASS, is_compact)) < 0) {
+                    H5_FAILED();
+                    HDprintf(" couldn't create compound datatype member %zu\n", i);
+                    goto done;
+                }
+
+                if (!(member_size = H5Tget_size(compound_members[i]))) {
+                    H5_FAILED();
+                    HDprintf(" couldn't get compound member %zu size\n", i);
+                    goto done;
+                }
+
+                compound_size += member_size;
+
+                /* Grow the compound before inserting so the new member fits */
+                if (H5Tset_size(datatype, compound_size) < 0) {
+                    H5_FAILED();
+                    HDprintf(" couldn't set size for compound datatype\n");
+                    goto done;
+                }
+
+                if (H5Tinsert(datatype, member_name, next_offset, compound_members[i]) < 0) {
+                    H5_FAILED();
+                    HDprintf(" couldn't insert compound datatype member %zu\n", i);
+                    goto done;
+                }
+
+                next_offset += member_size;
+            }
+
+            break;
+        }
+
+case_reference:
+        case H5T_REFERENCE: {
+            /* Temporarily disable generation of reference datatypes
+             * (everything below in this case is currently unreachable) */
+            goto reroll;
+
+            /* Arrays of reference types are not supported; pick another type
+             * if we are generating an array's base type. */
+            if (H5T_ARRAY == parent_class)
+                goto reroll;
+
+            if (0 == (rand() % 2)) {
+                if ((datatype = H5Tcopy(H5T_STD_REF_OBJ)) < 0) {
+                    H5_FAILED();
+                    HDprintf(" couldn't copy object reference datatype\n");
+                    goto done;
+                }
+            }
+            else {
+                /* Region references are currently unsupported */
+                goto reroll;
+
+                if ((datatype = H5Tcopy(H5T_STD_REF_DSETREG)) < 0) {
+                    H5_FAILED();
+                    HDprintf(" couldn't copy region reference datatype\n");
+                    goto done;
+                }
+            }
+
+            break;
+        }
+
+case_enum:
+        case H5T_ENUM: {
+            /* ARRAY of ENUM is currently unsupported, so try another type
+             * if this happens. */
+            if (H5T_ARRAY == parent_class)
+                goto reroll;
+
+            if ((datatype = H5Tenum_create(H5T_NATIVE_INT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't create enum datatype\n");
+                goto done;
+            }
+
+            /* NOTE(review): the loop bound below re-draws rand() on every
+             * iteration, so the member count is not fixed up front -- confirm
+             * this is intended rather than computing the count once. */
+            for (i = 0; i < (size_t)(rand() % ENUM_TYPE_MAX_MEMBERS + 1); i++) {
+                char name[ENUM_TYPE_MAX_MEMBER_NAME_LENGTH];
+                int value = rand();
+
+                HDsnprintf(name, ENUM_TYPE_MAX_MEMBER_NAME_LENGTH, "enum_val%zu", i);
+
+                if (H5Tenum_insert(datatype, name, &value) < 0) {
+                    H5_FAILED();
+                    HDprintf(" couldn't insert member into enum datatype\n");
+                    goto done;
+                }
+            }
+
+            break;
+        }
+
+case_vlen:
+        case H5T_VLEN: {
+            /* Variable-length datatypes are unsupported, try again */
+            goto reroll;
+            break;
+        }
+
+case_array:
+        case H5T_ARRAY: {
+            unsigned ndims;
+            hid_t base_datatype = H5I_INVALID_HID;
+
+            /* ARRAY of ARRAY is currently unsupported, so try another type
+             * if this happens. Also check for too much recursion. */
+            if (H5T_ARRAY == parent_class || depth > TYPE_GEN_RECURSION_MAX_DEPTH)
+                goto reroll;
+
+            ndims = (unsigned)(rand() % ARRAY_TYPE_MAX_DIMS + 1);
+
+            if (NULL == (array_dims = (hsize_t *)HDmalloc(ndims * sizeof(*array_dims))))
+                goto done;
+
+            for (i = 0; i < ndims; i++)
+                array_dims[i] = (hsize_t)(rand() % MAX_DIM_SIZE + 1);
+
+            /* Recurse for the base type; parent_class = H5T_ARRAY keeps the
+             * base type out of the unsupported container classes */
+            if ((base_datatype = generate_random_datatype(H5T_ARRAY, is_compact)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't create array base datatype\n");
+                goto done;
+            }
+
+            if ((datatype = H5Tarray_create2(base_datatype, ndims, array_dims)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't create array datatype\n");
+                goto done;
+            }
+
+            break;
+        }
+
+        default:
+            H5_FAILED();
+            HDprintf(" invalid datatype class\n");
+            break;
+    } /* end switch */
+
+done:
+    if (depth > 0)
+        depth--;
+
+    /* On failure, release any compound member types created above */
+    if (datatype < 0) {
+        for (i = 0; i < COMPOUND_TYPE_MAX_MEMBERS; i++) {
+            if (compound_members[i] > 0 && H5Tclose(compound_members[i]) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't close compound member %zu\n", i);
+            }
+        }
+    }
+
+    if (array_dims) {
+        HDfree(array_dims);
+        array_dims = NULL;
+    }
+
+    /* Only the outermost invocation (depth back to 0) enforces the compact
+     * size limit; oversized results are discarded and re-rolled. */
+    if (is_compact && (depth == 0)) {
+        size_t type_size;
+
+        /*
+         * Check to make sure that the generated datatype does
+         * not exceed the maximum compact datatype size if a
+         * compact datatype was requested.
+         */
+        if (0 == (type_size = H5Tget_size(datatype))) {
+            H5_FAILED();
+            HDprintf(" failed to retrieve datatype's size\n");
+            H5Tclose(datatype);
+            datatype = H5I_INVALID_HID;
+        }
+        else {
+            if (type_size > COMPACT_DATATYPE_MAX_SIZE) {
+                /*
+                 * Generate a new datatype.
+                 */
+                H5Tclose(datatype);
+                datatype = H5I_INVALID_HID;
+                goto reroll;
+            }
+        }
+    }
+
+    return datatype;
+
+reroll:
+    /*
+     * NOTE(review): 'reroll' decrements depth before re-entering the switch,
+     * and the 'done' path may decrement again for the same invocation (the
+     * depth > 0 guards only partially protect this). After a reroll, nested
+     * calls can see an understated depth and slightly exceed
+     * TYPE_GEN_RECURSION_MAX_DEPTH -- confirm this is acceptable.
+     */
+    if (depth > 0)
+        depth--;
+
+    /*
+     * The datatype generation resulted in a datatype that is currently invalid
+     * for these tests, try again.
+     */
+    switch (rand() % H5T_NCLASSES) {
+        case H5T_INTEGER:
+            goto case_integer;
+        case H5T_FLOAT:
+            goto case_float;
+        case H5T_TIME:
+            goto case_time;
+        case H5T_STRING:
+            goto case_string;
+        case H5T_BITFIELD:
+            goto case_bitfield;
+        case H5T_OPAQUE:
+            goto case_opaque;
+        case H5T_COMPOUND:
+            goto case_compound;
+        case H5T_REFERENCE:
+            goto case_reference;
+        case H5T_ENUM:
+            goto case_enum;
+        case H5T_VLEN:
+            goto case_vlen;
+        case H5T_ARRAY:
+            goto case_array;
+        default:
+            H5_FAILED();
+            HDprintf(" invalid value for goto\n");
+            break;
+    }
+
+    return H5I_INVALID_HID;
+}
+
+/*
+ * Helper function to generate a random HDF5 dataspace in order to thoroughly
+ * test support for dataspaces. The generated dimensions are copied into
+ * dims_out (if non-NULL); compact dataspaces are bounded by
+ * COMPACT_SPACE_MAX_DIMS / COMPACT_SPACE_MAX_DIM_SIZE.
+ *
+ * Returns a new dataspace ID on success, H5I_INVALID_HID on failure.
+ */
+hid_t
+generate_random_dataspace(int rank, const hsize_t *max_dims, hsize_t *dims_out, hbool_t is_compact)
+{
+    hsize_t gen_dims[H5S_MAX_RANK];
+    hid_t space_id = H5I_INVALID_HID;
+    int dim_mod;
+    size_t dim_idx;
+
+    /* Reject negative ranks and compact ranks over the compact limit */
+    if (rank < 0)
+        TEST_ERROR;
+    if (is_compact && (rank > COMPACT_SPACE_MAX_DIMS)) {
+        HDprintf(" current rank of compact dataspace (%lld) exceeds maximum dimensionality (%lld)\n",
+                 (long long)rank, (long long)COMPACT_SPACE_MAX_DIMS);
+        TEST_ERROR;
+    }
+
+    /*
+     * XXX: if max_dims is specified, make sure that the dimensions generated
+     * are not larger than this.
+     */
+    dim_mod = is_compact ? COMPACT_SPACE_MAX_DIM_SIZE : MAX_DIM_SIZE;
+    for (dim_idx = 0; dim_idx < (size_t)rank; dim_idx++) {
+        gen_dims[dim_idx] = (hsize_t)(rand() % dim_mod + 1);
+
+        if (dims_out)
+            dims_out[dim_idx] = gen_dims[dim_idx];
+    }
+
+    if ((space_id = H5Screate_simple(rank, gen_dims, max_dims)) < 0)
+        TEST_ERROR;
+
+    return space_id;
+
+error:
+    return H5I_INVALID_HID;
+}
+
+/*
+ * Create the top-level testing container file and, when the connector
+ * supports basic group operations, a container group for each test
+ * interface. Group-creation failures are deliberately best-effort.
+ *
+ * Returns 0 on success, -1 on failure.
+ */
+int
+create_test_container(char *filename, uint64_t vol_cap_flags)
+{
+    /* One entry per test interface: group name plus the exact message
+     * printed when its container group is created successfully. */
+    static const struct {
+        const char *name;
+        const char *created_msg;
+    } containers[] = {
+        {GROUP_TEST_GROUP_NAME, " created container group for Group tests\n"},
+        {ATTRIBUTE_TEST_GROUP_NAME, " created container group for Attribute tests\n"},
+        {DATASET_TEST_GROUP_NAME, " created container group for Dataset tests\n"},
+        {DATATYPE_TEST_GROUP_NAME, " created container group for Datatype tests\n"},
+        {LINK_TEST_GROUP_NAME, " created container group for Link tests\n"},
+        {OBJECT_TEST_GROUP_NAME, " created container group for Object tests\n"},
+        {MISCELLANEOUS_TEST_GROUP_NAME, " created container group for Miscellaneous tests\n"},
+    };
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t group_id = H5I_INVALID_HID;
+    size_t idx;
+
+    if (!(vol_cap_flags & H5VL_CAP_FLAG_FILE_BASIC)) {
+        HDprintf(" VOL connector doesn't support file creation\n");
+        goto error;
+    }
+
+    if ((file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        HDprintf(" couldn't create testing container file '%s'\n", filename);
+        goto error;
+    }
+
+    if (vol_cap_flags & H5VL_CAP_FLAG_GROUP_BASIC) {
+        /* Create container groups for each of the test interfaces
+         * (group, attribute, dataset, etc.); failures are ignored. */
+        for (idx = 0; idx < sizeof(containers) / sizeof(containers[0]); idx++) {
+            group_id =
+                H5Gcreate2(file_id, containers[idx].name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+            if (group_id >= 0) {
+                HDprintf("%s", containers[idx].created_msg);
+                H5Gclose(group_id);
+            }
+        }
+    }
+
+    if (H5Fclose(file_id) < 0) {
+        HDprintf(" failed to close testing container\n");
+        goto error;
+    }
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return -1;
+}
+
+/*
+ * Add a prefix to the given filename. The caller
+ * is responsible for freeing the returned filename
+ * pointer with HDfree().
+ *
+ * On success, *filename_out receives a newly-allocated buffer of
+ * H5_API_TEST_FILENAME_MAX_LENGTH bytes containing prefix + filename.
+ * Returns SUCCEED/FAIL; *filename_out is untouched on failure.
+ */
+herr_t
+prefix_filename(const char *prefix, const char *filename, char **filename_out)
+{
+    char *out_buf = NULL;
+    int n_written;
+    herr_t ret_value = SUCCEED;
+
+    if (!prefix) {
+        HDprintf(" invalid file prefix\n");
+        ret_value = FAIL;
+        goto done;
+    }
+    if (!filename || (*filename == '\0')) {
+        HDprintf(" invalid filename\n");
+        ret_value = FAIL;
+        goto done;
+    }
+    if (!filename_out) {
+        HDprintf(" invalid filename_out buffer\n");
+        ret_value = FAIL;
+        goto done;
+    }
+
+    if (NULL == (out_buf = HDmalloc(H5_API_TEST_FILENAME_MAX_LENGTH))) {
+        /* Fixed typo: "allocated" -> "allocate" */
+        HDprintf(" couldn't allocate filename buffer\n");
+        ret_value = FAIL;
+        goto done;
+    }
+
+    /* Fail (rather than silently truncate) if the combined name doesn't fit */
+    n_written = HDsnprintf(out_buf, H5_API_TEST_FILENAME_MAX_LENGTH, "%s%s", prefix, filename);
+    if (n_written < 0 || (size_t)n_written >= H5_API_TEST_FILENAME_MAX_LENGTH) {
+        HDprintf(" prefixed filename buffer too small\n");
+        HDfree(out_buf);
+        out_buf = NULL;
+        ret_value = FAIL;
+        goto done;
+    }
+
+    *filename_out = out_buf;
+
+done:
+    return ret_value;
+}
+
+/*
+ * Calls H5Fdelete on the given filename. If a prefix string
+ * is given, adds that prefix string to the filename before
+ * calling H5Fdelete.
+ *
+ * Returns SUCCEED/FAIL.
+ */
+herr_t
+remove_test_file(const char *prefix, const char *filename)
+{
+    char *prefixed_filename = NULL;
+    const char *target = filename;
+    herr_t ret_value = SUCCEED;
+
+    /* Build the prefixed name first, if a prefix was supplied */
+    if (prefix) {
+        if (prefix_filename(prefix, filename, &prefixed_filename) < 0) {
+            HDprintf(" couldn't prefix filename\n");
+            ret_value = FAIL;
+            goto done;
+        }
+
+        target = prefixed_filename;
+    }
+
+    if (H5Fdelete(target, H5P_DEFAULT) < 0) {
+        HDprintf(" couldn't remove file '%s'\n", target);
+        ret_value = FAIL;
+        goto done;
+    }
+
+done:
+    /* HDfree(NULL) is a no-op, so this is safe on all paths */
+    HDfree(prefixed_filename);
+
+    return ret_value;
+}
diff --git a/test/API/H5_api_test_util.h b/test/API/H5_api_test_util.h
new file mode 100644
index 0000000..86b0e3e
--- /dev/null
+++ b/test/API/H5_api_test_util.h
@@ -0,0 +1,24 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_TEST_UTIL_H_
+#define H5_API_TEST_UTIL_H_
+
+#include "hdf5.h"
+
+/* Generate a random datatype; pass H5T_NO_CLASS unless recursing */
+hid_t generate_random_datatype(H5T_class_t parent_class, hbool_t is_compact);
+/* Generate a random simple dataspace; dims written to dims_out if non-NULL */
+hid_t generate_random_dataspace(int rank, const hsize_t *max_dims, hsize_t *dims_out, hbool_t is_compact);
+/* Create the shared test container file and per-interface groups; 0/-1 */
+int create_test_container(char *filename, uint64_t vol_cap_flags);
+/* Allocate "<prefix><filename>" into *filename_out; caller frees with HDfree() */
+herr_t prefix_filename(const char *prefix, const char *filename, char **filename_out);
+/* H5Fdelete the (optionally prefixed) test file */
+herr_t remove_test_file(const char *prefix, const char *filename);
+
+#endif /* H5_API_TEST_UTIL_H_ */
diff --git a/test/API/H5_api_tests_disabled.h b/test/API/H5_api_tests_disabled.h
new file mode 100644
index 0000000..672d2d9
--- /dev/null
+++ b/test/API/H5_api_tests_disabled.h
@@ -0,0 +1,46 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_TESTS_DISABLED_H
+#define H5_API_TESTS_DISABLED_H
+
+#include "H5_api_test_config.h"
+
+/* Contains #defines to temporarily disable API tests based
+ * on problematic or unsupported functionality. Each macro, when defined,
+ * switches off the tests covering the named feature. */
+
+#define NO_LARGE_TESTS
+#define NO_ATTR_FILL_VALUE_SUPPORT
+#define NO_DECREASING_ALPHA_ITER_ORDER
+#define NO_USER_DEFINED_LINKS
+#define NO_EXTERNAL_LINKS
+#define NO_ITERATION_RESTART
+#define NO_FILE_MOUNTS
+#define NO_CLEAR_ON_SHRINK
+#define NO_DOUBLE_OBJECT_OPENS
+#define NO_OBJECT_GET_NAME
+#define WRONG_DATATYPE_OBJ_COUNT
+#define NO_SHARED_DATATYPES
+#define NO_INVALID_PROPERTY_LIST_TESTS
+#define NO_MAX_LINK_CRT_ORDER_RESET
+#define NO_PREVENT_HARD_LINKS_ACROSS_FILES
+#define NO_SOFT_LINK_MANY_DANGLING
+#define NO_ID_PREVENTS_OBJ_DELETE
+#define NO_WRITE_SAME_ELEMENT_TWICE
+#define NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
+#define NO_DELETE_NONEXISTENT_ATTRIBUTE
+#define NO_TRUNCATE_OPEN_FILE
+#define NO_CHECK_SELECTION_BOUNDS
+#define NO_VALIDATE_DATASPACE
+#define NO_REFERENCE_TO_DELETED
+
+#endif /* H5_API_TESTS_DISABLED_H */
diff --git a/test/API/README.md b/test/API/README.md
new file mode 100644
index 0000000..aec6eaa
--- /dev/null
+++ b/test/API/README.md
@@ -0,0 +1,87 @@
+# HDF5 API Tests
+
+This directory contains several test applications that exercise HDF5's
+public API and serve as regression tests for HDF5 [VOL Connectors](https://portal.hdfgroup.org/display/HDF5/Virtual+Object+Layer).
+
+## Build Process and options
+
+These HDF5 API tests are disabled by default, but can be enabled by passing the
+`-DHDF5_TEST_API=ON` option to CMake. The following build options are available
+to influence how the API tests get built:
+
+### CMake
+
+To set an option, it should be prepended with `-D` when passed to the `cmake` command.
+For example,
+
+ cmake -DHDF5_TEST_API=OFF ..
+
+`HDF5_TEST_API` (Default: `OFF`) - Determines whether the API tests will be built.
+
+`HDF5_TEST_API_INSTALL` (Default: `ON`) - Determines whether the API tests should be installed
+on the system.
+
+`HDF5_TEST_API_ENABLE_ASYNC` (Default: `OFF`) - Determines whether tests for HDF5's asynchronous
+I/O capabilities should be enabled. Note that the "native" HDF5 VOL connector doesn't support
+this functionality, so these tests are directed towards VOL connectors that do.
+
+`HDF5_TEST_ENABLE_DRIVER` (Default: `OFF`) - Determines whether the API test driver program should
+be built. This driver program is useful when a VOL connector relies upon a server executable
+(as well as possible additional executables) in order to function. The driver program can be
+supplied with a server executable (see `HDF5_TEST_API_SERVER` below) and will launch that server
+before running the API tests.
+`HDF5_TEST_API_SERVER` (Default: empty string) - If `HDF5_TEST_ENABLE_DRIVER` is set to `ON`, this
+option should be edited to point to the server executable that the driver program should attempt
+to launch before running the API tests.
+
+### Autotools
+
+Currently unsupported
+
+### Usage
+
+These API tests currently only support usage with the native HDF5 VOL connector and HDF5 VOL
+connectors that can be loaded dynamically as a plugin. For information on how to build a VOL
+connector in this manner, refer to section 2.3 of the [HDF5 VOL Connector Author Guide](https://portal.hdfgroup.org/display/HDF5/HDF5+VOL+Connector+Authors+Guide?preview=/53610813/59903039/vol_connector_author_guide.pdf).
+
+TODO: section on building VOL connectors alongside HDF5 for use with tests
+
+These API tests can also be used to test an HDF5 VOL connector that is external to the library.
+For convenience, the `HDF5_TEST_API_INSTALL` option can be used to install these tests on the
+system where other HDF5 executables (such as `h5dump`) are installed.
+
+To run these tests with your VOL connector, set the following two environment variables:
+
+`HDF5_VOL_CONNECTOR` - This environment variable should be set to the name chosen for the VOL connector
+to be used. For example, HDF5's DAOS VOL connector uses the name "[daos](https://github.com/HDFGroup/vol-daos/blob/v1.2.0/src/daos_vol.h#L30)" and would therefore set:
+
+ HDF5_VOL_CONNECTOR=daos
+
+`HDF5_PLUGIN_PATH` - This environment variable should be set to the directory that contains the built
+library for the VOL connector to be used.
+
+Once these are set, the HDF5 API tests will attempt to automatically load the specified VOL connector
+and use it when running tests. If HDF5 is unable to locate or load the VOL connector specified, it
+will fall back to running the tests with the native HDF5 VOL connector and an error similar to the
+following will appear in the test output:
+
+ HDF5-DIAG: Error detected in HDF5 (X.XX.X) MPI-process 0:
+ #000: /home/user/git/hdf5/src/H5.c line 1010 in H5open(): library initialization failed
+ major: Function entry/exit
+ minor: Unable to initialize object
+ #001: /home/user/git/hdf5/src/H5.c line 277 in H5_init_library(): unable to initialize vol interface
+ major: Function entry/exit
+ minor: Unable to initialize object
+ #002: /home/user/git/hdf5/src/H5VLint.c line 199 in H5VL_init_phase2(): unable to set default VOL connector
+ major: Virtual Object Layer
+ minor: Can't set value
+ #003: /home/user/git/hdf5/src/H5VLint.c line 429 in H5VL__set_def_conn(): can't register connector
+ major: Virtual Object Layer
+ minor: Unable to register new ID
+ #004: /home/user/git/hdf5/src/H5VLint.c line 1321 in H5VL__register_connector_by_name(): unable to load VOL connector
+ major: Virtual Object Layer
+ minor: Unable to initialize object
+
+### Help and Support
+
+For help with building or using the HDF5 API tests, please contact the [HDF Help Desk](https://portal.hdfgroup.org/display/support/The+HDF+Help+Desk).
diff --git a/test/API/driver/CMakeLists.txt b/test/API/driver/CMakeLists.txt
new file mode 100644
index 0000000..23ba053
--- /dev/null
+++ b/test/API/driver/CMakeLists.txt
@@ -0,0 +1,34 @@
+cmake_minimum_required (VERSION 3.18)
+project(H5_API_TEST_DRIVER CXX)
+
+# FetchContent is not loaded by default; it must be included before use
+include (FetchContent)
+
+if (NOT KWSYS_USE_LOCALCONTENT)
+  set (KWSYS_URL ${KWSYS_TGZ_ORIGPATH}/${KWSYS_TGZ_ORIGNAME})
+else ()
+  set (KWSYS_URL ${TGZPATH}/${KWSYS_TGZ_ORIGNAME})
+endif ()
+# Only tgz files
+FetchContent_Declare (KWSYS
+    URL ${KWSYS_URL}
+    URL_HASH ""
+)
+# FetchContent defines variables using the lowercased content name:
+# kwsys_POPULATED, kwsys_SOURCE_DIR, kwsys_BINARY_DIR
+FetchContent_GetProperties(KWSYS)
+if(NOT kwsys_POPULATED)
+  FetchContent_Populate(KWSYS)
+
+  # Copy an additional/replacement files into the populated source
+  #file(COPY ${HDF_RESOURCES_DIR}/KWSYS/CMakeLists.txt DESTINATION ${kwsys_SOURCE_DIR})
+
+  set(CMAKE_CXX_STANDARD 11)
+
+  set(KWSYS_NAMESPACE h5_api_test_sys)
+  set(KWSYS_USE_SystemTools 1)
+  set(KWSYS_USE_Process 1)
+  set(KWSYS_USE_RegularExpression 1)
+
+  # Fixed: previously referenced the undefined hdf5_kwsysb_SOURCE_DIR /
+  # hdf5_kwsys_BINARY_DIR variables
+  add_subdirectory(${kwsys_SOURCE_DIR} ${kwsys_BINARY_DIR})
+endif()
+
+include_directories(${kwsys_BINARY_DIR})
+
+add_executable(h5_api_test_driver h5_api_test_driver.cxx)
+target_link_libraries(h5_api_test_driver h5_api_test_sys)
diff --git a/test/API/driver/h5_api_test_driver.cxx b/test/API/driver/h5_api_test_driver.cxx
new file mode 100644
index 0000000..b5d9821
--- /dev/null
+++ b/test/API/driver/h5_api_test_driver.cxx
@@ -0,0 +1,910 @@
+#include "h5_api_test_driver.hxx"
+
+#include "H5_api_test_config.h"
+
+#include <cstdio>
+#include <sstream>
+#include <iostream>
+#include <cstring>
+#include <cstdlib>
+
+#if !defined(_WIN32) || defined(__CYGWIN__)
+# include <unistd.h>
+# include <sys/wait.h>
+#endif
+
+#include <h5_api_test_sys/RegularExpression.hxx>
+#include <h5_api_test_sys/SystemTools.hxx>
+
+using std::vector;
+using std::string;
+using std::cerr;
+
+// The main function as this class should only be used by this program
+int
+main(int argc, char *argv[])
+{
+ H5APITestDriver d;
+ return d.Main(argc, argv);
+}
+
+//----------------------------------------------------------------------------
+H5APITestDriver::H5APITestDriver()
+{
+ this->ClientArgStart = 0;
+ this->ClientArgCount = 0;
+ this->ClientHelperArgStart = 0;
+ this->ClientHelperArgCount = 0;
+ this->ClientInitArgStart = 0;
+ this->ClientInitArgCount = 0;
+ this->ServerArgStart = 0;
+ this->ServerArgCount = 0;
+ this->AllowErrorInOutput = false;
+ // try to make sure that this times out before dart so it can kill all the processes
+ this->TimeOut = DART_TESTING_TIMEOUT - 10.0;
+ this->ServerExitTimeOut = 2; /* 2 seconds timeout for server to exit */
+ this->ClientHelper = false;
+ this->ClientInit = false;
+ this->TestServer = false;
+ this->TestSerial = false;
+ this->IgnoreServerResult = false;
+}
+
+//----------------------------------------------------------------------------
+H5APITestDriver::~H5APITestDriver()
+{
+}
+
+//----------------------------------------------------------------------------
+void
+H5APITestDriver::SeparateArguments(const char *str, vector<string> &flags)
+{
+ string arg = str;
+ string::size_type pos1 = 0;
+ string::size_type pos2 = arg.find_first_of(" ;");
+ if (pos2 == arg.npos) {
+ flags.push_back(str);
+ return;
+ }
+ while (pos2 != arg.npos) {
+ flags.push_back(arg.substr(pos1, pos2 - pos1));
+ pos1 = pos2 + 1;
+ pos2 = arg.find_first_of(" ;", pos1 + 1);
+ }
+ flags.push_back(arg.substr(pos1, pos2 - pos1));
+}
+
+//----------------------------------------------------------------------------
+void
+H5APITestDriver::CollectConfiguredOptions()
+{
+ if (this->TimeOut < 0)
+ this->TimeOut = 1500;
+
+#ifdef H5_API_TEST_ENV_VARS
+ this->SeparateArguments(H5_API_TEST_ENV_VARS, this->ClientEnvVars);
+#endif
+
+ // now find all the mpi information if mpi run is set
+#ifdef MPIEXEC_EXECUTABLE
+ this->MPIRun = MPIEXEC_EXECUTABLE;
+#else
+ return;
+#endif
+ int maxNumProc = 1;
+
+# ifdef MPIEXEC_MAX_NUMPROCS
+ if (!this->TestSerial)
+ maxNumProc = MPIEXEC_MAX_NUMPROCS;
+# endif
+# ifdef MPIEXEC_NUMPROC_FLAG
+ this->MPINumProcessFlag = MPIEXEC_NUMPROC_FLAG;
+# endif
+# ifdef MPIEXEC_PREFLAGS
+ this->SeparateArguments(MPIEXEC_PREFLAGS, this->MPIClientPreFlags);
+# endif
+# ifdef MPIEXEC_POSTFLAGS
+ this->SeparateArguments(MPIEXEC_POSTFLAGS, this->MPIClientPostFlags);
+# endif
+# ifdef MPIEXEC_SERVER_PREFLAGS
+ this->SeparateArguments(MPIEXEC_SERVER_PREFLAGS, this->MPIServerPreFlags);
+#else
+ this->MPIServerPreFlags = this->MPIClientPreFlags;
+# endif
+# ifdef MPIEXEC_SERVER_POSTFLAGS
+ this->SeparateArguments(MPIEXEC_SERVER_POSTFLAGS, this->MPIServerPostFlags);
+#else
+ this->MPIServerPostFlags = this->MPIClientPostFlags;
+# endif
+ std::stringstream ss;
+ ss << maxNumProc;
+ this->MPIServerNumProcessFlag = "1";
+ this->MPIClientNumProcessFlag = ss.str();
+}
+
+//----------------------------------------------------------------------------
+/// This adds the debug/build configuration crap for the executable on windows.
+static string
+FixExecutablePath(const string &path)
+{
+#ifdef CMAKE_INTDIR
+ string parent_dir =
+ h5_api_test_sys::SystemTools::GetFilenamePath(path.c_str());
+
+ string filename =
+ h5_api_test_sys::SystemTools::GetFilenameName(path);
+
+ if (!h5_api_test_sys::SystemTools::StringEndsWith(parent_dir.c_str(), CMAKE_INTDIR)) {
+ parent_dir += "/" CMAKE_INTDIR;
+ }
+ return parent_dir + "/" + filename;
+#endif
+
+ return path;
+}
+
+//----------------------------------------------------------------------------
+int
+H5APITestDriver::ProcessCommandLine(int argc, char *argv[])
+{
+ int *ArgCountP = NULL;
+ int i;
+ for (i = 1; i < argc; ++i) {
+ if (strcmp(argv[i], "--client") == 0) {
+ this->ClientExecutable = ::FixExecutablePath(argv[i + 1]);
+ ++i; /* Skip executable */
+ this->ClientArgStart = i + 1;
+ this->ClientArgCount = this->ClientArgStart;
+ ArgCountP = &this->ClientArgCount;
+ continue;
+ }
+ if (strcmp(argv[i], "--client-helper") == 0) {
+ std::cerr << "Client Helper" << std::endl;
+ this->ClientHelper = true;
+ this->ClientHelperExecutable = ::FixExecutablePath(argv[i + 1]);
+ ++i; /* Skip executable */
+ this->ClientHelperArgStart = i + 1;
+ this->ClientHelperArgCount = this->ClientHelperArgStart;
+ ArgCountP = &this->ClientHelperArgCount;
+ continue;
+ }
+ if (strcmp(argv[i], "--client-init") == 0) {
+ std::cerr << "Client Init" << std::endl;
+ this->ClientInit = true;
+ this->ClientInitExecutable = ::FixExecutablePath(argv[i + 1]);
+ ++i; /* Skip executable */
+ this->ClientInitArgStart = i + 1;
+ this->ClientInitArgCount = this->ClientInitArgStart;
+ ArgCountP = &this->ClientInitArgCount;
+ continue;
+ }
+ if (strcmp(argv[i], "--server") == 0) {
+ std::cerr << "Test Server" << std::endl;
+ this->TestServer = true;
+ this->ServerExecutable = ::FixExecutablePath(argv[i + 1]);
+ ++i; /* Skip executable */
+ this->ServerArgStart = i + 1;
+ this->ServerArgCount = this->ServerArgStart;
+ ArgCountP = &this->ServerArgCount;
+ continue;
+ }
+ if (strcmp(argv[i], "--timeout") == 0) {
+ this->TimeOut = atoi(argv[i + 1]);
+ std::cerr << "The timeout was set to " << this->TimeOut << std::endl;
+ ArgCountP = NULL;
+ continue;
+ }
+ if (strncmp(argv[i], "--allow-errors", strlen("--allow-errors")) == 0) {
+ this->AllowErrorInOutput = true;
+ std::cerr << "The allow errors in output flag was set to " <<
+ this->AllowErrorInOutput << std::endl;
+ ArgCountP = NULL;
+ continue;
+ }
+ if (strncmp(argv[i], "--allow-server-errors", strlen("--allow-server-errors")) == 0) {
+ this->IgnoreServerResult = true;
+ std::cerr << "The allow server errors in output flag was set to " <<
+ this->IgnoreServerResult << std::endl;
+ ArgCountP = NULL;
+ continue;
+ }
+ if (strcmp(argv[i], "--serial") == 0) {
+ this->TestSerial = true;
+ std::cerr << "This is a serial test" << std::endl;
+ ArgCountP = NULL;
+ continue;
+ }
+ if (ArgCountP)
+ (*ArgCountP)++;
+ }
+
+ return 1;
+}
+
+//----------------------------------------------------------------------------
+void
+H5APITestDriver::CreateCommandLine(vector<const char*> &commandLine,
+ const char *cmd, int isServer, int isHelper, const char *numProc, int argStart,
+ int argCount, char *argv[])
+{
+ if (!isServer && this->ClientEnvVars.size()) {
+ for (unsigned int i = 0; i < this->ClientEnvVars.size(); ++i)
+ commandLine.push_back(this->ClientEnvVars[i].c_str());
+#ifdef H5_API_TEST_CLIENT_INIT_TOKEN_VAR
+ if (this->ClientTokenVar.size())
+ commandLine.push_back(this->ClientTokenVar.c_str());
+#endif
+ }
+
+ if (!isHelper && this->MPIRun.size()) {
+ commandLine.push_back(this->MPIRun.c_str());
+ commandLine.push_back(this->MPINumProcessFlag.c_str());
+ commandLine.push_back(numProc);
+
+ if (isServer)
+ for (unsigned int i = 0; i < this->MPIServerPreFlags.size(); ++i)
+ commandLine.push_back(this->MPIServerPreFlags[i].c_str());
+ else
+ for (unsigned int i = 0; i < this->MPIClientPreFlags.size(); ++i)
+ commandLine.push_back(this->MPIClientPreFlags[i].c_str());
+ }
+
+ commandLine.push_back(cmd);
+
+ if (isServer)
+ for (unsigned int i = 0; i < this->MPIServerPostFlags.size(); ++i)
+ commandLine.push_back(MPIServerPostFlags[i].c_str());
+ else
+ for (unsigned int i = 0; i < this->MPIClientPostFlags.size(); ++i)
+ commandLine.push_back(MPIClientPostFlags[i].c_str());
+
+ // remaining flags for the test
+ for (int ii = argStart; ii < argCount; ++ii) {
+ commandLine.push_back(argv[ii]);
+ }
+
+ commandLine.push_back(0);
+}
+
+//----------------------------------------------------------------------------
+int
+H5APITestDriver::StartServer(h5_api_test_sysProcess *server, const char *name,
+ vector<char> &out, vector<char> &err)
+{
+ if (!server)
+ return 1;
+
+ cerr << "H5APITestDriver: starting process " << name << "\n";
+ h5_api_test_sysProcess_SetTimeout(server, this->TimeOut);
+ h5_api_test_sysProcess_Execute(server);
+ int foundWaiting = 0;
+ string output;
+ while (!foundWaiting) {
+ int pipe = this->WaitForAndPrintLine(name, server, output, 100.0, out,
+ err, H5_API_TEST_SERVER_START_MSG, &foundWaiting);
+ if (pipe == h5_api_test_sysProcess_Pipe_None
+ || pipe == h5_api_test_sysProcess_Pipe_Timeout) {
+ break;
+ }
+ }
+ if (foundWaiting) {
+ cerr << "H5APITestDriver: " << name << " successfully started.\n";
+ return 1;
+ } else {
+ cerr << "H5APITestDriver: " << name << " never started.\n";
+ h5_api_test_sysProcess_Kill(server);
+ return 0;
+ }
+}
+
+//----------------------------------------------------------------------------
+int
+H5APITestDriver::StartClientHelper(h5_api_test_sysProcess *client,
+ const char *name, vector<char> &out, vector<char> &err)
+{
+ if (!client)
+ return 1;
+
+ cerr << "H5APITestDriver: starting process " << name << "\n";
+ h5_api_test_sysProcess_SetTimeout(client, this->TimeOut);
+ h5_api_test_sysProcess_Execute(client);
+ int foundWaiting = 0;
+ string output;
+ while (!foundWaiting) {
+ int pipe = this->WaitForAndPrintLine(name, client, output, 100.0, out,
+ err, H5_API_TEST_CLIENT_HELPER_START_MSG, &foundWaiting);
+ if (pipe == h5_api_test_sysProcess_Pipe_None
+ || pipe == h5_api_test_sysProcess_Pipe_Timeout) {
+ break;
+ }
+ }
+ if (foundWaiting) {
+ cerr << "H5APITestDriver: " << name << " successfully started.\n";
+ return 1;
+ } else {
+ cerr << "H5APITestDriver: " << name << " never started.\n";
+ h5_api_test_sysProcess_Kill(client);
+ return 0;
+ }
+}
+
+//----------------------------------------------------------------------------
+int
+H5APITestDriver::StartClientInit(h5_api_test_sysProcess *client,
+ const char *name, vector<char> &out, vector<char> &err)
+{
+ if (!client)
+ return 1;
+
+ cerr << "H5APITestDriver: starting process " << name << "\n";
+ h5_api_test_sysProcess_SetTimeout(client, this->TimeOut);
+ h5_api_test_sysProcess_Execute(client);
+ int foundToken = 0;
+ string output, token;
+ while (!foundToken) {
+ int pipe = this->WaitForAndPrintLine(name, client, output, 100.0, out,
+ err, NULL, NULL);
+ if (pipe == h5_api_test_sysProcess_Pipe_None
+ || pipe == h5_api_test_sysProcess_Pipe_Timeout) {
+ break;
+ }
+ if (this->OutputStringHasToken(name, H5_API_TEST_CLIENT_INIT_TOKEN_REGEX, output, token)) {
+ foundToken = 1;
+ this->ClientTokenVar = std::string(H5_API_TEST_CLIENT_INIT_TOKEN_VAR)
+ + std::string("=") + std::string(token);
+ break;
+ }
+ }
+
+ if (foundToken) {
+ cerr << "H5APITestDriver: " << name << " token: " << token << " was found.\n";
+ return 1;
+ } else {
+ cerr << "H5APITestDriver: " << name << " token was not found.\n";
+ return 0;
+ }
+}
+
+//----------------------------------------------------------------------------
+int
+H5APITestDriver::StartClient(h5_api_test_sysProcess *client, const char *name)
+{
+ if (!client)
+ return 1;
+
+ cerr << "H5APITestDriver: starting process " << name << "\n";
+ h5_api_test_sysProcess_SetTimeout(client, this->TimeOut);
+ h5_api_test_sysProcess_Execute(client);
+ if (h5_api_test_sysProcess_GetState(client)
+ == h5_api_test_sysProcess_State_Executing) {
+ cerr << "H5APITestDriver: " << name << " successfully started.\n";
+ return 1;
+ } else {
+ this->ReportStatus(client, name);
+ h5_api_test_sysProcess_Kill(client);
+ return 0;
+ }
+}
+
+//----------------------------------------------------------------------------
+void
+H5APITestDriver::Stop(h5_api_test_sysProcess *p, const char *name)
+{
+ if (p) {
+ cerr << "H5APITestDriver: killing process " << name << "\n";
+ h5_api_test_sysProcess_Kill(p);
+ h5_api_test_sysProcess_WaitForExit(p, 0);
+ }
+}
+
+//----------------------------------------------------------------------------
+int
+H5APITestDriver::OutputStringHasError(const char *pname, string &output)
+{
+ const char* possibleMPIErrors[] = {"error", "Error", "Missing:",
+ "core dumped", "process in local group is dead", "Segmentation fault",
+ "erroneous", "ERROR:", "Error:",
+ "mpirun can *only* be used with MPI programs", "due to signal",
+ "failure", "abnormal termination", "failed", "FAILED", "Failed", 0};
+
+ const char* nonErrors[] = {
+ "Memcheck, a memory error detector", //valgrind
+ 0};
+
+ if (this->AllowErrorInOutput)
+ return 0;
+
+ vector<string> lines;
+ vector<string>::iterator it;
+ h5_api_test_sys::SystemTools::Split(output.c_str(), lines);
+
+ int i, j;
+
+ for (it = lines.begin(); it != lines.end(); ++it) {
+ for (i = 0; possibleMPIErrors[i]; ++i) {
+ if (it->find(possibleMPIErrors[i]) != it->npos) {
+ int found = 1;
+ for (j = 0; nonErrors[j]; ++j) {
+ if (it->find(nonErrors[j]) != it->npos) {
+ found = 0;
+ cerr << "Non error \"" << it->c_str()
+ << "\" suppressed " << std::endl;
+ }
+ }
+ if (found) {
+ cerr
+ << "H5APITestDriver: ***** Test will fail, because the string: \""
+ << possibleMPIErrors[i]
+ << "\"\nH5APITestDriver: ***** was found in the following output from the "
+ << pname << ":\n\"" << it->c_str() << "\"\n";
+ return 1;
+ }
+ }
+ }
+ }
+ return 0;
+}
+
+//----------------------------------------------------------------------------
+int
+H5APITestDriver::OutputStringHasToken(const char *pname, const char *regex,
+ string &output, string &token)
+{
+ vector<string> lines;
+ vector<string>::iterator it;
+ h5_api_test_sys::SystemTools::Split(output.c_str(), lines);
+ h5_api_test_sys::RegularExpression re(regex);
+
+ for (it = lines.begin(); it != lines.end(); ++it) {
+ if (re.find(*it)) {
+ token = re.match(1);
+ return 1;
+ }
+ }
+
+ return 0;
+}
+
+//----------------------------------------------------------------------------
+#define H5_API_CLEAN_PROCESSES do { \
+ h5_api_test_sysProcess_Delete(client); \
+ h5_api_test_sysProcess_Delete(client_helper); \
+ h5_api_test_sysProcess_Delete(client_init); \
+ h5_api_test_sysProcess_Delete(server); \
+} while (0)
+
+#define H5_API_EXECUTE_CMD(cmd) do { \
+ if (strlen(cmd) > 0) { \
+ std::vector<std::string> commands = \
+ h5_api_test_sys::SystemTools::SplitString(cmd, ';'); \
+ for (unsigned int cc = 0; cc < commands.size(); cc++) { \
+ std::string command = commands[cc]; \
+ if (command.size() > 0) { \
+ std::cout << command.c_str() << std::endl; \
+ system(command.c_str()); \
+ } \
+ } \
+ } \
+} while (0)
+
+//----------------------------------------------------------------------------
+int
+H5APITestDriver::Main(int argc, char* argv[])
+{
+#ifdef H5_API_TEST_INIT_COMMAND
+ // run user-specified commands before initialization.
+ // For example: "killall -9 rsh test;"
+ H5_API_EXECUTE_CMD(H5_API_TEST_INIT_COMMAND);
+#endif
+
+ if (!this->ProcessCommandLine(argc, argv))
+ return 1;
+ this->CollectConfiguredOptions();
+
+ // mpi code
+ // Allocate process managers.
+ h5_api_test_sysProcess *server = 0;
+ h5_api_test_sysProcess *client = 0;
+ h5_api_test_sysProcess *client_helper = 0;
+ h5_api_test_sysProcess *client_init = 0;
+
+ if (this->TestServer) {
+ server = h5_api_test_sysProcess_New();
+ if (!server) {
+ H5_API_CLEAN_PROCESSES;
+ cerr << "H5APITestDriver: Cannot allocate h5_api_test_sysProcess to "
+ "run the server.\n";
+ return 1;
+ }
+ }
+ if (this->ClientHelper) {
+ client_helper = h5_api_test_sysProcess_New();
+ if (!client_helper) {
+            H5_API_CLEAN_PROCESSES;
+ cerr << "H5APITestDriver: Cannot allocate h5_api_test_sysProcess to "
+ "run the client helper.\n";
+ return 1;
+ }
+ }
+ if (this->ClientInit) {
+ client_init = h5_api_test_sysProcess_New();
+ if (!client_init) {
+ H5_API_CLEAN_PROCESSES;
+ cerr << "H5APITestDriver: Cannot allocate h5_api_test_sysProcess to "
+ "run the client init.\n";
+ return 1;
+ }
+ }
+ client = h5_api_test_sysProcess_New();
+ if (!client) {
+ H5_API_CLEAN_PROCESSES;
+ cerr << "H5APITestDriver: Cannot allocate h5_api_test_sysProcess to "
+ "run the client.\n";
+ return 1;
+ }
+
+ vector<char> ClientStdOut;
+ vector<char> ClientStdErr;
+ vector<char> ClientHelperStdOut;
+ vector<char> ClientHelperStdErr;
+ vector<char> ClientInitStdOut;
+ vector<char> ClientInitStdErr;
+ vector<char> ServerStdOut;
+ vector<char> ServerStdErr;
+
+ vector<const char *> serverCommand;
+ if (server) {
+ const char* serverExe = this->ServerExecutable.c_str();
+
+ this->CreateCommandLine(serverCommand, serverExe, 1, 0,
+ this->MPIServerNumProcessFlag.c_str(), this->ServerArgStart,
+ this->ServerArgCount, argv);
+ this->ReportCommand(&serverCommand[0], "server");
+ h5_api_test_sysProcess_SetCommand(server, &serverCommand[0]);
+ h5_api_test_sysProcess_SetWorkingDirectory(server,
+ this->GetDirectory(serverExe).c_str());
+ }
+
+ vector<const char *> clientHelperCommand;
+ if (client_helper) {
+ // Construct the client helper process command line.
+ const char *clientHelperExe = this->ClientHelperExecutable.c_str();
+ this->CreateCommandLine(clientHelperCommand, clientHelperExe, 0, 1,
+ "1", this->ClientHelperArgStart,
+ this->ClientHelperArgCount, argv);
+ this->ReportCommand(&clientHelperCommand[0], "client_helper");
+ h5_api_test_sysProcess_SetCommand(client_helper, &clientHelperCommand[0]);
+ h5_api_test_sysProcess_SetWorkingDirectory(client_helper,
+ this->GetDirectory(clientHelperExe).c_str());
+ }
+
+ vector<const char *> clientInitCommand;
+ if (client_init) {
+        // Construct the client init process command line.
+ const char *clientInitExe = this->ClientInitExecutable.c_str();
+ this->CreateCommandLine(clientInitCommand, clientInitExe, 0, 1,
+ "1", this->ClientInitArgStart, this->ClientInitArgCount, argv);
+ this->ReportCommand(&clientInitCommand[0], "client_init");
+ h5_api_test_sysProcess_SetCommand(client_init, &clientInitCommand[0]);
+ h5_api_test_sysProcess_SetWorkingDirectory(client_init,
+ this->GetDirectory(clientInitExe).c_str());
+ }
+
+ // Start the server if there is one
+ if (!this->StartServer(server, "server", ServerStdOut, ServerStdErr)) {
+ cerr << "H5APITestDriver: Server never started.\n";
+ H5_API_CLEAN_PROCESSES;
+ return -1;
+ }
+
+ // Start the client helper here if there is one
+ if (!this->StartClientHelper(client_helper, "client_helper",
+ ClientHelperStdOut, ClientHelperStdErr)) {
+ cerr << "H5APITestDriver: Client Helper never started.\n";
+ this->Stop(server, "server");
+#ifdef H5_API_TEST_SERVER_EXIT_COMMAND
+ H5_API_EXECUTE_CMD(H5_API_TEST_SERVER_EXIT_COMMAND);
+#endif
+ H5_API_CLEAN_PROCESSES;
+ return -1;
+ }
+
+ // Start the client init here if there is one
+ if (!this->StartClientInit(client_init, "client_init",
+ ClientInitStdOut, ClientInitStdErr)) {
+ cerr << "H5APITestDriver: Client Init never started.\n";
+ this->Stop(server, "server");
+#ifdef H5_API_TEST_SERVER_EXIT_COMMAND
+ H5_API_EXECUTE_CMD(H5_API_TEST_SERVER_EXIT_COMMAND);
+#endif
+ this->Stop(client_helper, "client_helper");
+#ifdef H5_API_TEST_CLIENT_HELPER_EXIT_COMMAND
+ H5_API_EXECUTE_CMD(H5_API_TEST_CLIENT_HELPER_EXIT_COMMAND);
+#endif
+ H5_API_CLEAN_PROCESSES;
+ return -1;
+ }
+
+ // Construct the client process command line.
+ vector<const char *> clientCommand;
+ const char *clientExe = this->ClientExecutable.c_str();
+ this->CreateCommandLine(clientCommand, clientExe, 0, 0,
+ this->MPIClientNumProcessFlag.c_str(), this->ClientArgStart,
+ this->ClientArgCount, argv);
+ this->ReportCommand(&clientCommand[0], "client");
+ h5_api_test_sysProcess_SetCommand(client, &clientCommand[0]);
+ h5_api_test_sysProcess_SetWorkingDirectory(client,
+ this->GetDirectory(clientExe).c_str());
+
+ // Now run the client
+ if (!this->StartClient(client, "client")) {
+ this->Stop(server, "server");
+ this->Stop(client_helper, "client_helper");
+ this->Stop(client_init, "client_init");
+ H5_API_CLEAN_PROCESSES;
+ return -1;
+ }
+
+ // Report the output of the processes.
+ int clientPipe = 1;
+
+ string output;
+ int mpiError = 0;
+ while (clientPipe) {
+ clientPipe = this->WaitForAndPrintLine("client", client, output, 0.1,
+ ClientStdOut, ClientStdErr, NULL, NULL);
+ if (!mpiError && this->OutputStringHasError("client", output)) {
+ mpiError = 1;
+ }
+ // If client has died, we wait for output from the server processes
+ // for this->ServerExitTimeOut, then we'll kill the servers, if needed.
+ double timeout = (clientPipe) ? 0 : this->ServerExitTimeOut;
+ output = "";
+ this->WaitForAndPrintLine("server", server, output, timeout,
+ ServerStdOut, ServerStdErr, NULL, NULL);
+ if (!mpiError && this->OutputStringHasError("server", output)) {
+ mpiError = 1;
+ }
+ output = "";
+ }
+
+ // Wait for the client and server to exit.
+ h5_api_test_sysProcess_WaitForExit(client, 0);
+
+ // Once client is finished, the servers
+ // must finish quickly. If not, it usually is a sign that
+ // the client crashed/exited before it attempted to connect to
+ // the server.
+ if (server) {
+#ifdef H5_API_TEST_SERVER_EXIT_COMMAND
+ H5_API_EXECUTE_CMD(H5_API_TEST_SERVER_EXIT_COMMAND);
+#endif
+ h5_api_test_sysProcess_WaitForExit(server, &this->ServerExitTimeOut);
+ }
+
+ if (client_helper) {
+#ifdef H5_API_TEST_CLIENT_HELPER_EXIT_COMMAND
+ H5_API_EXECUTE_CMD(H5_API_TEST_CLIENT_HELPER_EXIT_COMMAND);
+#endif
+ h5_api_test_sysProcess_WaitForExit(client_helper, 0);
+ }
+
+ // Get the results.
+ int clientResult = this->ReportStatus(client, "client");
+ int serverResult = 0;
+ if (server) {
+ serverResult = this->ReportStatus(server, "server");
+ h5_api_test_sysProcess_Kill(server);
+ }
+
+ // Free process managers.
+ H5_API_CLEAN_PROCESSES;
+
+ // Report the server return code if it is nonzero. Otherwise report
+ // the client return code.
+ if (serverResult && !this->IgnoreServerResult)
+ return serverResult;
+
+ if (mpiError) {
+ cerr
+            << "H5APITestDriver: Error string found in output, H5APITestDriver returning "
+ << mpiError << "\n";
+ return mpiError;
+ }
+
+ // if server is fine return the client result
+ return clientResult;
+}
+
+//----------------------------------------------------------------------------
+void
+H5APITestDriver::ReportCommand(const char * const *command, const char *name)
+{
+ cerr << "H5APITestDriver: " << name << " command is:\n";
+ for (const char * const *c = command; *c; ++c)
+ cerr << " \"" << *c << "\"";
+ cerr << "\n";
+}
+
+//----------------------------------------------------------------------------
+int
+H5APITestDriver::ReportStatus(h5_api_test_sysProcess *process, const char *name)
+{
+ int result = 1;
+ switch (h5_api_test_sysProcess_GetState(process)) {
+ case h5_api_test_sysProcess_State_Starting: {
+ cerr << "H5APITestDriver: Never started " << name << " process.\n";
+ }
+ break;
+ case h5_api_test_sysProcess_State_Error: {
+ cerr << "H5APITestDriver: Error executing " << name << " process: "
+ << h5_api_test_sysProcess_GetErrorString(process) << "\n";
+ }
+ break;
+ case h5_api_test_sysProcess_State_Exception: {
+ cerr << "H5APITestDriver: " << name
+ << " process exited with an exception: ";
+ switch (h5_api_test_sysProcess_GetExitException(process)) {
+ case h5_api_test_sysProcess_Exception_None: {
+ cerr << "None";
+ }
+ break;
+ case h5_api_test_sysProcess_Exception_Fault: {
+ cerr << "Segmentation fault";
+ }
+ break;
+ case h5_api_test_sysProcess_Exception_Illegal: {
+ cerr << "Illegal instruction";
+ }
+ break;
+ case h5_api_test_sysProcess_Exception_Interrupt: {
+ cerr << "Interrupted by user";
+ }
+ break;
+ case h5_api_test_sysProcess_Exception_Numerical: {
+ cerr << "Numerical exception";
+ }
+ break;
+ case h5_api_test_sysProcess_Exception_Other: {
+ cerr << "Unknown";
+ }
+ break;
+ }
+ cerr << "\n";
+ }
+ break;
+ case h5_api_test_sysProcess_State_Executing: {
+ cerr << "H5APITestDriver: Never terminated " << name
+ << " process.\n";
+ }
+ break;
+ case h5_api_test_sysProcess_State_Exited: {
+ result = h5_api_test_sysProcess_GetExitValue(process);
+ cerr << "H5APITestDriver: " << name << " process exited with code "
+ << result << "\n";
+ }
+ break;
+ case h5_api_test_sysProcess_State_Expired: {
+ cerr << "H5APITestDriver: killed " << name
+ << " process due to timeout.\n";
+ }
+ break;
+ case h5_api_test_sysProcess_State_Killed: {
+ cerr << "H5APITestDriver: killed " << name << " process.\n";
+ }
+ break;
+ }
+ return result;
+}
+
+//----------------------------------------------------------------------------
+int
+H5APITestDriver::WaitForLine(h5_api_test_sysProcess *process, string &line,
+ double timeout, vector<char> &out, vector<char> &err)
+{
+ line = "";
+ vector<char>::iterator outiter = out.begin();
+ vector<char>::iterator erriter = err.begin();
+ while (1) {
+ // Check for a newline in stdout.
+ for (; outiter != out.end(); ++outiter) {
+ if ((*outiter == '\r') && ((outiter + 1) == out.end())) {
+ break;
+ } else if (*outiter == '\n' || *outiter == '\0') {
+ int length = outiter - out.begin();
+ if (length > 1 && *(outiter - 1) == '\r')
+ --length;
+ if (length > 0)
+ line.append(&out[0], length);
+ out.erase(out.begin(), outiter + 1);
+ return h5_api_test_sysProcess_Pipe_STDOUT;
+ }
+ }
+
+ // Check for a newline in stderr.
+ for (; erriter != err.end(); ++erriter) {
+ if ((*erriter == '\r') && ((erriter + 1) == err.end())) {
+ break;
+ } else if (*erriter == '\n' || *erriter == '\0') {
+ int length = erriter - err.begin();
+ if (length > 1 && *(erriter - 1) == '\r')
+ --length;
+ if (length > 0)
+ line.append(&err[0], length);
+ err.erase(err.begin(), erriter + 1);
+ return h5_api_test_sysProcess_Pipe_STDERR;
+ }
+ }
+
+ // No newlines found. Wait for more data from the process.
+ int length;
+ char *data;
+ int pipe = h5_api_test_sysProcess_WaitForData(process, &data, &length,
+ &timeout);
+ if (pipe == h5_api_test_sysProcess_Pipe_Timeout) {
+ // Timeout has been exceeded.
+ return pipe;
+ } else if (pipe == h5_api_test_sysProcess_Pipe_STDOUT) {
+ // Append to the stdout buffer.
+ vector<char>::size_type size = out.size();
+ out.insert(out.end(), data, data + length);
+ outiter = out.begin() + size;
+ } else if (pipe == h5_api_test_sysProcess_Pipe_STDERR) {
+ // Append to the stderr buffer.
+ vector<char>::size_type size = err.size();
+ err.insert(err.end(), data, data + length);
+ erriter = err.begin() + size;
+ } else if (pipe == h5_api_test_sysProcess_Pipe_None) {
+ // Both stdout and stderr pipes have broken. Return leftover data.
+ if (!out.empty()) {
+ line.append(&out[0], outiter - out.begin());
+ out.erase(out.begin(), out.end());
+ return h5_api_test_sysProcess_Pipe_STDOUT;
+ } else if (!err.empty()) {
+ line.append(&err[0], erriter - err.begin());
+ err.erase(err.begin(), err.end());
+ return h5_api_test_sysProcess_Pipe_STDERR;
+ } else {
+ return h5_api_test_sysProcess_Pipe_None;
+ }
+ }
+ }
+}
+
+//----------------------------------------------------------------------------
+void
+H5APITestDriver::PrintLine(const char *pname, const char *line)
+{
+ // if the name changed then the line is output from a different process
+ if (this->CurrentPrintLineName != pname) {
+ cerr << "-------------- " << pname << " output --------------\n";
+ // save the current pname
+ this->CurrentPrintLineName = pname;
+ }
+ cerr << line << "\n";
+ cerr.flush();
+}
+
+//----------------------------------------------------------------------------
+int
+H5APITestDriver::WaitForAndPrintLine(const char *pname,
+ h5_api_test_sysProcess *process, string &line, double timeout,
+ vector<char> &out, vector<char> &err, const char *waitMsg,
+ int *foundWaiting)
+{
+ int pipe = this->WaitForLine(process, line, timeout, out, err);
+ if (pipe == h5_api_test_sysProcess_Pipe_STDOUT
+ || pipe == h5_api_test_sysProcess_Pipe_STDERR) {
+ this->PrintLine(pname, line.c_str());
+        if (foundWaiting && waitMsg && (line.find(waitMsg) != line.npos))
+ *foundWaiting = 1;
+ }
+ return pipe;
+}
+
+//----------------------------------------------------------------------------
+string
+H5APITestDriver::GetDirectory(string location)
+{
+ return h5_api_test_sys::SystemTools::GetParentDirectory(location.c_str());
+}
diff --git a/test/API/driver/h5_api_test_driver.hxx b/test/API/driver/h5_api_test_driver.hxx
new file mode 100644
index 0000000..b8e05e7
--- /dev/null
+++ b/test/API/driver/h5_api_test_driver.hxx
@@ -0,0 +1,93 @@
+#ifndef H5_API_TEST_DRIVER_H
+#define H5_API_TEST_DRIVER_H
+
+#include <string>
+#include <vector>
+
+#include <h5_api_test_sys/Process.h>
+
+class H5APITestDriver {
+public:
+ int Main(int argc, char *argv[]);
+ H5APITestDriver();
+ ~H5APITestDriver();
+
+protected:
+ void SeparateArguments(const char* str, std::vector<std::string> &flags);
+
+ void ReportCommand(const char * const *command, const char *name);
+ int ReportStatus(h5_api_test_sysProcess *process, const char *name);
+ int ProcessCommandLine(int argc, char *argv[]);
+ void CollectConfiguredOptions();
+ void CreateCommandLine(std::vector<const char *> &commandLine,
+ const char *cmd, int isServer, int isHelper, const char *numProc,
+ int argStart = 0, int argCount = 0, char *argv[] = 0);
+
+ int StartServer(h5_api_test_sysProcess *server, const char *name,
+ std::vector<char> &out, std::vector<char> &err);
+ int StartClientHelper(h5_api_test_sysProcess *client, const char *name,
+ std::vector<char> &out, std::vector<char> &err);
+ int StartClientInit(h5_api_test_sysProcess *client, const char *name,
+ std::vector<char> &out, std::vector<char> &err);
+ int StartClient(h5_api_test_sysProcess *client, const char *name);
+ void Stop(h5_api_test_sysProcess *p, const char *name);
+ int OutputStringHasError(const char *pname, std::string &output);
+ int OutputStringHasToken(const char *pname, const char *regex,
+ std::string &output, std::string &token);
+
+ int WaitForLine(h5_api_test_sysProcess *process, std::string &line,
+ double timeout, std::vector<char> &out, std::vector<char> &err);
+ void PrintLine(const char *pname, const char *line);
+ int WaitForAndPrintLine(const char *pname, h5_api_test_sysProcess *process,
+ std::string &line, double timeout, std::vector<char> &out,
+ std::vector<char> &err, const char *waitMsg, int *foundWaiting);
+
+ std::string GetDirectory(std::string location);
+
+private:
+ std::string ClientExecutable; // fullpath to client executable
+ std::string ClientHelperExecutable; // fullpath to client helper executable
+ std::string ClientInitExecutable; // fullpath to client init executable
+ std::string ServerExecutable; // fullpath to server executable
+ std::string MPIRun; // fullpath to mpirun executable
+
+ // This specify the preflags and post flags that can be set using:
+ // VTK_MPI_PRENUMPROC_FLAGS VTK_MPI_PREFLAGS / VTK_MPI_POSTFLAGS at config time
+ // std::vector<std::string> MPIPreNumProcFlags;
+ std::vector<std::string> ClientEnvVars;
+ std::vector<std::string> MPIClientPreFlags;
+ std::vector<std::string> MPIClientPostFlags;
+ std::vector<std::string> MPIServerPreFlags;
+ std::vector<std::string> MPIServerPostFlags;
+
+ // Specify the number of process flag, this can be set using: VTK_MPI_NUMPROC_FLAG.
+ // This is then split into :
+ // MPIServerNumProcessFlag & MPIRenderServerNumProcessFlag
+ std::string MPINumProcessFlag;
+ std::string MPIServerNumProcessFlag;
+ std::string MPIClientNumProcessFlag;
+
+ std::string ClientTokenVar; // use token to launch client if requested
+
+ std::string CurrentPrintLineName;
+
+ double TimeOut;
+ double ServerExitTimeOut; // time to wait for servers to finish.
+ bool ClientHelper;
+ bool ClientInit;
+ bool TestServer;
+
+ int ClientArgStart;
+ int ClientArgCount;
+ int ClientHelperArgStart;
+ int ClientHelperArgCount;
+ int ClientInitArgStart;
+ int ClientInitArgCount;
+ int ServerArgStart;
+ int ServerArgCount;
+ bool AllowErrorInOutput;
+ bool TestSerial;
+ bool IgnoreServerResult;
+};
+
+#endif //H5_API_TEST_DRIVER_H
diff --git a/test/API/tarray.c b/test/API/tarray.c
new file mode 100644
index 0000000..214a022
--- /dev/null
+++ b/test/API/tarray.c
@@ -0,0 +1,2250 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: tarray
+ *
+ * Test the Array Datatype functionality
+ *
+ *************************************************************/
+
+#include "testhdf5.h"
+/* #include "H5srcdir.h" */
+
+#define FILENAME "tarray1.h5"
+#define TESTFILE "tarrold.h5"
+
+/* 1-D array datatype */
+#define ARRAY1_RANK 1
+#define ARRAY1_DIM1 4
+
+/* 3-D array datatype */
+#define ARRAY2_RANK 3
+#define ARRAY2_DIM1 3
+#define ARRAY2_DIM2 4
+#define ARRAY2_DIM3 5
+
+/* 2-D array datatype */
+#define ARRAY3_RANK 2
+#define ARRAY3_DIM1 6
+#define ARRAY3_DIM2 3
+
+/* 1-D dataset with fixed dimensions */
+#define SPACE1_RANK 1
+#define SPACE1_DIM1 4
+
+/* Parameters used with the test_array_bkg() test */
+#define FIELDNAME "ArrayofStructures"
+#define LENGTH 5
+#define ALEN 10
+#define RANK 1
+#define NMAX 100
+
+/* Struct used with test_array_bkg() test */
+typedef struct {
+ int nsubfields;
+ char *name[NMAX];
+ size_t offset[NMAX];
+ hid_t datatype[NMAX];
+
+} CmpDTSinfo;
+
+/* Forward declarations for custom vlen memory manager functions */
+void *test_array_alloc_custom(size_t size, void *info);
+void test_array_free_custom(void *mem, void *info);
+
+/*-------------------------------------------------------------------------
+ * Function: test_array_atomic_1d
+ *
+ * Purpose: Test basic array datatype code.
+ * Tests 1-D array of atomic datatypes.
+ *
+ * Return: void
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+test_array_atomic_1d(void)
+{
+    int     wbuf[SPACE1_DIM1][ARRAY1_DIM1]; /* Data written to the dataset */
+    int     rbuf[SPACE1_DIM1][ARRAY1_DIM1]; /* Data read back from the dataset */
+    hid_t   file_id;                        /* HDF5 file ID */
+    hid_t   dset_id;                        /* Dataset ID */
+    hid_t   space_id;                       /* Dataspace ID */
+    hid_t   atype_id;                       /* Array datatype ID */
+    hsize_t space_dims[] = {SPACE1_DIM1};
+    hsize_t array_dims[] = {ARRAY1_DIM1};
+    int     rank;                           /* Array rank read back from the file */
+    hsize_t read_dims[H5S_MAX_RANK];        /* Array dimensions read back from the file */
+    int     u, v;                           /* Loop counters */
+    herr_t  ret;                            /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing 1-D Array of Atomic Datatypes Functionality\n"));
+
+    /* Fill the write buffer with distinct values */
+    for (u = 0; u < SPACE1_DIM1; u++)
+        for (v = 0; v < ARRAY1_DIM1; v++)
+            wbuf[u][v] = u * 10 + v;
+
+    /* Create the file, a simple 1-D dataspace, and a 1-D array type of native int */
+    file_id = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file_id, FAIL, "H5Fcreate");
+
+    space_id = H5Screate_simple(SPACE1_RANK, space_dims, NULL);
+    CHECK(space_id, FAIL, "H5Screate_simple");
+
+    atype_id = H5Tarray_create2(H5T_NATIVE_INT, ARRAY1_RANK, array_dims);
+    CHECK(atype_id, FAIL, "H5Tarray_create2");
+
+    /* Create the dataset and write the data to it */
+    dset_id = H5Dcreate2(file_id, "Dataset1", atype_id, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dset_id, FAIL, "H5Dcreate2");
+
+    ret = H5Dwrite(dset_id, atype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Release all the IDs used for writing and close the file */
+    ret = H5Dclose(dset_id);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Tclose(atype_id);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Sclose(space_id);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Fclose(file_id);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open the file read-only and get the dataset and its stored datatype back */
+    file_id = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(file_id, FAIL, "H5Fopen");
+
+    dset_id = H5Dopen2(file_id, "Dataset1", H5P_DEFAULT);
+    CHECK(dset_id, FAIL, "H5Dopen2");
+
+    atype_id = H5Dget_type(dset_id);
+    CHECK(atype_id, FAIL, "H5Dget_type");
+
+    /* Verify the rank of the stored array type */
+    rank = H5Tget_array_ndims(atype_id);
+    VERIFY(rank, ARRAY1_RANK, "H5Tget_array_ndims");
+
+    /* Verify the dimensions of the stored array type */
+    ret = H5Tget_array_dims2(atype_id, read_dims);
+    CHECK(ret, FAIL, "H5Tget_array_dims2");
+
+    for (u = 0; u < rank; u++)
+        if (read_dims[u] != array_dims[u]) {
+            TestErrPrintf("Array dimension information doesn't match!, rdims1[%d]=%" PRIuHSIZE
+                          ", tdims1[%d]=%" PRIuHSIZE "\n",
+                          u, read_dims[u], u, array_dims[u]);
+            continue;
+        } /* end if */
+
+    /* Read the data back and compare it element-by-element against what was written */
+    ret = H5Dread(dset_id, atype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    for (u = 0; u < SPACE1_DIM1; u++)
+        for (v = 0; v < ARRAY1_DIM1; v++)
+            if (wbuf[u][v] != rbuf[u][v]) {
+                TestErrPrintf("Array data information doesn't match!, wdata[%d][%d]=%d, rdata[%d][%d]=%d\n",
+                              (int)u, (int)v, (int)wbuf[u][v], (int)u, (int)v, (int)rbuf[u][v]);
+                continue;
+            } /* end if */
+
+    /* Close everything that is still open */
+    ret = H5Tclose(atype_id);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Dclose(dset_id);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Fclose(file_id);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_array_atomic_1d() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_array_funcs
+ *
+ * Purpose: Test some type functions that are and aren't supposed to
+ * work with array type.
+ *
+ * Return: void
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+test_array_funcs(void)
+{
+    hid_t type;                       /* Array datatype ID (1-D array of H5T_IEEE_F32BE) */
+    hsize_t tdims1[] = {ARRAY1_DIM1}; /* Array dimension */
+    size_t size;                      /* Result of size/precision/ebias queries */
+    H5T_pad_t inpad;                  /* Internal padding value */
+    H5T_norm_t norm;                  /* Mantissa normalization value */
+    H5T_cset_t cset;                  /* Character set (string-only property) */
+    H5T_str_t strpad;                 /* String padding (string-only property) */
+    herr_t ret;                       /* Generic return value */
+
+    /* Create a datatype to refer to: a 1-D array of big-endian floats */
+    type = H5Tarray_create2(H5T_IEEE_F32BE, ARRAY1_RANK, tdims1);
+    CHECK(type, FAIL, "H5Tarray_create2");
+
+    /* The atomic/floating-point queries and setters below are expected to
+     * succeed on the array type (presumably they are applied to the float
+     * base type -- confirm against the H5T reference if relied upon).
+     */
+    size = H5Tget_precision(type);
+    CHECK(size, 0, "H5Tget_precision");
+
+    size = H5Tget_size(type);
+    CHECK(size, 0, "H5Tget_size");
+
+    size = H5Tget_ebias(type);
+    CHECK(size, 0, "H5Tget_ebias");
+
+    ret = H5Tset_pad(type, H5T_PAD_ZERO, H5T_PAD_ONE);
+    CHECK(ret, FAIL, "H5Tset_pad");
+
+    inpad = H5Tget_inpad(type);
+    CHECK(inpad, FAIL, "H5Tget_inpad");
+
+    norm = H5Tget_norm(type);
+    CHECK(norm, FAIL, "H5Tget_norm");
+
+    ret = H5Tset_offset(type, (size_t)16);
+    CHECK(ret, FAIL, "H5Tset_offset");
+
+    /* The string-only queries must fail on an array type; the errors are
+     * expected, so the error stack is suppressed around the calls.
+     */
+    H5E_BEGIN_TRY
+    {
+        cset = H5Tget_cset(type);
+    }
+    H5E_END_TRY;
+    VERIFY(cset, FAIL, "H5Tget_cset");
+
+    H5E_BEGIN_TRY
+    {
+        strpad = H5Tget_strpad(type);
+    }
+    H5E_END_TRY;
+    VERIFY(strpad, FAIL, "H5Tget_strpad");
+
+    /* Close datatype */
+    ret = H5Tclose(type);
+    CHECK(ret, FAIL, "H5Tclose");
+} /* end test_array_funcs() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_array_atomic_3d
+ *
+ * Purpose: Test basic array datatype code.
+ * Tests 3-D array of atomic datatypes.
+ *
+ * Return: void
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+test_array_atomic_3d(void)
+{
+    int wdata[SPACE1_DIM1][ARRAY2_DIM1][ARRAY2_DIM2][ARRAY2_DIM3]; /* Information to write */
+    int rdata[SPACE1_DIM1][ARRAY2_DIM1][ARRAY2_DIM2][ARRAY2_DIM3]; /* Information read in */
+    hid_t fid;     /* HDF5 File IDs */
+    hid_t dataset; /* Dataset ID */
+    hid_t sid;     /* Dataspace ID */
+    hid_t tid;     /* Datatype ID */
+    hsize_t sdims1[] = {SPACE1_DIM1};
+    hsize_t tdims2[] = {ARRAY2_DIM1, ARRAY2_DIM2, ARRAY2_DIM3};
+    int ndims;                    /* Array rank for reading */
+    hsize_t rdims2[H5S_MAX_RANK]; /* Array dimensions for reading */
+    int i, j, k, l;               /* counting variables */
+    herr_t ret;                   /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing 3-D Array of Atomic Datatypes Functionality\n"));
+
+    /* Allocate and initialize array data to write */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        for (j = 0; j < ARRAY2_DIM1; j++)
+            for (k = 0; k < ARRAY2_DIM2; k++)
+                for (l = 0; l < ARRAY2_DIM3; l++)
+                    wdata[i][j][k][l] = i * 1000 + j * 100 + k * 10 + l;
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid = H5Screate_simple(SPACE1_RANK, sdims1, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Create a datatype to refer to */
+    tid = H5Tarray_create2(H5T_NATIVE_INT, ARRAY2_RANK, tdims2);
+    CHECK(tid, FAIL, "H5Tarray_create2");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid, "Dataset1", tid, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open file */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open the dataset */
+    dataset = H5Dopen2(fid, "Dataset1", H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+    /* Get the datatype */
+    tid = H5Dget_type(dataset);
+    CHECK(tid, FAIL, "H5Dget_type");
+
+    /* Check the array rank */
+    ndims = H5Tget_array_ndims(tid);
+    VERIFY(ndims, ARRAY2_RANK, "H5Tget_array_ndims");
+
+    /* Get the array dimensions */
+    ret = H5Tget_array_dims2(tid, rdims2);
+    CHECK(ret, FAIL, "H5Tget_array_dims2");
+
+    /* Check the array dimensions.  Print hsize_t values with PRIuHSIZE (as the
+     * 1-D test does) instead of casting to int, which could truncate them.
+     */
+    for (i = 0; i < ndims; i++)
+        if (rdims2[i] != tdims2[i]) {
+            TestErrPrintf("Array dimension information doesn't match!, rdims2[%d]=%" PRIuHSIZE
+                          ", tdims2[%d]=%" PRIuHSIZE "\n",
+                          i, rdims2[i], i, tdims2[i]);
+            continue;
+        } /* end if */
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        for (j = 0; j < ARRAY2_DIM1; j++)
+            for (k = 0; k < ARRAY2_DIM2; k++)
+                for (l = 0; l < ARRAY2_DIM3; l++)
+                    if (wdata[i][j][k][l] != rdata[i][j][k][l]) {
+                        TestErrPrintf("Array data information doesn't match!, wdata[%d][%d][%d][%d]=%d, "
+                                      "rdata[%d][%d][%d][%d]=%d\n",
+                                      (int)i, (int)j, (int)k, (int)l, (int)wdata[i][j][k][l], (int)i, (int)j,
+                                      (int)k, (int)l, (int)rdata[i][j][k][l]);
+                        continue;
+                    } /* end if */
+
+    /* Close Datatype */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_array_atomic_3d() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_array_array_atomic
+ *
+ * Purpose: Test basic array datatype code.
+ * Tests 1-D array 2-D arrays of atomic datatypes.
+ *
+ * Return: void
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+test_array_array_atomic(void)
+{
+    int wdata[SPACE1_DIM1][ARRAY1_DIM1][ARRAY3_DIM1][ARRAY3_DIM2]; /* Information to write */
+    int rdata[SPACE1_DIM1][ARRAY1_DIM1][ARRAY3_DIM1][ARRAY3_DIM2]; /* Information read in */
+    hid_t fid;     /* HDF5 File IDs */
+    hid_t dataset; /* Dataset ID */
+    hid_t sid;     /* Dataspace ID */
+    hid_t tid1;    /* 1-D array Datatype ID */
+    hid_t tid2;    /* 2-D array Datatype ID */
+    hsize_t sdims1[] = {SPACE1_DIM1};
+    hsize_t tdims1[] = {ARRAY1_DIM1};
+    hsize_t tdims2[] = {ARRAY3_DIM1, ARRAY3_DIM2};
+    int ndims1;                   /* Array rank for reading */
+    int ndims2;                   /* Array rank for reading */
+    hsize_t rdims1[H5S_MAX_RANK]; /* Array dimensions for reading */
+    hsize_t rdims2[H5S_MAX_RANK]; /* Array dimensions for reading */
+    int i, j, k, l;               /* counting variables */
+    herr_t ret;                   /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing 1-D Array 2-D Arrays of Atomic Datatypes Functionality\n"));
+
+    /* Allocate and initialize array data to write */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        for (j = 0; j < ARRAY1_DIM1; j++)
+            for (k = 0; k < ARRAY3_DIM1; k++)
+                for (l = 0; l < ARRAY3_DIM2; l++)
+                    wdata[i][j][k][l] = i * 1000 + j * 100 + k * 10 + l;
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid = H5Screate_simple(SPACE1_RANK, sdims1, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Create a 2-D datatype to refer to */
+    tid2 = H5Tarray_create2(H5T_NATIVE_INT, ARRAY3_RANK, tdims2);
+    CHECK(tid2, FAIL, "H5Tarray_create2");
+
+    /* Create a 1-D datatype to refer to (whose elements are the 2-D arrays) */
+    tid1 = H5Tarray_create2(tid2, ARRAY1_RANK, tdims1);
+    CHECK(tid1, FAIL, "H5Tarray_create2");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid, "Dataset1", tid1, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatypes */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open file */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open the dataset */
+    dataset = H5Dopen2(fid, "Dataset1", H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+    /* Get the 1-D datatype */
+    tid1 = H5Dget_type(dataset);
+    CHECK(tid1, FAIL, "H5Dget_type");
+
+    /* Check the 1-D array rank */
+    ndims1 = H5Tget_array_ndims(tid1);
+    VERIFY(ndims1, ARRAY1_RANK, "H5Tget_array_ndims");
+
+    /* Get the 1-D array dimensions */
+    ret = H5Tget_array_dims2(tid1, rdims1);
+    CHECK(ret, FAIL, "H5Tget_array_dims2");
+
+    /* Check the array dimensions */
+    for (i = 0; i < ndims1; i++)
+        if (rdims1[i] != tdims1[i]) {
+            TestErrPrintf("Array dimension information doesn't match!, rdims1[%d]=%" PRIuHSIZE
+                          ", tdims1[%d]=%" PRIuHSIZE "\n",
+                          i, rdims1[i], i, tdims1[i]);
+            continue;
+        } /* end if */
+
+    /* Get the 2-D datatype (the base type of the 1-D array) */
+    tid2 = H5Tget_super(tid1);
+    CHECK(tid2, FAIL, "H5Tget_super");
+
+    /* Check the 2-D array rank */
+    ndims2 = H5Tget_array_ndims(tid2);
+    VERIFY(ndims2, ARRAY3_RANK, "H5Tget_array_ndims");
+
+    /* Get the 2-D array dimensions */
+    ret = H5Tget_array_dims2(tid2, rdims2);
+    CHECK(ret, FAIL, "H5Tget_array_dims2");
+
+    /* Check the array dimensions.  Print hsize_t values with PRIuHSIZE (as the
+     * 1-D check above does) instead of casting to int, which could truncate them.
+     */
+    for (i = 0; i < ndims2; i++)
+        if (rdims2[i] != tdims2[i]) {
+            TestErrPrintf("Array dimension information doesn't match!, rdims2[%d]=%" PRIuHSIZE
+                          ", tdims2[%d]=%" PRIuHSIZE "\n",
+                          i, rdims2[i], i, tdims2[i]);
+            continue;
+        } /* end if */
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        for (j = 0; j < ARRAY1_DIM1; j++)
+            for (k = 0; k < ARRAY3_DIM1; k++)
+                for (l = 0; l < ARRAY3_DIM2; l++)
+                    if (wdata[i][j][k][l] != rdata[i][j][k][l]) {
+                        TestErrPrintf("Array data information doesn't match!, wdata[%d][%d][%d][%d]=%d, "
+                                      "rdata[%d][%d][%d][%d]=%d\n",
+                                      (int)i, (int)j, (int)k, (int)l, (int)wdata[i][j][k][l], (int)i, (int)j,
+                                      (int)k, (int)l, (int)rdata[i][j][k][l]);
+                        continue;
+                    } /* end if */
+
+    /* Close Datatypes */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_array_array_atomic() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_array_compound_atomic
+ *
+ * Purpose: Test basic array datatype code.
+ * Tests 1-D array of compound datatypes (with no array fields).
+ *
+ * Return: void
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+test_array_compound_atomic(void)
+{
+    typedef struct { /* Typedef for compound datatype */
+        int i;
+        float f;
+    } s1_t;
+
+    s1_t wdata[SPACE1_DIM1][ARRAY1_DIM1]; /* Information to write */
+    s1_t rdata[SPACE1_DIM1][ARRAY1_DIM1]; /* Information read in */
+    hid_t fid1;    /* HDF5 File IDs */
+    hid_t dataset; /* Dataset ID */
+    hid_t sid1;    /* Dataspace ID */
+    hid_t tid1;    /* Array Datatype ID */
+    hid_t tid2;    /* Compound Datatype ID */
+    hsize_t sdims1[] = {SPACE1_DIM1};
+    hsize_t tdims1[] = {ARRAY1_DIM1};
+    int ndims;                    /* Array rank for reading */
+    hsize_t rdims1[H5S_MAX_RANK]; /* Array dimensions for reading */
+    int nmemb;                    /* Number of compound members */
+    char *mname;                  /* Name of compound field */
+    size_t off;                   /* Offset of compound field */
+    hid_t mtid;                   /* Datatype ID for field */
+    int i, j;                     /* counting variables */
+    herr_t ret;                   /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing 1-D Array of Compound Atomic Datatypes Functionality\n"));
+
+    /* Initialize array data to write */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        for (j = 0; j < ARRAY1_DIM1; j++) {
+            wdata[i][j].i = i * 10 + j;
+            wdata[i][j].f = (float)i * 2.5F + (float)j;
+        } /* end for */
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid1 = H5Screate_simple(SPACE1_RANK, sdims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create a compound datatype to refer to */
+    tid2 = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+    CHECK(tid2, FAIL, "H5Tcreate");
+
+    /* Insert integer field */
+    ret = H5Tinsert(tid2, "i", HOFFSET(s1_t, i), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Insert float field */
+    ret = H5Tinsert(tid2, "f", HOFFSET(s1_t, f), H5T_NATIVE_FLOAT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Create an array datatype to refer to */
+    tid1 = H5Tarray_create2(tid2, ARRAY1_RANK, tdims1);
+    CHECK(tid1, FAIL, "H5Tarray_create2");
+
+    /* Close compound datatype */
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, "Dataset1", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open file */
+    fid1 = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fopen");
+
+    /* Open the dataset */
+    dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+    /* Get the datatype */
+    tid1 = H5Dget_type(dataset);
+    CHECK(tid1, FAIL, "H5Dget_type");
+
+    /* Check the array rank */
+    ndims = H5Tget_array_ndims(tid1);
+    VERIFY(ndims, ARRAY1_RANK, "H5Tget_array_ndims");
+
+    /* Get the array dimensions */
+    ret = H5Tget_array_dims2(tid1, rdims1);
+    CHECK(ret, FAIL, "H5Tget_array_dims2");
+
+    /* Check the array dimensions */
+    for (i = 0; i < ndims; i++)
+        if (rdims1[i] != tdims1[i]) {
+            TestErrPrintf("Array dimension information doesn't match!, rdims1[%d]=%" PRIuHSIZE
+                          ", tdims1[%d]=%" PRIuHSIZE "\n",
+                          i, rdims1[i], i, tdims1[i]);
+            continue;
+        } /* end if */
+
+    /* Get the compound datatype */
+    tid2 = H5Tget_super(tid1);
+    CHECK(tid2, FAIL, "H5Tget_super");
+
+    /* Check the number of members */
+    nmemb = H5Tget_nmembers(tid2);
+    VERIFY(nmemb, 2, "H5Tget_nmembers");
+
+    /* Check the 1st field's name */
+    mname = H5Tget_member_name(tid2, 0);
+    CHECK_PTR(mname, "H5Tget_member_name");
+    if (HDstrcmp(mname, "i") != 0)
+        TestErrPrintf("Compound field name doesn't match!, mname=%s\n", mname);
+    H5free_memory(mname);
+
+    /* Check the 1st field's offset */
+    off = H5Tget_member_offset(tid2, 0);
+    VERIFY(off, HOFFSET(s1_t, i), "H5Tget_member_offset");
+
+    /* Check the 1st field's datatype */
+    mtid = H5Tget_member_type(tid2, 0);
+    CHECK(mtid, FAIL, "H5Tget_member_type");
+    if ((ret = H5Tequal(mtid, H5T_NATIVE_INT)) <= 0)
+        TestErrPrintf("Compound data type is incorrect!, ret=%d\n", (int)ret);
+    ret = H5Tclose(mtid);
+    /* Check the close's return value, not the stale handle */
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Check the 2nd field's name */
+    mname = H5Tget_member_name(tid2, 1);
+    CHECK_PTR(mname, "H5Tget_member_name");
+    if (HDstrcmp(mname, "f") != 0)
+        TestErrPrintf("Compound field name doesn't match!, mname=%s\n", mname);
+    H5free_memory(mname);
+
+    /* Check the 2nd field's offset */
+    off = H5Tget_member_offset(tid2, 1);
+    VERIFY(off, HOFFSET(s1_t, f), "H5Tget_member_offset");
+
+    /* Check the 2nd field's datatype */
+    mtid = H5Tget_member_type(tid2, 1);
+    CHECK(mtid, FAIL, "H5Tget_member_type");
+    if ((ret = H5Tequal(mtid, H5T_NATIVE_FLOAT)) <= 0)
+        TestErrPrintf("Compound data type is incorrect!, ret=%d\n", (int)ret);
+    ret = H5Tclose(mtid);
+    /* Check the close's return value, not the stale handle */
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close Compound Datatype */
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        for (j = 0; j < ARRAY1_DIM1; j++) {
+            if (wdata[i][j].i != rdata[i][j].i) {
+                TestErrPrintf(
+                    "Array data information doesn't match!, wdata[%d][%d].i=%d, rdata[%d][%d].i=%d\n", (int)i,
+                    (int)j, (int)wdata[i][j].i, (int)i, (int)j, (int)rdata[i][j].i);
+                continue;
+            } /* end if */
+            if (!H5_FLT_ABS_EQUAL(wdata[i][j].f, rdata[i][j].f)) {
+                TestErrPrintf(
+                    "Array data information doesn't match!, wdata[%d][%d].f=%f, rdata[%d][%d].f=%f\n", (int)i,
+                    (int)j, (double)wdata[i][j].f, (int)i, (int)j, (double)rdata[i][j].f);
+                continue;
+            } /* end if */
+        }     /* end for */
+
+    /* Close Datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_array_compound_atomic() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_array_compound_array
+ *
+ * Purpose: Test basic array datatype code.
+ * Tests 1-D array of compound datatypes (with array fields).
+ *
+ * Return: void
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+test_array_compound_array(void)
+{
+    typedef struct { /* Typedef for compound datatype */
+        int i;
+        float f[ARRAY1_DIM1];
+    } s1_t;
+
+    s1_t wdata[SPACE1_DIM1][ARRAY1_DIM1]; /* Information to write */
+    s1_t rdata[SPACE1_DIM1][ARRAY1_DIM1]; /* Information read in */
+    hid_t fid1;    /* HDF5 File IDs */
+    hid_t dataset; /* Dataset ID */
+    hid_t sid1;    /* Dataspace ID */
+    hid_t tid1;    /* Array Datatype ID */
+    hid_t tid2;    /* Compound Datatype ID */
+    hid_t tid3;    /* Nested Array Datatype ID */
+    hsize_t sdims1[] = {SPACE1_DIM1};
+    hsize_t tdims1[] = {ARRAY1_DIM1};
+    int ndims;                    /* Array rank for reading */
+    hsize_t rdims1[H5S_MAX_RANK]; /* Array dimensions for reading */
+    int nmemb;                    /* Number of compound members */
+    char *mname;                  /* Name of compound field */
+    size_t off;                   /* Offset of compound field */
+    hid_t mtid;                   /* Datatype ID for field */
+    H5T_class_t mclass;           /* Datatype class for field */
+    int i, j, k;                  /* counting variables */
+    herr_t ret;                   /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing 1-D Array of Compound Array Datatypes Functionality\n"));
+
+    /* Initialize array data to write */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        for (j = 0; j < ARRAY1_DIM1; j++) {
+            wdata[i][j].i = i * 10 + j;
+            for (k = 0; k < ARRAY1_DIM1; k++)
+                wdata[i][j].f[k] = (float)i * 10.0F + (float)j * 2.5F + (float)k;
+        } /* end for */
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid1 = H5Screate_simple(SPACE1_RANK, sdims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create a compound datatype to refer to */
+    tid2 = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+    CHECK(tid2, FAIL, "H5Tcreate");
+
+    /* Insert integer field */
+    ret = H5Tinsert(tid2, "i", HOFFSET(s1_t, i), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Create an array of floats datatype */
+    tid3 = H5Tarray_create2(H5T_NATIVE_FLOAT, ARRAY1_RANK, tdims1);
+    CHECK(tid3, FAIL, "H5Tarray_create2");
+
+    /* Insert float array field */
+    ret = H5Tinsert(tid2, "f", HOFFSET(s1_t, f), tid3);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Close array of floats field datatype */
+    ret = H5Tclose(tid3);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Create an array datatype to refer to */
+    tid1 = H5Tarray_create2(tid2, ARRAY1_RANK, tdims1);
+    CHECK(tid1, FAIL, "H5Tarray_create2");
+
+    /* Close compound datatype */
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, "Dataset1", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open file */
+    fid1 = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fopen");
+
+    /* Open the dataset */
+    dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+    /* Get the datatype */
+    tid1 = H5Dget_type(dataset);
+    CHECK(tid1, FAIL, "H5Dget_type");
+
+    /* Check the array rank */
+    ndims = H5Tget_array_ndims(tid1);
+    VERIFY(ndims, ARRAY1_RANK, "H5Tget_array_ndims");
+
+    /* Get the array dimensions */
+    ret = H5Tget_array_dims2(tid1, rdims1);
+    CHECK(ret, FAIL, "H5Tget_array_dims2");
+
+    /* Check the array dimensions */
+    for (i = 0; i < ndims; i++)
+        if (rdims1[i] != tdims1[i]) {
+            TestErrPrintf("Array dimension information doesn't match!, rdims1[%d]=%" PRIuHSIZE
+                          ", tdims1[%d]=%" PRIuHSIZE "\n",
+                          i, rdims1[i], i, tdims1[i]);
+            continue;
+        } /* end if */
+
+    /* Get the compound datatype */
+    tid2 = H5Tget_super(tid1);
+    CHECK(tid2, FAIL, "H5Tget_super");
+
+    /* Check the number of members */
+    nmemb = H5Tget_nmembers(tid2);
+    VERIFY(nmemb, 2, "H5Tget_nmembers");
+
+    /* Check the 1st field's name */
+    mname = H5Tget_member_name(tid2, 0);
+    CHECK_PTR(mname, "H5Tget_member_name");
+    if (HDstrcmp(mname, "i") != 0)
+        TestErrPrintf("Compound field name doesn't match!, mname=%s\n", mname);
+    H5free_memory(mname);
+
+    /* Check the 1st field's offset */
+    off = H5Tget_member_offset(tid2, 0);
+    VERIFY(off, HOFFSET(s1_t, i), "H5Tget_member_offset");
+
+    /* Check the 1st field's datatype */
+    mtid = H5Tget_member_type(tid2, 0);
+    CHECK(mtid, FAIL, "H5Tget_member_type");
+    if ((ret = H5Tequal(mtid, H5T_NATIVE_INT)) <= 0)
+        TestErrPrintf("Compound data type is incorrect!, ret=%d\n", (int)ret);
+    ret = H5Tclose(mtid);
+    /* Check the close's return value, not the stale handle */
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Check the 2nd field's name */
+    mname = H5Tget_member_name(tid2, 1);
+    CHECK_PTR(mname, "H5Tget_member_name");
+    if (HDstrcmp(mname, "f") != 0)
+        TestErrPrintf("Compound field name doesn't match!, mname=%s\n", mname);
+    H5free_memory(mname);
+
+    /* Check the 2nd field's offset */
+    off = H5Tget_member_offset(tid2, 1);
+    VERIFY(off, HOFFSET(s1_t, f), "H5Tget_member_offset");
+
+    /* Check the 2nd field's datatype */
+    mtid = H5Tget_member_type(tid2, 1);
+    CHECK(mtid, FAIL, "H5Tget_member_type");
+
+    /* Get the 2nd field's class */
+    mclass = H5Tget_class(mtid);
+    VERIFY(mclass, H5T_ARRAY, "H5Tget_class");
+
+    /* Check the array rank */
+    ndims = H5Tget_array_ndims(mtid);
+    VERIFY(ndims, ARRAY1_RANK, "H5Tget_array_ndims");
+
+    /* Get the array dimensions */
+    ret = H5Tget_array_dims2(mtid, rdims1);
+    CHECK(ret, FAIL, "H5Tget_array_dims2");
+
+    /* Check the array dimensions */
+    for (i = 0; i < ndims; i++)
+        if (rdims1[i] != tdims1[i]) {
+            TestErrPrintf("Nested array dimension information doesn't match!, rdims1[%d]=%" PRIuHSIZE
+                          ", tdims1[%d]=%" PRIuHSIZE "\n",
+                          i, rdims1[i], i, tdims1[i]);
+            continue;
+        } /* end if */
+
+    /* Check the nested array's datatype */
+    tid3 = H5Tget_super(mtid);
+    CHECK(tid3, FAIL, "H5Tget_super");
+
+    if ((ret = H5Tequal(tid3, H5T_NATIVE_FLOAT)) <= 0)
+        TestErrPrintf("Compound data type is incorrect!, ret=%d\n", (int)ret);
+
+    /* Close the array's base type datatype */
+    ret = H5Tclose(tid3);
+    /* Check the close's return value, not an unrelated handle */
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close the member datatype */
+    ret = H5Tclose(mtid);
+    /* Check the close's return value, not the stale handle */
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close Compound Datatype */
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        for (j = 0; j < ARRAY1_DIM1; j++) {
+            if (wdata[i][j].i != rdata[i][j].i) {
+                TestErrPrintf(
+                    "Array data information doesn't match!, wdata[%d][%d].i=%d, rdata[%d][%d].i=%d\n", (int)i,
+                    (int)j, (int)wdata[i][j].i, (int)i, (int)j, (int)rdata[i][j].i);
+                continue;
+            } /* end if */
+            for (k = 0; k < ARRAY1_DIM1; k++)
+                if (!H5_FLT_ABS_EQUAL(wdata[i][j].f[k], rdata[i][j].f[k])) {
+                    TestErrPrintf("Array data information doesn't match!, wdata[%d][%d].f[%d]=%f, "
+                                  "rdata[%d][%d].f[%d]=%f\n",
+                                  (int)i, (int)j, (int)k, (double)wdata[i][j].f[k], (int)i, (int)j, (int)k,
+                                  (double)rdata[i][j].f[k]);
+                    continue;
+                } /* end if */
+        }         /* end for */
+    }             /* end for */
+
+    /* Close Datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_array_compound_array() */
+
+/****************************************************************
+**
+** test_array_alloc_custom(): Test VL datatype custom memory
+** allocation routines. This routine just uses malloc to
+** allocate the memory and increments the amount of memory
+** allocated.
+**
+****************************************************************/
+
+/*-------------------------------------------------------------------------
+ * Function: test_array_alloc_custom
+ *
+ * Purpose: Memory allocator for testing VL datatype custom memory
+ * allocation routines.
+ *
+ * This routine just uses malloc to allocate the memory and
+ * increments the amount of memory allocated.
+ *
+ * Return:
+ *
+ * Success: A memory buffer
+ * Failure: NULL
+ *
+ *-------------------------------------------------------------------------
+ */
+void *
+test_array_alloc_custom(size_t size, void *info)
+{
+    void *ret_value = NULL;            /* Pointer to return */
+    size_t *mem_used = (size_t *)info; /* Pointer to the memory used */
+    size_t extra;                      /* Extra space needed */
+
+    /*
+     * This weird contortion is required on the DEC Alpha to keep the
+     * alignment correct - QAK
+     */
+    extra = MAX(sizeof(void *), sizeof(size_t));
+
+    if ((ret_value = HDmalloc(extra + size)) != NULL) {
+        /* Record the user-visible size in the header so the free routine
+         * can decrement the running total, then hand back the region just
+         * past the header.
+         */
+        *(size_t *)ret_value = size;
+        *mem_used += size;
+        ret_value = ((unsigned char *)ret_value) + extra;
+    } /* end if */
+
+    /* Returns NULL on allocation failure.  Previously the header offset was
+     * applied unconditionally, which performed pointer arithmetic on NULL
+     * (undefined behavior) and returned a garbage non-NULL pointer.
+     */
+    return ret_value;
+} /* end test_array_alloc_custom() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_array_free_custom
+ *
+ * Purpose: Memory free function for testing VL datatype custom memory
+ * allocation routines.
+ *
+ * This routine just uses free to free the memory and
+ * decrements the amount of memory allocated.
+ *
+ * Return: void
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+test_array_free_custom(void *_mem, void *info)
+{
+ unsigned char *mem = NULL; /* Pointer to mem to be freed */
+ size_t *mem_used = (size_t *)info; /* Pointer to the memory used */
+ size_t extra; /* Extra space needed */
+
+ /*
+ * This weird contortion is required on the DEC Alpha to keep the
+ * alignment correct - QAK
+ */
+ extra = MAX(sizeof(void *), sizeof(size_t));
+
+ if (_mem != NULL) {
+ mem = ((unsigned char *)_mem) - extra;
+ *mem_used -= *(size_t *)((void *)mem);
+ HDfree(mem);
+ } /* end if */
+
+} /* end test_array_free_custom() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_array_vlen_atomic
+ *
+ * Purpose: Test basic array datatype code.
+ * Tests 1-D array of atomic VL datatypes.
+ *
+ * Return: void
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+test_array_vlen_atomic(void)
+{
+ hvl_t wdata[SPACE1_DIM1][ARRAY1_DIM1]; /* Information to write */
+ hvl_t rdata[SPACE1_DIM1][ARRAY1_DIM1]; /* Information read in */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1; /* Array Datatype ID */
+ hid_t tid2; /* VL Datatype ID */
+ hid_t tid3; /* Atomic Datatype ID */
+ hsize_t sdims1[] = {SPACE1_DIM1};
+ hsize_t tdims1[] = {ARRAY1_DIM1};
+ int ndims; /* Array rank for reading */
+ hsize_t rdims1[H5S_MAX_RANK]; /* Array dimensions for reading */
+ H5T_class_t mclass; /* Datatype class for VL */
+ hid_t xfer_pid; /* Dataset transfer property list ID */
+ hsize_t size; /* Number of bytes which will be used */
+ size_t mem_used = 0; /* Memory used during allocation */
+ int i, j, k; /* counting variables */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing 1-D Array of Atomic Variable-Length Datatypes Functionality\n"));
+
+ /* Initialize array data to write */
+ for (i = 0; i < SPACE1_DIM1; i++)
+ for (j = 0; j < ARRAY1_DIM1; j++) {
+ wdata[i][j].p = HDmalloc((size_t)(i + j + 1) * sizeof(unsigned int));
+ wdata[i][j].len = (size_t)(i + j + 1);
+ for (k = 0; k < (i + j + 1); k++)
+ ((unsigned int *)wdata[i][j].p)[k] = (unsigned int)(i * 100 + j * 10 + k);
+ } /* end for */
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, sdims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create a compound datatype to refer to */
+ tid2 = H5Tvlen_create(H5T_NATIVE_UINT);
+ CHECK(tid2, FAIL, "H5Tcreate");
+
+ /* Create an array datatype to refer to */
+ tid1 = H5Tarray_create2(tid2, ARRAY1_RANK, tdims1);
+ CHECK(tid1, FAIL, "H5Tarray_create2");
+
+ /* Close VL datatype */
+ ret = H5Tclose(tid2);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, "Dataset1", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write dataset to disk */
+ ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open file */
+ fid1 = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+
+ /* Get the dataspace */
+ sid1 = H5Dget_space(dataset);
+ CHECK(sid1, FAIL, "H5Dget_space");
+
+ /* Get the datatype */
+ tid1 = H5Dget_type(dataset);
+ CHECK(tid1, FAIL, "H5Dget_type");
+
+ /* Check the array rank */
+ ndims = H5Tget_array_ndims(tid1);
+ VERIFY(ndims, ARRAY1_RANK, "H5Tget_array_ndims");
+
+ /* Get the array dimensions */
+ ret = H5Tget_array_dims2(tid1, rdims1);
+ CHECK(ret, FAIL, "H5Tget_array_dims2");
+
+ /* Check the array dimensions */
+ for (i = 0; i < ndims; i++)
+ if (rdims1[i] != tdims1[i]) {
+ TestErrPrintf("Array dimension information doesn't match!, rdims1[%d]=%" PRIuHSIZE
+ ", tdims1[%d]=%" PRIuHSIZE "\n",
+ i, rdims1[i], i, tdims1[i]);
+ continue;
+ } /* end if */
+
+ /* Get the VL datatype */
+ tid2 = H5Tget_super(tid1);
+ CHECK(tid2, FAIL, "H5Tget_super");
+
+ /* Get the 2nd field's class */
+ mclass = H5Tget_class(tid2);
+ VERIFY(mclass, H5T_VLEN, "H5Tget_class");
+
+ /* Check the VL datatype's base type */
+ tid3 = H5Tget_super(tid2);
+ CHECK(tid3, FAIL, "H5Tget_super");
+
+ if ((ret = H5Tequal(tid3, H5T_NATIVE_UINT)) <= 0)
+ TestErrPrintf("VL base datatype is incorrect!, ret=%d\n", (int)ret);
+
+ /* Close the array's base type datatype */
+ ret = H5Tclose(tid3);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close VL Datatype */
+ ret = H5Tclose(tid2);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Change to the custom memory allocation routines for reading VL data */
+ xfer_pid = H5Pcreate(H5P_DATASET_XFER);
+ CHECK(xfer_pid, FAIL, "H5Pcreate");
+
+ ret = H5Pset_vlen_mem_manager(xfer_pid, test_array_alloc_custom, &mem_used, test_array_free_custom,
+ &mem_used);
+ CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");
+
+ /* Make certain the correct amount of memory will be used */
+ ret = H5Dvlen_get_buf_size(dataset, tid1, sid1, &size);
+ CHECK(ret, FAIL, "H5Dvlen_get_buf_size");
+
+ /* # elements allocated = (1 + 2 + 3 + 4) + (2 + 3 + 4 + 5) +
+ * (3 + 4 + 5 + 6) + (4 + 5 + 6 + 7) = 64 elements
+ */
+ VERIFY(size, 64 * sizeof(unsigned int), "H5Dvlen_get_buf_size");
+
+ /* Read dataset from disk */
+ ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, xfer_pid, rdata);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Make certain the correct amount of memory has been used */
+ /* # elements allocated = (1 + 2 + 3 + 4) + (2 + 3 + 4 + 5) +
+ * (3 + 4 + 5 + 6) + (4 + 5 + 6 + 7) = 64 elements
+ */
+ VERIFY(mem_used, 64 * sizeof(unsigned int), "H5Dread");
+
+ /* Compare data read in */
+ for (i = 0; i < SPACE1_DIM1; i++) {
+ for (j = 0; j < ARRAY1_DIM1; j++) {
+ if (wdata[i][j].len != rdata[i][j].len) {
+ TestErrPrintf("VL data length don't match!, wdata[%d][%d].len=%d, rdata[%d][%d].len=%d\n",
+ (int)i, (int)j, (int)wdata[i][j].len, (int)i, (int)j, (int)rdata[i][j].len);
+ continue;
+ } /* end if */
+ for (k = 0; k < (int)rdata[i][j].len; k++) {
+ if (((unsigned int *)wdata[i][j].p)[k] != ((unsigned int *)rdata[i][j].p)[k]) {
+ TestErrPrintf(
+ "VL data values don't match!, wdata[%d][%d].p[%d]=%d, rdata[%d][%d].p[%d]=%d\n",
+ (int)i, (int)j, (int)k, (int)((unsigned int *)wdata[i][j].p)[k], (int)i, (int)j,
+ (int)k, (int)((unsigned int *)rdata[i][j].p)[k]);
+ continue;
+ } /* end if */
+ } /* end for */
+ } /* end for */
+ } /* end for */
+
+ /* Reclaim the read VL data */
+ ret = H5Treclaim(tid1, sid1, xfer_pid, rdata);
+ CHECK(ret, FAIL, "H5Treclaim");
+
+ /* Make certain the VL memory has been freed */
+ VERIFY(mem_used, 0, "H5Treclaim");
+
+ /* Reclaim the write VL data */
+ ret = H5Treclaim(tid1, sid1, H5P_DEFAULT, wdata);
+ CHECK(ret, FAIL, "H5Treclaim");
+
+ /* Close dataset transfer property list */
+ ret = H5Pclose(xfer_pid);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close Datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_array_vlen_atomic() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_array_vlen_array
+ *
+ * Purpose: Test basic array datatype code.
+ * Tests 1-D array of 1-D array VL datatypes.
+ *
+ * Return: void
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+test_array_vlen_array(void)
+{
+ hvl_t wdata[SPACE1_DIM1][ARRAY1_DIM1]; /* Information to write */
+ hvl_t rdata[SPACE1_DIM1][ARRAY1_DIM1]; /* Information read in */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1; /* Array Datatype ID */
+ hid_t tid2; /* VL Datatype ID */
+ hid_t tid3; /* Nested Array Datatype ID */
+ hid_t tid4; /* Atomic Datatype ID */
+ hsize_t sdims1[] = {SPACE1_DIM1};
+ hsize_t tdims1[] = {ARRAY1_DIM1};
+ int ndims; /* Array rank for reading */
+ hsize_t rdims1[H5S_MAX_RANK]; /* Array dimensions for reading */
+ H5T_class_t mclass; /* Datatype class for VL */
+ hid_t xfer_pid; /* Dataset transfer property list ID */
+ hsize_t size; /* Number of bytes which will be used */
+ size_t mem_used = 0; /* Memory used during allocation */
+ int i, j, k, l; /* Index variables */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing 1-D Array of 1-D Array Variable-Length Datatypes Functionality\n"));
+
+ /* Initialize array data to write */
+ for (i = 0; i < SPACE1_DIM1; i++)
+ for (j = 0; j < ARRAY1_DIM1; j++) {
+ wdata[i][j].p = HDmalloc((size_t)(i + j + 1) * sizeof(unsigned int) * (size_t)ARRAY1_DIM1);
+ wdata[i][j].len = (size_t)(i + j + 1);
+ for (k = 0; k < (i + j + 1); k++)
+ for (l = 0; l < ARRAY1_DIM1; l++)
+ ((unsigned int *)wdata[i][j].p)[k * ARRAY1_DIM1 + l] =
+ (unsigned int)(i * 1000 + j * 100 + k * 10 + l);
+ }
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, sdims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create the nested array datatype to refer to */
+ tid3 = H5Tarray_create2(H5T_NATIVE_UINT, ARRAY1_RANK, tdims1);
+ CHECK(tid3, FAIL, "H5Tarray_create2");
+
+ /* Create a VL datatype of 1-D arrays to refer to */
+ tid2 = H5Tvlen_create(tid3);
+ CHECK(tid2, FAIL, "H5Tcreate");
+
+ /* Close nested array datatype */
+ ret = H5Tclose(tid3);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Create an array datatype to refer to */
+ tid1 = H5Tarray_create2(tid2, ARRAY1_RANK, tdims1);
+ CHECK(tid1, FAIL, "H5Tarray_create2");
+
+ /* Close VL datatype */
+ ret = H5Tclose(tid2);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, "Dataset1", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write dataset to disk */
+ ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open file */
+ fid1 = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+
+ /* Get the dataspace */
+ sid1 = H5Dget_space(dataset);
+ CHECK(sid1, FAIL, "H5Dget_space");
+
+ /* Get the datatype */
+ tid1 = H5Dget_type(dataset);
+ CHECK(tid1, FAIL, "H5Dget_type");
+
+ /* Check the array rank */
+ ndims = H5Tget_array_ndims(tid1);
+ VERIFY(ndims, ARRAY1_RANK, "H5Tget_array_ndims");
+
+ /* Get the array dimensions */
+ ret = H5Tget_array_dims2(tid1, rdims1);
+ CHECK(ret, FAIL, "H5Tget_array_dims2");
+
+ /* Check the array dimensions */
+ for (i = 0; i < ndims; i++)
+ if (rdims1[i] != tdims1[i]) {
+ TestErrPrintf("Array dimension information doesn't match!, rdims1[%d]=%" PRIuHSIZE
+ ", tdims1[%d]=%" PRIuHSIZE "\n",
+ i, rdims1[i], i, tdims1[i]);
+ continue;
+ } /* end if */
+
+ /* Get the VL datatype */
+ tid2 = H5Tget_super(tid1);
+ CHECK(tid2, FAIL, "H5Tget_super");
+
+ /* Get the VL datatype's class */
+ mclass = H5Tget_class(tid2);
+ VERIFY(mclass, H5T_VLEN, "H5Tget_class");
+
+ /* Check the VL datatype's base type */
+ tid3 = H5Tget_super(tid2);
+ CHECK(tid3, FAIL, "H5Tget_super");
+
+ /* Get the nested array datatype's class */
+ mclass = H5Tget_class(tid3);
+ VERIFY(mclass, H5T_ARRAY, "H5Tget_class");
+
+ /* Check the array rank */
+ ndims = H5Tget_array_ndims(tid3);
+ VERIFY(ndims, ARRAY1_RANK, "H5Tget_array_ndims");
+
+ /* Get the array dimensions */
+ ret = H5Tget_array_dims2(tid3, rdims1);
+ CHECK(ret, FAIL, "H5Tget_array_dims2");
+
+ /* Check the array dimensions */
+ for (i = 0; i < ndims; i++)
+ if (rdims1[i] != tdims1[i]) {
+ TestErrPrintf("Array dimension information doesn't match!, rdims1[%d]=%" PRIuHSIZE
+ ", tdims1[%d]=%" PRIuHSIZE "\n",
+ i, rdims1[i], i, tdims1[i]);
+ continue;
+ } /* end if */
+
+ /* Check the array's base type */
+ tid4 = H5Tget_super(tid3);
+ CHECK(tid4, FAIL, "H5Tget_super");
+
+ if ((ret = H5Tequal(tid4, H5T_NATIVE_UINT)) <= 0)
+ TestErrPrintf("VL base datatype is incorrect!, ret=%d\n", (int)ret);
+
+ /* Close the array's base type datatype */
+ ret = H5Tclose(tid4);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close the nested array datatype */
+ ret = H5Tclose(tid3);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close VL Datatype */
+ ret = H5Tclose(tid2);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Change to the custom memory allocation routines for reading VL data */
+ xfer_pid = H5Pcreate(H5P_DATASET_XFER);
+ CHECK(xfer_pid, FAIL, "H5Pcreate");
+
+ ret = H5Pset_vlen_mem_manager(xfer_pid, test_array_alloc_custom, &mem_used, test_array_free_custom,
+ &mem_used);
+ CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");
+
+ /* Make certain the correct amount of memory will be used */
+ ret = H5Dvlen_get_buf_size(dataset, tid1, sid1, &size);
+ CHECK(ret, FAIL, "H5Dvlen_get_buf_size");
+
+ /* # elements allocated = (1 + 2 + 3 + 4) + (2 + 3 + 4 + 5) +
+ * (3 + 4 + 5 + 6) + (4 + 5 + 6 + 7) = 64*ARRAY1_DIM1 elements
+ */
+ VERIFY(size, 64 * (sizeof(unsigned int) * ARRAY1_DIM1), "H5Dvlen_get_buf_size");
+
+ /* Read dataset from disk */
+ ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, xfer_pid, rdata);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Make certain the correct amount of memory has been used */
+ /* # elements allocated = (1 + 2 + 3 + 4) + (2 + 3 + 4 + 5) +
+ * (3 + 4 + 5 + 6) + (4 + 5 + 6 + 7) = 64*ARRAY1_DIM1 elements
+ */
+ VERIFY(mem_used, 64 * (sizeof(unsigned int) * ARRAY1_DIM1), "H5Dread");
+
+ /* Compare data read in */
+ for (i = 0; i < SPACE1_DIM1; i++) {
+ for (j = 0; j < ARRAY1_DIM1; j++) {
+ if (wdata[i][j].len != rdata[i][j].len) {
+ TestErrPrintf("VL data length don't match!, wdata[%d][%d].len=%d, rdata[%d][%d].len=%d\n",
+ (int)i, (int)j, (int)wdata[i][j].len, (int)i, (int)j, (int)rdata[i][j].len);
+ continue;
+ } /* end if */
+ for (k = 0; k < (int)rdata[i][j].len; k++) {
+ for (l = 0; l < ARRAY1_DIM1; l++) {
+ if (((unsigned int *)wdata[i][j].p)[k * ARRAY1_DIM1 + l] !=
+ ((unsigned int *)rdata[i][j].p)[k * ARRAY1_DIM1 + l]) {
+ TestErrPrintf("VL data values don't match!, wdata[%d][%d].p[%d][%d]=%d, "
+ "rdata[%d][%d].p[%d][%d]=%d\n",
+ (int)i, (int)j, (int)k, (int)l,
+ (int)((unsigned int *)wdata[i][j].p)[k * ARRAY1_DIM1 + l], (int)i,
+ (int)j, (int)k, (int)l,
+ (int)((unsigned int *)rdata[i][j].p)[k * ARRAY1_DIM1 + l]);
+ continue;
+ } /* end if */
+ } /* end for */
+ } /* end for */
+ } /* end for */
+ } /* end for */
+
+ /* Reclaim the read VL data */
+ ret = H5Treclaim(tid1, sid1, xfer_pid, rdata);
+ CHECK(ret, FAIL, "H5Treclaim");
+
+ /* Make certain the VL memory has been freed */
+ VERIFY(mem_used, 0, "H5Treclaim");
+
+ /* Reclaim the write VL data */
+ ret = H5Treclaim(tid1, sid1, H5P_DEFAULT, wdata);
+ CHECK(ret, FAIL, "H5Treclaim");
+
+ /* Close dataset transfer property list */
+ ret = H5Pclose(xfer_pid);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close Datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_array_vlen_array() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_array_bkg
+ *
+ * Purpose: Test basic array datatype code.
+ * Tests reading compound datatype with array fields and
+ * writing partial fields.
+ *
+ * Return: void
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+test_array_bkg(void)
+{
+ herr_t status = -1;
+
+ hid_t fid, array_dt;
+ hid_t space;
+ hid_t type;
+ hid_t dataset;
+
+ hsize_t dim[] = {LENGTH};
+ hsize_t dima[] = {ALEN};
+
+ int i, j;
+ unsigned ndims[3] = {1, 1, 1};
+
+ typedef struct {
+ int a[ALEN];
+ float b[ALEN];
+ double c[ALEN];
+ } CmpField;
+
+ CmpField cf[LENGTH];
+ CmpField cfr[LENGTH];
+ CmpDTSinfo *dtsinfo = NULL;
+
+ typedef struct {
+ float b[ALEN];
+ } fld_t;
+
+ fld_t fld[LENGTH];
+ fld_t fldr[LENGTH];
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Partial I/O of Array Fields in Compound Datatype Functionality\n"));
+
+ /* Initialize the data */
+ /* ------------------- */
+ dtsinfo = (CmpDTSinfo *)HDmalloc(sizeof(CmpDTSinfo));
+ CHECK_PTR(dtsinfo, "HDmalloc");
+ HDmemset(dtsinfo, 0, sizeof(CmpDTSinfo));
+ for (i = 0; i < LENGTH; i++) {
+ for (j = 0; j < ALEN; j++) {
+ cf[i].a[j] = 100 * (i + 1) + j;
+ cf[i].b[j] = 100.0F * ((float)i + 1.0F) + 0.01F * (float)j;
+ cf[i].c[j] = (double)(100.0F * ((float)i + 1.0F) + 0.02F * (float)j);
+ } /* end for */
+ } /* end for */
+
+ /* Set the number of data members */
+ /* ------------------------------ */
+ dtsinfo->nsubfields = 3;
+
+ /* Initialize the offsets */
+ /* ----------------------- */
+ dtsinfo->offset[0] = HOFFSET(CmpField, a);
+ dtsinfo->offset[1] = HOFFSET(CmpField, b);
+ dtsinfo->offset[2] = HOFFSET(CmpField, c);
+
+ /* Initialize the data type IDs */
+ /* ---------------------------- */
+ dtsinfo->datatype[0] = H5T_NATIVE_INT;
+ dtsinfo->datatype[1] = H5T_NATIVE_FLOAT;
+ dtsinfo->datatype[2] = H5T_NATIVE_DOUBLE;
+
+ /* Initialize the names of data members */
+ /* ------------------------------------ */
+ for (i = 0; i < dtsinfo->nsubfields; i++)
+ dtsinfo->name[i] = (char *)HDcalloc((size_t)20, sizeof(char));
+
+ HDstrcpy(dtsinfo->name[0], "One");
+ HDstrcpy(dtsinfo->name[1], "Two");
+ HDstrcpy(dtsinfo->name[2], "Three");
+
+ /* Create file */
+ /* ----------- */
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create data space */
+ /* ----------------- */
+ space = H5Screate_simple(RANK, dim, NULL);
+ CHECK(space, FAIL, "H5Screate_simple");
+
+ /* Create the memory data type */
+ /* --------------------------- */
+ type = H5Tcreate(H5T_COMPOUND, sizeof(CmpField));
+ CHECK(type, FAIL, "H5Tcreate");
+
+ /* Add members to the compound data type */
+ /* -------------------------------------- */
+ for (i = 0; i < dtsinfo->nsubfields; i++) {
+ array_dt = H5Tarray_create2(dtsinfo->datatype[i], ndims[i], dima);
+ CHECK(array_dt, FAIL, "H5Tarray_create2");
+
+ status = H5Tinsert(type, dtsinfo->name[i], dtsinfo->offset[i], array_dt);
+ CHECK(status, FAIL, "H5Tinsert");
+
+ status = H5Tclose(array_dt);
+ CHECK(status, FAIL, "H5Tclose");
+ } /* end for */
+
+ /* Create the dataset */
+ /* ------------------ */
+ dataset = H5Dcreate2(fid, FIELDNAME, type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write data to the dataset */
+ /* ------------------------- */
+ status = H5Dwrite(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, cf);
+ CHECK(status, FAIL, "H5Dwrite");
+
+ status = H5Dread(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, cfr);
+ CHECK(status, FAIL, "H5Dread");
+
+ /* Verify correct data */
+ /* ------------------- */
+ for (i = 0; i < LENGTH; i++) {
+ for (j = 0; j < ALEN; j++) {
+ if (cf[i].a[j] != cfr[i].a[j]) {
+ TestErrPrintf("Field a data doesn't match, cf[%d].a[%d]=%d, cfr[%d].a[%d]=%d\n", (int)i,
+ (int)j, (int)cf[i].a[j], (int)i, (int)j, (int)cfr[i].a[j]);
+ continue;
+ } /* end if */
+ if (!H5_FLT_ABS_EQUAL(cf[i].b[j], cfr[i].b[j])) {
+ TestErrPrintf("Field b data doesn't match, cf[%d].b[%d]=%f, cfr[%d].b[%d]=%f\n", (int)i,
+ (int)j, (double)cf[i].b[j], (int)i, (int)j, (double)cfr[i].b[j]);
+ continue;
+ } /* end if */
+ if (!H5_DBL_ABS_EQUAL(cf[i].c[j], cfr[i].c[j])) {
+ TestErrPrintf("Field c data doesn't match, cf[%d].b[%d]=%f, cfr[%d].b[%d]=%f\n", (int)i,
+ (int)j, (double)cf[i].c[j], (int)i, (int)j, (double)cfr[i].c[j]);
+ continue;
+ } /* end if */
+ } /* end for */
+ } /* end for */
+
+ /* Release memory resources */
+ /* ------------------------ */
+ for (i = 0; i < dtsinfo->nsubfields; i++)
+ HDfree(dtsinfo->name[i]);
+
+ /* Release IDs */
+ /* ----------- */
+ status = H5Tclose(type);
+ CHECK(status, FAIL, "H5Tclose");
+
+ status = H5Sclose(space);
+ CHECK(status, FAIL, "H5Sclose");
+
+ status = H5Dclose(dataset);
+ CHECK(status, FAIL, "H5Dclose");
+
+ status = H5Fclose(fid);
+ CHECK(status, FAIL, "H5Fclose");
+
+ /******************************/
+ /* Reopen the file and update */
+ /******************************/
+
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ dataset = H5Dopen2(fid, FIELDNAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+
+ type = H5Tcreate(H5T_COMPOUND, sizeof(fld_t));
+ CHECK(type, FAIL, "H5Tcreate");
+
+ array_dt = H5Tarray_create2(H5T_NATIVE_FLOAT, 1, dima);
+ CHECK(array_dt, FAIL, "H5Tarray_create2");
+
+ status = H5Tinsert(type, "Two", HOFFSET(fld_t, b), array_dt);
+ CHECK(status, FAIL, "H5Tinsert");
+
+ /* Initialize the data to overwrite */
+ /* -------------------------------- */
+ for (i = 0; i < LENGTH; i++)
+ for (j = 0; j < ALEN; j++)
+ cf[i].b[j] = fld[i].b[j] = 1.313F;
+
+ status = H5Dwrite(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, fld);
+ CHECK(status, FAIL, "H5Dwrite");
+
+ /* Read just the field changed */
+ status = H5Dread(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, fldr);
+ CHECK(status, FAIL, "H5Dread");
+
+ for (i = 0; i < LENGTH; i++)
+ for (j = 0; j < ALEN; j++)
+ if (!H5_FLT_ABS_EQUAL(fld[i].b[j], fldr[i].b[j])) {
+ TestErrPrintf("Field data doesn't match, fld[%d].b[%d]=%f, fldr[%d].b[%d]=%f\n", (int)i,
+ (int)j, (double)fld[i].b[j], (int)i, (int)j, (double)fldr[i].b[j]);
+ continue;
+ } /* end if */
+
+ status = H5Tclose(type);
+ CHECK(status, FAIL, "H5Tclose");
+
+ status = H5Tclose(array_dt);
+ CHECK(status, FAIL, "H5Tclose");
+
+ type = H5Dget_type(dataset);
+ CHECK(type, FAIL, "H5Dget_type");
+
+ /* Read the entire dataset again */
+ status = H5Dread(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, cfr);
+ CHECK(status, FAIL, "H5Dread");
+
+ /* Verify correct data */
+ /* ------------------- */
+ for (i = 0; i < LENGTH; i++) {
+ for (j = 0; j < ALEN; j++) {
+ if (cf[i].a[j] != cfr[i].a[j]) {
+ TestErrPrintf("Field a data doesn't match, cf[%d].a[%d]=%d, cfr[%d].a[%d]=%d\n", (int)i,
+ (int)j, (int)cf[i].a[j], (int)i, (int)j, (int)cfr[i].a[j]);
+ continue;
+ } /* end if */
+ if (!H5_FLT_ABS_EQUAL(cf[i].b[j], cfr[i].b[j])) {
+ TestErrPrintf("Field b data doesn't match, cf[%d].b[%d]=%f, cfr[%d].b[%d]=%f\n", (int)i,
+ (int)j, (double)cf[i].b[j], (int)i, (int)j, (double)cfr[i].b[j]);
+ continue;
+ } /* end if */
+ if (!H5_DBL_ABS_EQUAL(cf[i].c[j], cfr[i].c[j])) {
+ TestErrPrintf("Field c data doesn't match, cf[%d].b[%d]=%f, cfr[%d].b[%d]=%f\n", (int)i,
+ (int)j, (double)cf[i].c[j], (int)i, (int)j, (double)cfr[i].c[j]);
+ continue;
+ } /* end if */
+ } /* end for */
+ } /* end for */
+
+ status = H5Dclose(dataset);
+ CHECK(status, FAIL, "H5Dclose");
+
+ status = H5Tclose(type);
+ CHECK(status, FAIL, "H5Tclose");
+
+ status = H5Fclose(fid);
+ CHECK(status, FAIL, "H5Fclose");
+
+ /****************************************************/
+ /* Reopen the file and print out all the data again */
+ /****************************************************/
+
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ dataset = H5Dopen2(fid, FIELDNAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+
+ type = H5Dget_type(dataset);
+ CHECK(type, FAIL, "H5Dget_type");
+
+ /* Reset the data to read in */
+ /* ------------------------- */
+ HDmemset(cfr, 0, sizeof(CmpField) * LENGTH);
+
+ status = H5Dread(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, cfr);
+ CHECK(status, FAIL, "H5Dread");
+
+ /* Verify correct data */
+ /* ------------------- */
+ for (i = 0; i < LENGTH; i++) {
+ for (j = 0; j < ALEN; j++) {
+ if (cf[i].a[j] != cfr[i].a[j]) {
+ TestErrPrintf("Field a data doesn't match, cf[%d].a[%d]=%d, cfr[%d].a[%d]=%d\n", (int)i,
+ (int)j, (int)cf[i].a[j], (int)i, (int)j, (int)cfr[i].a[j]);
+ continue;
+ } /* end if */
+ if (!H5_FLT_ABS_EQUAL(cf[i].b[j], cfr[i].b[j])) {
+ TestErrPrintf("Field b data doesn't match, cf[%d].b[%d]=%f, cfr[%d].b[%d]=%f\n", (int)i,
+ (int)j, (double)cf[i].b[j], (int)i, (int)j, (double)cfr[i].b[j]);
+ continue;
+ } /* end if */
+ if (!H5_DBL_ABS_EQUAL(cf[i].c[j], cfr[i].c[j])) {
+ TestErrPrintf("Field c data doesn't match, cf[%d].b[%d]=%f, cfr[%d].b[%d]=%f\n", (int)i,
+ (int)j, (double)cf[i].c[j], (int)i, (int)j, (double)cfr[i].c[j]);
+ continue;
+ } /* end if */
+ } /* end for */
+ } /* end for */
+
+ status = H5Dclose(dataset);
+ CHECK(status, FAIL, "H5Dclose");
+
+ status = H5Tclose(type);
+ CHECK(status, FAIL, "H5Tclose");
+
+ status = H5Fclose(fid);
+ CHECK(status, FAIL, "H5Fclose");
+
+ HDfree(dtsinfo);
+} /* end test_array_bkg() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_compat
+ *
+ * Purpose: Test array datatype compatibility code.
+ *
+ * Reads file containing old version of datatype object header
+ * messages for compound datatypes and verifies reading the older
+ * version of the is working correctly.
+ *
+ * Return: void
+ *
+ *-------------------------------------------------------------------------
+ */
+#if 0
+static void
+test_compat(void)
+{
+ const char *testfile = H5_get_srcdir_filename(TESTFILE); /* Corrected test file name */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t tid1; /* Array Datatype ID */
+ hid_t tid2; /* Datatype ID */
+ hsize_t tdims1[] = {ARRAY1_DIM1};
+ int ndims; /* Array rank for reading */
+ hsize_t rdims1[H5S_MAX_RANK]; /* Array dimensions for reading */
+ H5T_class_t mclass; /* Datatype class for VL */
+ int nmemb; /* Number of compound members */
+ char *mname; /* Name of compound field */
+ size_t off; /* Offset of compound field */
+ hid_t mtid; /* Datatype ID for field */
+ int i; /* Index variables */
+ hbool_t driver_is_default_compatible;
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Array Datatypes Compatibility Functionality\n"));
+
+ /*
+ * Try reading a file that has been prepared that has datasets with
+ * compound datatypes which use an older version (version 1) of the
+ * datatype object header message for describing the datatype.
+ *
+ * If this test fails and the datatype object header message version has
+ * changed, follow the instructions in gen_old_array.c for regenerating
+ * the tarrold.h5 file.
+ */
+
+ if (h5_driver_is_default_vfd_compatible(H5P_DEFAULT, &driver_is_default_compatible) < 0)
+ TestErrPrintf("can't check if VFD is default VFD compatible\n");
+ if (!driver_is_default_compatible) {
+ HDprintf(" -- SKIPPED --\n");
+ return;
+ }
+
+ /* Open the testfile */
+ fid1 = H5Fopen(testfile, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK_I(fid1, "H5Fopen");
+
+ /* Only try to proceed if the file is around */
+ if (fid1 >= 0) {
+ /* Open the first dataset (with no array fields) */
+ dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+ CHECK_I(dataset, "H5Dopen2");
+
+ /* Get the datatype */
+ tid1 = H5Dget_type(dataset);
+ CHECK_I(tid1, "H5Dget_type");
+
+ /* Verify datatype class */
+ mclass = H5Tget_class(tid1);
+ VERIFY(mclass, H5T_COMPOUND, "H5Tget_class");
+
+ /* Get the number of compound datatype fields */
+ nmemb = H5Tget_nmembers(tid1);
+ VERIFY(nmemb, 3, "H5Tget_nmembers");
+
+ /* Check the 1st field's name */
+ mname = H5Tget_member_name(tid1, 0);
+ CHECK_PTR(mname, "H5Tget_member_name");
+ if (HDstrcmp(mname, "i") != 0)
+ TestErrPrintf("Compound field name doesn't match!, mname=%s\n", mname);
+ H5free_memory(mname);
+
+ /* Check the 1st field's offset */
+ off = H5Tget_member_offset(tid1, 0);
+ VERIFY(off, 0, "H5Tget_member_offset");
+
+ /* Check the 1st field's datatype */
+ mtid = H5Tget_member_type(tid1, 0);
+ CHECK(mtid, FAIL, "H5Tget_member_type");
+ if ((ret = H5Tequal(mtid, H5T_STD_I16LE)) <= 0)
+ TestErrPrintf("Compound data type is incorrect!, ret=%d\n", (int)ret);
+ ret = H5Tclose(mtid);
+ CHECK(mtid, FAIL, "H5Tclose");
+
+ /* Check the 2nd field's name */
+ mname = H5Tget_member_name(tid1, 1);
+ CHECK_PTR(mname, "H5Tget_member_name");
+ if (HDstrcmp(mname, "f") != 0)
+ TestErrPrintf("Compound field name doesn't match!, mname=%s\n", mname);
+ H5free_memory(mname);
+
+ /* Check the 2nd field's offset */
+ off = H5Tget_member_offset(tid1, 1);
+ VERIFY(off, 4, "H5Tget_member_offset");
+
+ /* Check the 2nd field's datatype */
+ mtid = H5Tget_member_type(tid1, 1);
+ CHECK(mtid, FAIL, "H5Tget_member_type");
+ if ((ret = H5Tequal(mtid, H5T_IEEE_F32LE)) <= 0)
+ TestErrPrintf("Compound data type is incorrect!, ret=%d\n", (int)ret);
+ ret = H5Tclose(mtid);
+ CHECK(mtid, FAIL, "H5Tclose");
+
+ /* Check the 3rd field's name */
+ mname = H5Tget_member_name(tid1, 2);
+ CHECK_PTR(mname, "H5Tget_member_name");
+ if (HDstrcmp(mname, "l") != 0)
+ TestErrPrintf("Compound field name doesn't match!, mname=%s\n", mname);
+ H5free_memory(mname);
+
+ /* Check the 3rd field's offset */
+ off = H5Tget_member_offset(tid1, 2);
+ VERIFY(off, 8, "H5Tget_member_offset");
+
+ /* Check the 3rd field's datatype */
+ mtid = H5Tget_member_type(tid1, 2);
+ CHECK(mtid, FAIL, "H5Tget_member_type");
+ if ((ret = H5Tequal(mtid, H5T_STD_I32LE)) <= 0)
+ TestErrPrintf("Compound data type is incorrect!, ret=%d\n", (int)ret);
+ ret = H5Tclose(mtid);
+ CHECK(mtid, FAIL, "H5Tclose");
+
+ /* Close the datatype */
+ ret = H5Tclose(tid1);
+ CHECK_I(ret, "H5Tclose");
+
+ /* Close the dataset */
+ ret = H5Dclose(dataset);
+ CHECK_I(ret, "H5Dclose");
+
+ /* Open the second dataset (with array fields) */
+ dataset = H5Dopen2(fid1, "Dataset2", H5P_DEFAULT);
+ CHECK_I(dataset, "H5Dopen2");
+
+ /* Get the datatype */
+ tid1 = H5Dget_type(dataset);
+ CHECK_I(tid1, "H5Dget_type");
+
+ /* Verify datatype class */
+ mclass = H5Tget_class(tid1);
+ VERIFY(mclass, H5T_COMPOUND, "H5Tget_class");
+
+ /* Get the number of compound datatype fields */
+ nmemb = H5Tget_nmembers(tid1);
+ VERIFY(nmemb, 4, "H5Tget_nmembers");
+
+ /* Check the 1st field's name */
+ mname = H5Tget_member_name(tid1, 0);
+ CHECK_PTR(mname, "H5Tget_member_name");
+ if (mname && HDstrcmp(mname, "i") != 0)
+ TestErrPrintf("Compound field name doesn't match!, mname=%s\n", mname);
+ if (mname)
+ H5free_memory(mname);
+
+ /* Check the 1st field's offset */
+ off = H5Tget_member_offset(tid1, 0);
+ VERIFY(off, 0, "H5Tget_member_offset");
+
+ /* Check the 1st field's datatype */
+ mtid = H5Tget_member_type(tid1, 0);
+ CHECK(mtid, FAIL, "H5Tget_member_type");
+ if ((ret = H5Tequal(mtid, H5T_STD_I16LE)) <= 0)
+ TestErrPrintf("Compound data type is incorrect!, ret=%d\n", (int)ret);
+ ret = H5Tclose(mtid);
+ CHECK(mtid, FAIL, "H5Tclose");
+
+ /* Check the 2nd field's name */
+ mname = H5Tget_member_name(tid1, 1);
+ CHECK_PTR(mname, "H5Tget_member_name");
+ if (mname && HDstrcmp(mname, "f") != 0)
+ TestErrPrintf("Compound field name doesn't match!, mname=%s\n", mname);
+ if (mname)
+ H5free_memory(mname);
+
+ /* Check the 2nd field's offset */
+ off = H5Tget_member_offset(tid1, 1);
+ VERIFY(off, 4, "H5Tget_member_offset");
+
+ /* Check the 2nd field's datatype */
+ mtid = H5Tget_member_type(tid1, 1);
+ CHECK(mtid, FAIL, "H5Tget_member_type");
+
+ /* Verify datatype class */
+ mclass = H5Tget_class(mtid);
+ VERIFY(mclass, H5T_ARRAY, "H5Tget_class");
+
+ /* Check the array rank */
+ ndims = H5Tget_array_ndims(mtid);
+ VERIFY(ndims, ARRAY1_RANK, "H5Tget_array_ndims");
+
+ /* Get the array dimensions */
+ ret = H5Tget_array_dims2(mtid, rdims1);
+ CHECK(ret, FAIL, "H5Tget_array_dims2");
+
+ /* Check the array dimensions */
+ for (i = 0; i < ndims; i++)
+ if (rdims1[i] != tdims1[i]) {
+ TestErrPrintf("Array dimension information doesn't match!, rdims1[%d]=%" PRIuHSIZE
+ ", tdims1[%d]=%" PRIuHSIZE "\n",
+ i, rdims1[i], i, tdims1[i]);
+ continue;
+ } /* end if */
+
+ /* Check the array's base datatype */
+ tid2 = H5Tget_super(mtid);
+ CHECK(tid2, FAIL, "H5Tget_super");
+
+ if ((ret = H5Tequal(tid2, H5T_IEEE_F32LE)) <= 0)
+ TestErrPrintf("Compound data type is incorrect!, ret=%d\n", (int)ret);
+ ret = H5Tclose(tid2);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Tclose(mtid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Check the 3rd field's name */
+ mname = H5Tget_member_name(tid1, 2);
+ CHECK_PTR(mname, "H5Tget_member_name");
+ if (mname && HDstrcmp(mname, "l") != 0)
+ TestErrPrintf("Compound field name doesn't match!, mname=%s\n", mname);
+ if (mname)
+ H5free_memory(mname);
+
+ /* Check the 3rd field's offset */
+ off = H5Tget_member_offset(tid1, 2);
+ VERIFY(off, 20, "H5Tget_member_offset");
+
+ /* Check the 3rd field's datatype */
+ mtid = H5Tget_member_type(tid1, 2);
+ CHECK(mtid, FAIL, "H5Tget_member_type");
+
+ /* Verify datatype class */
+ mclass = H5Tget_class(mtid);
+ VERIFY(mclass, H5T_ARRAY, "H5Tget_class");
+
+ /* Check the array rank */
+ ndims = H5Tget_array_ndims(mtid);
+ VERIFY(ndims, ARRAY1_RANK, "H5Tget_array_ndims");
+
+ /* Get the array dimensions */
+ ret = H5Tget_array_dims2(mtid, rdims1);
+ CHECK(ret, FAIL, "H5Tget_array_dims2");
+
+ /* Check the array dimensions */
+ for (i = 0; i < ndims; i++)
+ if (rdims1[i] != tdims1[i]) {
+ TestErrPrintf("Array dimension information doesn't match!, rdims1[%d]=%" PRIuHSIZE
+ ", tdims1[%d]=%" PRIuHSIZE "\n",
+ i, rdims1[i], i, tdims1[i]);
+ continue;
+ } /* end if */
+
+ /* Check the array's base datatype */
+ tid2 = H5Tget_super(mtid);
+ CHECK(tid2, FAIL, "H5Tget_super");
+
+ if ((ret = H5Tequal(tid2, H5T_STD_I32LE)) <= 0)
+ TestErrPrintf("Compound data type is incorrect!, ret=%d\n", (int)ret);
+ ret = H5Tclose(tid2);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Tclose(mtid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Check the 4th field's name */
+ mname = H5Tget_member_name(tid1, 3);
+ CHECK_PTR(mname, "H5Tget_member_name");
+ if (mname && HDstrcmp(mname, "d") != 0)
+ TestErrPrintf("Compound field name doesn't match!, mname=%s\n", mname);
+ if (mname)
+ H5free_memory(mname);
+
+ /* Check the 4th field's offset */
+ off = H5Tget_member_offset(tid1, 3);
+ VERIFY(off, 36, "H5Tget_member_offset");
+
+ /* Check the 4th field's datatype */
+ mtid = H5Tget_member_type(tid1, 3);
+ CHECK(mtid, FAIL, "H5Tget_member_type");
+ if ((ret = H5Tequal(mtid, H5T_IEEE_F64LE)) <= 0)
+ TestErrPrintf("Compound data type is incorrect!, ret=%d\n", (int)ret);
+ ret = H5Tclose(mtid);
+ CHECK(mtid, FAIL, "H5Tclose");
+
+ /* Close the datatype */
+ ret = H5Tclose(tid1);
+ CHECK_I(ret, "H5Tclose");
+
+ /* Close the dataset */
+ ret = H5Dclose(dataset);
+ CHECK_I(ret, "H5Dclose");
+
+ /* Close the file */
+ ret = H5Fclose(fid1);
+ CHECK_I(ret, "H5Fclose");
+ } /* end if */
+ else
+ HDprintf("***cannot open the pre-created compound datatype test file (%s)\n", testfile);
+
+} /* end test_compat() */
+#endif
+
+/*-------------------------------------------------------------------------
+ * Function: test_array
+ *
+ * Purpose: Main array datatype testing routine.
+ *
+ * Return: void
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+test_array(void)
+{
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Array Datatypes\n"));
+
+ /* These tests all share the same test file; they run in sequence */
+ test_array_atomic_1d(); /* Test 1-D array of atomic datatypes */
+ test_array_atomic_3d(); /* Test 3-D array of atomic datatypes */
+ test_array_array_atomic(); /* Test 1-D array of 2-D arrays of atomic datatypes */
+ test_array_compound_atomic(); /* Test 1-D array of compound datatypes (with no array fields) */
+ test_array_compound_array(); /* Test 1-D array of compound datatypes (with array fields) */
+ test_array_vlen_atomic(); /* Test 1-D array of atomic VL datatypes */
+ test_array_vlen_array(); /* Test 1-D array of 1-D array VL datatypes */
+ test_array_funcs(); /* Test type functions with array types */
+
+ test_array_bkg(); /* Read compound datatype with array fields and background fields read */
+#if 0
+ /* This test uses a custom file: test_compat() is compiled out because it
+ * requires a pre-created compound-datatype compatibility test file */
+ test_compat(); /* Test compatibility changes for compound datatype fields */
+#endif
+} /* end test_array() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_array
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: void
+ *
+ * Programmer: Quincey Koziol
+ * June 8, 1999
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_array(void)
+{
+ /* Delete the shared test file (FILENAME) used by the array tests above */
+ H5Fdelete(FILENAME, H5P_DEFAULT);
+} /* end cleanup_array() */
diff --git a/test/API/tattr.c b/test/API/tattr.c
new file mode 100644
index 0000000..d006eb8
--- /dev/null
+++ b/test/API/tattr.c
@@ -0,0 +1,11929 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: tattr
+ *
+ * Test the attribute functionality
+ *
+ *************************************************************/
+
+#include "testhdf5.h"
+
+#if 0
+#include "H5VLnative_private.h"
+
+/*
+ * This file needs to access private information from the H5O package.
+ * This file also needs to access the object header testing code.
+ */
+#define H5O_FRIEND /*suppress error about including H5Opkg */
+#define H5O_TESTING
+#include "H5Opkg.h" /* Object headers */
+
+/*
+ * This file needs to access private information from the H5A package.
+ * This file also needs to access the attribute testing code.
+ */
+#define H5A_FRIEND /*suppress error about including H5Apkg */
+#define H5A_TESTING
+#include "H5Apkg.h" /* Attributes */
+
+/*
+ * This file needs to access private information from the H5F package.
+ * This file also needs to access the file testing code.
+ */
+#define H5F_FRIEND /*suppress error about including H5Fpkg */
+#define H5F_TESTING
+#include "H5Fpkg.h" /* File access */
+#endif
+
+#define FILENAME "tattr.h5"
+#define NAME_BUF_SIZE 1024
+#define ATTR_NAME_LEN 16
+#define ATTR_MAX_DIMS 7
+#define ATTR_TMP_NAME "a really long temp_name"
+#define CORDER_ITER_STOP 3
+
+/* 3-D dataset with fixed dimensions */
+#define SPACE1_RANK 3
+#define SPACE1_DIM1 3
+#define SPACE1_DIM2 15
+#define SPACE1_DIM3 13
+
+/* Dataset Information */
+#define DSET1_NAME "Dataset1"
+#define DSET2_NAME "Dataset2"
+#define DSET3_NAME "Dataset3"
+#define NUM_DSETS 3
+
+/* Group Information */
+#define GROUP1_NAME "/Group1"
+#define GROUP2_NAME "/Group2"
+#define GROUP3_NAME "/Group3"
+
+/* Named Datatype Information */
+#define TYPE1_NAME "/Type"
+
+/* Attribute Rank & Dimensions */
+#define ATTR1_NAME "Attr1"
+#define ATTR1_RANK 1
+#define ATTR1_DIM1 3
+int attr_data1[ATTR1_DIM1] = {512, -234, 98123}; /* Test data for 1st attribute */
+
+/* rank & dimensions for another attribute */
+#define ATTR1A_NAME "Attr1_a"
+int attr_data1a[ATTR1_DIM1] = {256, 11945, -22107};
+
+#define ATTR2_NAME "Attr2"
+#define ATTR2_RANK 2
+#define ATTR2_DIM1 2
+#define ATTR2_DIM2 2
+int attr_data2[ATTR2_DIM1][ATTR2_DIM2] = {{7614, -416}, {197814, -3}}; /* Test data for 2nd attribute */
+
+#define ATTR3_NAME "Attr3"
+#define ATTR3_RANK 3
+#define ATTR3_DIM1 2
+#define ATTR3_DIM2 2
+#define ATTR3_DIM3 2
+double attr_data3[ATTR3_DIM1][ATTR3_DIM2][ATTR3_DIM3] = {
+ {{2.3, -26.1}, {0.123, -10.0}}, {{973.23, -0.91827}, {2.0, 23.0}}}; /* Test data for 3rd attribute */
+
+#define ATTR4_NAME "Attr4"
+#define ATTR4_RANK 2
+#define ATTR4_DIM1 2
+#define ATTR4_DIM2 2
+#define ATTR4_FIELDNAME1 "i"
+#define ATTR4_FIELDNAME2 "d"
+#define ATTR4_FIELDNAME3 "c"
+size_t attr4_field1_off = 0;
+size_t attr4_field2_off = 0;
+size_t attr4_field3_off = 0;
+struct attr4_struct {
+ int i;
+ double d;
+ char c;
+} attr_data4[ATTR4_DIM1][ATTR4_DIM2] = {
+ {{3, -26.1, 'd'}, {-100000, 0.123, '3'}},
+ {{-23, 981724.2, 'Q'}, {0, 2.0, '\n'}}}; /* Test data for 4th attribute */
+
+#define ATTR5_NAME "Attr5"
+#define ATTR5_RANK 0
+float attr_data5 = -5.123F; /* Test data for 5th attribute */
+
+#define ATTR6_RANK 3
+#define ATTR6_DIM1 100
+#define ATTR6_DIM2 100
+#define ATTR6_DIM3 100
+
+#define ATTR7_NAME "attr 1 - 000000"
+#define ATTR8_NAME "attr 2"
+
+#define LINK1_NAME "Link1"
+
+#define NATTR_MANY_OLD 350
+#define NATTR_MANY_NEW 3500
+
+#define BUG2_NATTR 100
+#define BUG2_NATTR2 16
+
+#define BUG3_DSET_NAME "dset"
+#define BUG3_DT_NAME "dt"
+#define BUG3_ATTR_NAME "attr"
+
+/* Used by test_attr_delete_last_dense() */
+#define GRPNAME "grp"
+#define ATTRNAME "attr"
+#define DIM0 100
+#define DIM1 100
+#define RANK 2
+
+/* Used by test_attr_info_null_info_pointer() */
+#define GET_INFO_NULL_POINTER_ATTR_NAME "NullInfoPointerAttr"
+
+/* Used by test_attr_rename_invalid_name() */
+#define INVALID_RENAME_TEST_ATTR_NAME "InvalidRenameTestAttr"
+#define INVALID_RENAME_TEST_NEW_ATTR_NAME "InvalidRenameTestNewAttr"
+
+/* Used by test_attr_get_name_invalid_buf() */
+#define GET_NAME_INVALID_BUF_TEST_ATTR_NAME "InvalidNameBufferTestAttr"
+
+/* Attribute iteration struct */
+typedef struct {
+ H5_iter_order_t order; /* Direction of iteration */
+ unsigned ncalled; /* # of times callback is entered */
+ unsigned nskipped; /* # of attributes skipped */
+ int stop; /* # of iterations to stop after */
+ hsize_t curr; /* Current creation order value */
+ size_t max_visit; /* Size of "visited attribute" flag array */
+ hbool_t *visited; /* Pointer to array of "visited attribute" flags */
+} attr_iter_info_t;
+
+static herr_t attr_op1(hid_t loc_id, const char *name, const H5A_info_t *ainfo, void *op_data);
+
+/* Global dcpl ID, can be re-set as a generated dcpl for various operations
+ * across multiple tests.
+ * e.g., minimized dataset object headers
+ */
+static hid_t dcpl_g = H5P_DEFAULT;
+
+/****************************************************************
+**
+** test_attr_basic_write(): Test basic H5A (attribute) code.
+** Tests integer attributes on both datasets and groups
+**
+****************************************************************/
+static void
+test_attr_basic_write(hid_t fapl)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t group; /* Group ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hid_t attr, attr2; /* Attribute ID */
+#if 0
+ hsize_t attr_size; /* storage size for attribute */
+#endif
+ ssize_t attr_name_size; /* size of attribute name */
+ char *attr_name = NULL; /* name of attribute */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {ATTR1_DIM1};
+ hsize_t dims3[] = {ATTR2_DIM1, ATTR2_DIM2};
+ int read_data1[ATTR1_DIM1] = {0}; /* Buffer for reading 1st attribute */
+ int i;
+#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
+ hid_t ret_id; /* Generic hid_t return value */
+#endif
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Basic Scalar Attribute Writing Functions\n"));
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, DSET1_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Create dataspace for attribute */
+ sid2 = H5Screate_simple(ATTR1_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Try to create an attribute on the file (should create an attribute on root group) */
+ attr = H5Acreate2(fid1, ATTR1_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Open the root group */
+ group = H5Gopen2(fid1, "/", H5P_DEFAULT);
+ CHECK(group, FAIL, "H5Gopen2");
+
+ /* Open attribute again */
+ attr = H5Aopen(group, ATTR1_NAME, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close root group */
+ ret = H5Gclose(group);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Create an attribute for the dataset */
+ attr = H5Acreate2(dataset, ATTR1_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
+ /* Try to create the same attribute again (should fail) */
+ H5E_BEGIN_TRY
+ {
+ ret_id = H5Acreate2(dataset, ATTR1_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret_id, FAIL, "H5Acreate2");
+#endif
+ /* Write attribute information */
+ ret = H5Awrite(attr, H5T_NATIVE_INT, attr_data1);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Create another attribute for the dataset */
+ attr2 = H5Acreate2(dataset, ATTR1A_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr2, FAIL, "H5Acreate2"); /* check the ID actually returned (was checking 'attr') */
+
+ /* Write attribute information */
+ ret = H5Awrite(attr2, H5T_NATIVE_INT, attr_data1a);
+ CHECK(ret, FAIL, "H5Awrite");
+#if 0
+ /* Check storage size for attribute */
+ attr_size = H5Aget_storage_size(attr);
+ VERIFY(attr_size, (ATTR1_DIM1 * sizeof(int)), "H5A_get_storage_size");
+#endif
+ /* Read attribute information immediately, without closing attribute */
+ ret = H5Aread(attr, H5T_NATIVE_INT, read_data1);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR1_DIM1; i++)
+ if (attr_data1[i] != read_data1[i])
+ TestErrPrintf("%d: attribute data different: attr_data1[%d]=%d, read_data1[%d]=%d\n", __LINE__, i,
+ attr_data1[i], i, read_data1[i]);
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close attribute */
+ ret = H5Aclose(attr2);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* change attribute name */
+ ret = H5Arename(dataset, ATTR1_NAME, ATTR_TMP_NAME);
+ CHECK(ret, FAIL, "H5Arename");
+
+ /* Open attribute again */
+ attr = H5Aopen(dataset, ATTR_TMP_NAME, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Verify new attribute name */
+ attr_name_size = H5Aget_name(attr, (size_t)0, NULL);
+ CHECK(attr_name_size, FAIL, "H5Aget_name");
+
+ if (attr_name_size > 0) {
+ attr_name = (char *)HDcalloc((size_t)(attr_name_size + 1), sizeof(char));
+ CHECK_PTR(attr_name, "HDcalloc");
+
+ if (attr_name) {
+ ret = (herr_t)H5Aget_name(attr, (size_t)(attr_name_size + 1), attr_name);
+ CHECK(ret, FAIL, "H5Aget_name");
+ ret = HDstrcmp(attr_name, ATTR_TMP_NAME);
+ VERIFY(ret, 0, "HDstrcmp");
+
+ HDfree(attr_name);
+ attr_name = NULL;
+ } /* end if */
+ } /* end if */
+
+ /* Read attribute information immediately, without closing attribute */
+ ret = H5Aread(attr, H5T_NATIVE_INT, read_data1);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR1_DIM1; i++)
+ if (attr_data1[i] != read_data1[i])
+ TestErrPrintf("%d: attribute data different: attr_data1[%d]=%d, read_data1[%d]=%d\n", __LINE__, i,
+ attr_data1[i], i, read_data1[i]);
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Open the second attribute again */
+ attr2 = H5Aopen(dataset, ATTR1A_NAME, H5P_DEFAULT);
+ CHECK(attr2, FAIL, "H5Aopen"); /* check the ID actually returned (was checking 'attr') */
+
+ /* Verify new attribute name */
+ attr_name_size = H5Aget_name(attr2, (size_t)0, NULL);
+ CHECK(attr_name_size, FAIL, "H5Aget_name");
+
+ if (attr_name_size > 0) {
+ attr_name = (char *)HDcalloc((size_t)(attr_name_size + 1), sizeof(char));
+ CHECK_PTR(attr_name, "HDcalloc");
+
+ if (attr_name) {
+ ret = (herr_t)H5Aget_name(attr2, (size_t)(attr_name_size + 1), attr_name);
+ CHECK(ret, FAIL, "H5Aget_name");
+ ret = HDstrcmp(attr_name, ATTR1A_NAME);
+ VERIFY(ret, 0, "HDstrcmp");
+
+ HDfree(attr_name);
+ attr_name = NULL;
+ } /* end if */
+ } /* end if */
+
+ /* Read attribute information immediately, without closing attribute */
+ ret = H5Aread(attr2, H5T_NATIVE_INT, read_data1);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR1_DIM1; i++)
+ if (attr_data1a[i] != read_data1[i])
+ TestErrPrintf("%d: attribute data different: attr_data1a[%d]=%d, read_data1[%d]=%d\n", __LINE__,
+ i, attr_data1a[i], i, read_data1[i]);
+
+ /* Close attribute */
+ ret = H5Aclose(attr2);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Create group */
+ group = H5Gcreate2(fid1, GROUP1_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group, FAIL, "H5Gcreate2");
+
+ /* Create dataspace for attribute */
+ sid2 = H5Screate_simple(ATTR2_RANK, dims3, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Create an attribute for the group */
+ attr = H5Acreate2(group, ATTR2_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+ /* Check storage size for attribute */
+ attr_size = H5Aget_storage_size(attr);
+ VERIFY(attr_size, (ATTR2_DIM1 * ATTR2_DIM2 * sizeof(int)), "H5Aget_storage_size");
+#endif
+#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
+ /* Try to create the same attribute again (should fail) */
+ H5E_BEGIN_TRY
+ {
+ ret_id = H5Acreate2(group, ATTR2_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret_id, FAIL, "H5Acreate2");
+#endif
+ /* Write attribute information */
+ ret = H5Awrite(attr, H5T_NATIVE_INT, attr_data2);
+ CHECK(ret, FAIL, "H5Awrite");
+#if 0
+ /* Check storage size for attribute */
+ attr_size = H5Aget_storage_size(attr);
+ VERIFY(attr_size, (ATTR2_DIM1 * ATTR2_DIM2 * sizeof(int)), "H5A_get_storage_size");
+#endif
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close Attribute dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Group */
+ ret = H5Gclose(group);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_basic_write() */
+
+/****************************************************************
+**
+** test_attr_basic_read(): Test basic H5A (attribute) code.
+**
+****************************************************************/
+static void
+test_attr_basic_read(hid_t fapl)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t group; /* Group ID */
+ hid_t attr; /* Attribute ID */
+ H5O_info2_t oinfo; /* Object info */
+ int read_data1[ATTR1_DIM1] = {0}; /* Buffer for reading 1st attribute */
+ int read_data2[ATTR2_DIM1][ATTR2_DIM2] = {{0}}; /* Buffer for reading 2nd attribute */
+ int i, j; /* Local index variables */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Basic Attribute Functions\n"));
+
+ /* Open file written by test_attr_basic_write() */
+ fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid1, FAIL, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid1, DSET1_NAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+
+ /* Verify the correct number of attributes */
+ ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
+ CHECK(ret, FAIL, "H5Oget_info3");
+ VERIFY(oinfo.num_attrs, 2, "H5Oget_info3");
+
+ /* Open first attribute for the dataset (renamed to ATTR_TMP_NAME earlier) */
+ attr = H5Aopen(dataset, ATTR_TMP_NAME, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Read attribute information */
+ ret = H5Aread(attr, H5T_NATIVE_INT, read_data1);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR1_DIM1; i++)
+ if (attr_data1[i] != read_data1[i])
+ TestErrPrintf("%d: attribute data different: attr_data1[%d]=%d, read_data1[%d]=%d\n", __LINE__, i,
+ attr_data1[i], i, read_data1[i]);
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Open the group */
+ group = H5Gopen2(fid1, GROUP1_NAME, H5P_DEFAULT);
+ CHECK(group, FAIL, "H5Gopen2");
+
+ /* Verify the correct number of attributes */
+ ret = H5Oget_info3(group, &oinfo, H5O_INFO_NUM_ATTRS);
+ CHECK(ret, FAIL, "H5Oget_info3");
+ VERIFY(oinfo.num_attrs, 1, "H5Oget_info3");
+
+ /* Open the attribute for the group */
+ attr = H5Aopen(group, ATTR2_NAME, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Read attribute information */
+ ret = H5Aread(attr, H5T_NATIVE_INT, read_data2);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR2_DIM1; i++)
+ for (j = 0; j < ATTR2_DIM2; j++)
+ if (attr_data2[i][j] != read_data2[i][j])
+ /* Fixed: report the mismatching element read_data2[i][j] (was read_data1[i]) */
+ TestErrPrintf("%d: attribute data different: attr_data2[%d][%d]=%d, read_data2[%d][%d]=%d\n",
+ __LINE__, i, j, attr_data2[i][j], i, j, read_data2[i][j]);
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close group */
+ ret = H5Gclose(group);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_basic_read() */
+
+/****************************************************************
+**
+** test_attr_flush(): Test H5A (attribute) code for performing
+** I/O when H5Fflush is used.
+**
+****************************************************************/
+static void
+test_attr_flush(hid_t fapl)
+{
+ hid_t fil, /* File ID */
+ att, /* Attribute ID */
+ spc, /* Dataspace ID */
+ set; /* Dataset ID */
+ double wdata = 3.14159; /* Data to write */
+ double rdata; /* Data read in */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Attribute Flushing\n"));
+
+ fil = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(fil, FAIL, "H5Fcreate");
+
+ spc = H5Screate(H5S_SCALAR);
+ CHECK(spc, FAIL, "H5Screate");
+
+ set = H5Dcreate2(fil, DSET1_NAME, H5T_NATIVE_DOUBLE, spc, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
+ CHECK(set, FAIL, "H5Dcreate2");
+
+ att = H5Acreate2(set, ATTR1_NAME, H5T_NATIVE_DOUBLE, spc, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(att, FAIL, "H5Acreate2");
+#ifndef NO_ATTR_FILL_VALUE_SUPPORT
+ /* A newly-created, unwritten attribute should read back as 0.0 */
+ ret = H5Aread(att, H5T_NATIVE_DOUBLE, &rdata);
+ CHECK(ret, FAIL, "H5Aread");
+
+ if (!H5_DBL_ABS_EQUAL(rdata, 0.0))
+ TestErrPrintf("attribute value wrong: rdata=%f, should be %f\n", rdata, 0.0);
+
+ ret = H5Fflush(fil, H5F_SCOPE_GLOBAL);
+ CHECK(ret, FAIL, "H5Fflush");
+
+ /* Re-read after the flush; the value should still be 0.0 */
+ ret = H5Aread(att, H5T_NATIVE_DOUBLE, &rdata);
+ CHECK(ret, FAIL, "H5Aread"); /* fixed diagnostic label: this call is H5Aread, not H5Awrite */
+
+ if (!H5_DBL_ABS_EQUAL(rdata, 0.0))
+ TestErrPrintf("attribute value wrong: rdata=%f, should be %f\n", rdata, 0.0);
+#else
+ HDprintf("** SKIPPED attribute pre-read temporarily until attribute fill values supported **\n");
+#endif
+ ret = H5Awrite(att, H5T_NATIVE_DOUBLE, &wdata);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Read the value back and verify the write took effect */
+ ret = H5Aread(att, H5T_NATIVE_DOUBLE, &rdata);
+ CHECK(ret, FAIL, "H5Aread"); /* fixed diagnostic label: this call is H5Aread, not H5Awrite */
+
+ if (!H5_DBL_ABS_EQUAL(rdata, wdata))
+ TestErrPrintf("attribute value wrong: rdata=%f, should be %f\n", rdata, wdata);
+
+ ret = H5Sclose(spc);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Aclose(att);
+ CHECK(ret, FAIL, "H5Aclose");
+ ret = H5Dclose(set);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(fil);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_flush() */
+
+/****************************************************************
+**
+** test_attr_plist(): Test Attribute Creation Property Lists
+**
+****************************************************************/
+static void
+test_attr_plist(hid_t fapl)
+{
+ hid_t fid = H5I_INVALID_HID; /* File ID */
+ hid_t did = H5I_INVALID_HID; /* Dataset ID */
+ hid_t dsid = H5I_INVALID_HID; /* Dataspace ID (for dataset) */
+ hid_t asid = H5I_INVALID_HID; /* Dataspace ID (for attribute) */
+ hid_t aid = H5I_INVALID_HID; /* Attribute ID */
+ hid_t acpl_id = H5I_INVALID_HID; /* Attribute creation property list ID */
+ hid_t aapl_id = H5I_INVALID_HID; /* Attribute access property list ID */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {ATTR1_DIM1};
+ H5T_cset_t cset; /* Character set for attributes */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Attribute Property Lists\n"));
+
+ /* Create file */
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+ /* Create dataspace for dataset */
+ dsid = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(dsid, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Create a dataset */
+ did = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, dsid, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
+ CHECK(did, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Create dataspace for attribute */
+ asid = H5Screate_simple(ATTR1_RANK, dims2, NULL);
+ CHECK(asid, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Create default creation property list for attribute */
+ acpl_id = H5Pcreate(H5P_ATTRIBUTE_CREATE);
+ CHECK(acpl_id, H5I_INVALID_HID, "H5Pcreate");
+
+ /* Create default access property list for attribute
+ * This currently has no properties, but we need to test its creation
+ * and use.
+ */
+ aapl_id = H5Pcreate(H5P_ATTRIBUTE_ACCESS);
+ CHECK(aapl_id, H5I_INVALID_HID, "H5Pcreate");
+
+ /* Get the character encoding and ensure that it is the default (ASCII) */
+ ret = H5Pget_char_encoding(acpl_id, &cset);
+ CHECK(ret, FAIL, "H5Pget_char_encoding");
+ VERIFY(cset, H5T_CSET_ASCII, "H5Pget_char_encoding");
+
+ /* Create an attribute for the dataset using the property list */
+ aid = H5Acreate2(did, ATTR1_NAME, H5T_NATIVE_INT, asid, acpl_id, aapl_id);
+ CHECK(aid, H5I_INVALID_HID, "H5Acreate2");
+
+ /* Close the property list, and get the attribute's creation property list
+ * (a fresh ID must come back even though the original list was closed) */
+ ret = H5Pclose(acpl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+ acpl_id = H5Aget_create_plist(aid);
+ CHECK(acpl_id, H5I_INVALID_HID, "H5Aget_create_plist");
+
+ /* Get the character encoding and ensure that it is the default (ASCII) */
+ ret = H5Pget_char_encoding(acpl_id, &cset);
+ CHECK(ret, FAIL, "H5Pget_char_encoding");
+ VERIFY(cset, H5T_CSET_ASCII, "H5Pget_char_encoding");
+
+ /* Close the property list and attribute */
+ ret = H5Pclose(acpl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Aclose(aid);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Create a new property list and modify it to use a different encoding */
+ acpl_id = H5Pcreate(H5P_ATTRIBUTE_CREATE);
+ CHECK(acpl_id, H5I_INVALID_HID, "H5Pcreate");
+ ret = H5Pset_char_encoding(acpl_id, H5T_CSET_UTF8);
+ CHECK(ret, FAIL, "H5Pset_char_encoding");
+
+ /* Get the character encoding and ensure that it has been changed */
+ ret = H5Pget_char_encoding(acpl_id, &cset);
+ CHECK(ret, FAIL, "H5Pget_char_encoding");
+ VERIFY(cset, H5T_CSET_UTF8, "H5Pget_char_encoding");
+
+ /* Create an attribute for the dataset using the modified property list */
+ aid = H5Acreate2(did, ATTR2_NAME, H5T_NATIVE_INT, asid, acpl_id, aapl_id);
+ CHECK(aid, H5I_INVALID_HID, "H5Acreate2");
+
+ /* Close the property list and attribute */
+ ret = H5Pclose(acpl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Aclose(aid);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Re-open the second attribute and ensure that its character encoding is
+ * correct (the UTF-8 setting must round-trip through the attribute's ACPL) */
+ aid = H5Aopen(did, ATTR2_NAME, H5P_DEFAULT);
+ CHECK(aid, H5I_INVALID_HID, "H5Aopen");
+ acpl_id = H5Aget_create_plist(aid);
+ CHECK(acpl_id, H5I_INVALID_HID, "H5Aget_create_plist");
+ ret = H5Pget_char_encoding(acpl_id, &cset);
+ CHECK(ret, FAIL, "H5Pget_char_encoding");
+ VERIFY(cset, H5T_CSET_UTF8, "H5Pget_char_encoding");
+
+ /* Close everything */
+ ret = H5Sclose(dsid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(asid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Pclose(aapl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(acpl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Aclose(aid);
+ CHECK(ret, FAIL, "H5Aclose");
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_plist() */
+
+/****************************************************************
+**
+** test_attr_compound_write(): Test H5A (attribute) code.
+** Tests compound datatype attributes
+**
+****************************************************************/
+static void
+test_attr_compound_write(hid_t fapl)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t tid1; /* Attribute datatype ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {ATTR4_DIM1, ATTR4_DIM2};
+#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
+ hid_t ret_id; /* Generic hid_t return value */
+#endif
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ /* NOTE(review): message text says "Multiple" rather than "Compound" —
+ * looks copy-pasted from another test; left unchanged (runtime string) */
+ MESSAGE(5, ("Testing Multiple Attribute Functions\n"));
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, DSET1_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Close dataset's dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Create the attribute datatype. Each field offset is also saved in a
+ * file-scope global (attr4_field*_off), presumably for reuse by the
+ * compound read/verify test — confirm against test_attr_compound_read. */
+ tid1 = H5Tcreate(H5T_COMPOUND, sizeof(struct attr4_struct));
+ CHECK(tid1, FAIL, "H5Tcreate");
+ attr4_field1_off = HOFFSET(struct attr4_struct, i);
+ ret = H5Tinsert(tid1, ATTR4_FIELDNAME1, attr4_field1_off, H5T_NATIVE_INT);
+ CHECK(ret, FAIL, "H5Tinsert");
+ attr4_field2_off = HOFFSET(struct attr4_struct, d);
+ ret = H5Tinsert(tid1, ATTR4_FIELDNAME2, attr4_field2_off, H5T_NATIVE_DOUBLE);
+ CHECK(ret, FAIL, "H5Tinsert");
+ attr4_field3_off = HOFFSET(struct attr4_struct, c);
+ ret = H5Tinsert(tid1, ATTR4_FIELDNAME3, attr4_field3_off, H5T_NATIVE_SCHAR);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ /* Create dataspace for 1st attribute */
+ sid2 = H5Screate_simple(ATTR4_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Create complex attribute for the dataset */
+ attr = H5Acreate2(dataset, ATTR4_NAME, tid1, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
+ /* Try to create the same attribute again (should fail) */
+ H5E_BEGIN_TRY
+ {
+ ret_id = H5Acreate2(dataset, ATTR4_NAME, tid1, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret_id, FAIL, "H5Acreate2");
+#endif
+ /* Write complex attribute data */
+ ret = H5Awrite(attr, tid1, attr_data4);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close attribute's dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close attribute's datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_compound_write() */
+
+/****************************************************************
+**
+** test_attr_compound_read(): Test basic H5A (attribute) code.
+**
+****************************************************************/
+static void
+test_attr_compound_read(hid_t fapl)
+{
+ hid_t fid1; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t space; /* Attribute dataspace */
+ hid_t type; /* Attribute datatype */
+ hid_t attr; /* Attribute ID */
+ char attr_name[ATTR_NAME_LEN]; /* Buffer for attribute names */
+ int rank; /* Attribute rank */
+ hsize_t dims[ATTR_MAX_DIMS]; /* Attribute dimensions */
+ H5T_class_t t_class; /* Attribute datatype class */
+ H5T_order_t order; /* Attribute datatype order */
+ size_t size; /* Attribute datatype size as stored in file */
+ int fields; /* # of Attribute datatype fields */
+ char *fieldname; /* Name of a field */
+ size_t offset; /* Attribute datatype field offset */
+ hid_t field; /* Attribute field datatype */
+ struct attr4_struct read_data4[ATTR4_DIM1][ATTR4_DIM2]; /* Buffer for reading 4th attribute */
+ ssize_t name_len; /* Length of attribute name */
+ H5O_info2_t oinfo; /* Object info */
+ int i, j; /* Local index variables */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Basic Attribute Functions\n"));
+
+ /* Open file */
+ fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid1, FAIL, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid1, DSET1_NAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+
+ /* Verify the correct number of attributes */
+ ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
+ CHECK(ret, FAIL, "H5Oget_info3");
+ VERIFY(oinfo.num_attrs, 1, "H5Oget_info3");
+
+ /* Open 1st attribute for the dataset */
+ attr =
+ H5Aopen_by_idx(dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)0, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen_by_idx");
+
+ /* Verify Dataspace */
+ space = H5Aget_space(attr);
+ CHECK(space, FAIL, "H5Aget_space");
+ rank = H5Sget_simple_extent_ndims(space);
+ VERIFY(rank, ATTR4_RANK, "H5Sget_simple_extent_ndims");
+ ret = H5Sget_simple_extent_dims(space, dims, NULL);
+ CHECK(ret, FAIL, "H5Sget_simple_extent_dims");
+ if (dims[0] != ATTR4_DIM1)
+ TestErrPrintf("attribute dimensions different: dims[0]=%d, should be %d\n", (int)dims[0], ATTR4_DIM1);
+ if (dims[1] != ATTR4_DIM2)
+ TestErrPrintf("attribute dimensions different: dims[1]=%d, should be %d\n", (int)dims[1], ATTR4_DIM2);
+ H5Sclose(space);
+
+ /* Verify Datatype */
+ type = H5Aget_type(attr);
+ CHECK(type, FAIL, "H5Aget_type");
+ t_class = H5Tget_class(type);
+ VERIFY(t_class, H5T_COMPOUND, "H5Tget_class");
+ fields = H5Tget_nmembers(type);
+ VERIFY(fields, 3, "H5Tget_nmembers");
+ for (i = 0; i < fields; i++) {
+ fieldname = H5Tget_member_name(type, (unsigned)i);
+ if (!(HDstrcmp(fieldname, ATTR4_FIELDNAME1) != 0 || HDstrcmp(fieldname, ATTR4_FIELDNAME2) != 0 ||
+ HDstrcmp(fieldname, ATTR4_FIELDNAME3) != 0))
+ TestErrPrintf("invalid field name for field #%d: %s\n", i, fieldname);
+ H5free_memory(fieldname);
+ } /* end for */
+ offset = H5Tget_member_offset(type, 0);
+ VERIFY(offset, attr4_field1_off, "H5Tget_member_offset");
+ offset = H5Tget_member_offset(type, 1);
+ VERIFY(offset, attr4_field2_off, "H5Tget_member_offset");
+ offset = H5Tget_member_offset(type, 2);
+ VERIFY(offset, attr4_field3_off, "H5Tget_member_offset");
+
+ /* Verify each field's type, class & size */
+ field = H5Tget_member_type(type, 0);
+ CHECK(field, FAIL, "H5Tget_member_type");
+ t_class = H5Tget_class(field);
+ VERIFY(t_class, H5T_INTEGER, "H5Tget_class");
+ order = H5Tget_order(field);
+ VERIFY_TYPE(order, H5Tget_order(H5T_NATIVE_INT), H5T_order_t, "%d", "H5Tget_order");
+ size = H5Tget_size(field);
+ VERIFY(size, H5Tget_size(H5T_NATIVE_INT), "H5Tget_size");
+ H5Tclose(field);
+ field = H5Tget_member_type(type, 1);
+ CHECK(field, FAIL, "H5Tget_member_type");
+ t_class = H5Tget_class(field);
+ VERIFY(t_class, H5T_FLOAT, "H5Tget_class");
+ order = H5Tget_order(field);
+ VERIFY_TYPE(order, H5Tget_order(H5T_NATIVE_DOUBLE), H5T_order_t, "%d", "H5Tget_order");
+ size = H5Tget_size(field);
+ VERIFY(size, H5Tget_size(H5T_NATIVE_DOUBLE), "H5Tget_size");
+ H5Tclose(field);
+ field = H5Tget_member_type(type, 2);
+ CHECK(field, FAIL, "H5Tget_member_type");
+ t_class = H5Tget_class(field);
+ VERIFY(t_class, H5T_INTEGER, "H5Tget_class");
+ order = H5Tget_order(field);
+ VERIFY_TYPE(order, H5Tget_order(H5T_NATIVE_SCHAR), H5T_order_t, "%d", "H5Tget_order");
+ size = H5Tget_size(field);
+ VERIFY(size, H5Tget_size(H5T_NATIVE_SCHAR), "H5Tget_size");
+ H5Tclose(field);
+
+ /* Read attribute information */
+ ret = H5Aread(attr, type, read_data4);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR4_DIM1; i++)
+ for (j = 0; j < ATTR4_DIM2; j++)
+ if (HDmemcmp(&attr_data4[i][j], &read_data4[i][j], sizeof(struct attr4_struct)) != 0) {
+ HDprintf("%d: attribute data different: attr_data4[%d][%d].i=%d, read_data4[%d][%d].i=%d\n",
+ __LINE__, i, j, attr_data4[i][j].i, i, j, read_data4[i][j].i);
+ HDprintf("%d: attribute data different: attr_data4[%d][%d].d=%f, read_data4[%d][%d].d=%f\n",
+ __LINE__, i, j, attr_data4[i][j].d, i, j, read_data4[i][j].d);
+ TestErrPrintf(
+ "%d: attribute data different: attr_data4[%d][%d].c=%c, read_data4[%d][%d].c=%c\n",
+ __LINE__, i, j, attr_data4[i][j].c, i, j, read_data4[i][j].c);
+ } /* end if */
+
+ /* Verify Name */
+ name_len = H5Aget_name(attr, (size_t)ATTR_NAME_LEN, attr_name);
+ VERIFY(name_len, HDstrlen(ATTR4_NAME), "H5Aget_name");
+ if (HDstrcmp(attr_name, ATTR4_NAME) != 0)
+ TestErrPrintf("attribute name different: attr_name=%s, should be %s\n", attr_name, ATTR4_NAME);
+
+ /* Close attribute datatype */
+ ret = H5Tclose(type);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_compound_read() */
+
+/****************************************************************
+**
+** test_attr_scalar_write(): Test scalar H5A (attribute) writing code.
+**
+****************************************************************/
+static void
+test_attr_scalar_write(hid_t fapl)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
+ hid_t ret_id; /* Generic hid_t return value */
+#endif
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Basic Attribute Functions\n"));
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, DSET1_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Create dataspace for attribute */
+ sid2 = H5Screate_simple(ATTR5_RANK, NULL, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Create an attribute for the dataset */
+ attr = H5Acreate2(dataset, ATTR5_NAME, H5T_NATIVE_FLOAT, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
+ /* Try to create the same attribute again (should fail) */
+ H5E_BEGIN_TRY
+ {
+ ret_id = H5Acreate2(dataset, ATTR5_NAME, H5T_NATIVE_FLOAT, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret_id, FAIL, "H5Acreate2");
+#endif
+ /* Write attribute information */
+ ret = H5Awrite(attr, H5T_NATIVE_FLOAT, &attr_data5);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_scalar_write() */
+
+/****************************************************************
+**
+** test_attr_scalar_read(): Test scalar H5A (attribute) reading code.
+**
+****************************************************************/
static void
test_attr_scalar_read(hid_t fapl)
{
    hid_t       fid1;         /* HDF5 File IDs */
    hid_t       dataset;      /* Dataset ID */
    hid_t       sid;          /* Dataspace ID */
    hid_t       attr;         /* Attribute ID */
    H5S_class_t stype;        /* Dataspace class */
    float       rdata = 0.0F; /* Buffer for reading 1st attribute */
    H5O_info2_t oinfo;        /* Object info */
    herr_t      ret;          /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Basic Scalar Attribute Reading Functions\n"));

    /* Open file (created by test_attr_scalar_write()) */
    fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
    CHECK(fid1, FAIL, "H5Fopen");

    /* Open the dataset */
    dataset = H5Dopen2(fid1, DSET1_NAME, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dopen2");

    /* Verify the correct number of attributes */
    ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
    CHECK(ret, FAIL, "H5Oget_info3");
    VERIFY(oinfo.num_attrs, 1, "H5Oget_info3");

    /* Open an attribute for the dataset */
    attr = H5Aopen(dataset, ATTR5_NAME, H5P_DEFAULT);
    CHECK(attr, FAIL, "H5Aopen");

    /* Read attribute information */
    ret = H5Aread(attr, H5T_NATIVE_FLOAT, &rdata);
    CHECK(ret, FAIL, "H5Aread");

    /* Verify the floating-point value in this way to avoid compiler warning. */
    if (!H5_FLT_ABS_EQUAL(rdata, attr_data5))
        HDprintf("*** UNEXPECTED VALUE from %s should be %f, but is %f at line %4d in %s\n", "H5Aread",
                 (double)attr_data5, (double)rdata, (int)__LINE__, __FILE__);

    /* Get the attribute's dataspace */
    sid = H5Aget_space(attr);
    CHECK(sid, FAIL, "H5Aget_space");

    /* Make certain the dataspace is scalar */
    stype = H5Sget_simple_extent_type(sid);
    VERIFY(stype, H5S_SCALAR, "H5Sget_simple_extent_type");

    /* Close dataspace */
    ret = H5Sclose(sid);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close attribute */
    ret = H5Aclose(attr);
    CHECK(ret, FAIL, "H5Aclose");

    /* Close dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close file */
    ret = H5Fclose(fid1);
    CHECK(ret, FAIL, "H5Fclose");
} /* test_attr_scalar_read() */
+
+/****************************************************************
+**
+** test_attr_mult_write(): Test basic H5A (attribute) code.
+** Tests integer attributes on both datasets and groups
+**
+****************************************************************/
static void
test_attr_mult_write(hid_t fapl)
{
    hid_t   fid1;       /* HDF5 File IDs */
    hid_t   dataset;    /* Dataset ID */
    hid_t   sid1, sid2; /* Dataspace ID */
    hid_t   attr;       /* Attribute ID */
    hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
    hsize_t dims2[] = {ATTR1_DIM1};
    hsize_t dims3[] = {ATTR2_DIM1, ATTR2_DIM2};
    hsize_t dims4[] = {ATTR3_DIM1, ATTR3_DIM2, ATTR3_DIM3};
#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
    hid_t ret_id; /* Generic hid_t return value */
#endif
    herr_t ret; /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Multiple Attribute Functions\n"));

    /* Attaches three attributes of different ranks/types (1-D int, 2-D int,
     * 3-D double) to one dataset; the companion read test verifies them. */

    /* Create file */
    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(fid1, FAIL, "H5Fcreate");

    /* Create dataspace for dataset */
    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
    CHECK(sid1, FAIL, "H5Screate_simple");

    /* Create a dataset */
    dataset = H5Dcreate2(fid1, DSET1_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dcreate2");

    /* Close dataset's dataspace */
    ret = H5Sclose(sid1);
    CHECK(ret, FAIL, "H5Sclose");

    /* Create dataspace for 1st attribute */
    sid2 = H5Screate_simple(ATTR1_RANK, dims2, NULL);
    CHECK(sid2, FAIL, "H5Screate_simple");

    /* Create 1st attribute for the dataset */
    attr = H5Acreate2(dataset, ATTR1_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(attr, FAIL, "H5Acreate2");
#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
    /* Try to create the same attribute again (should fail) */
    H5E_BEGIN_TRY
    {
        ret_id = H5Acreate2(dataset, ATTR1_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
    }
    H5E_END_TRY;
    VERIFY(ret_id, FAIL, "H5Acreate2");
#endif
    /* Write 1st attribute data */
    ret = H5Awrite(attr, H5T_NATIVE_INT, attr_data1);
    CHECK(ret, FAIL, "H5Awrite");

    /* Close 1st attribute */
    ret = H5Aclose(attr);
    CHECK(ret, FAIL, "H5Aclose");

    /* Close attribute's dataspace */
    ret = H5Sclose(sid2);
    CHECK(ret, FAIL, "H5Sclose");

    /* Create dataspace for 2nd attribute (sid2 is reused for each attribute) */
    sid2 = H5Screate_simple(ATTR2_RANK, dims3, NULL);
    CHECK(sid2, FAIL, "H5Screate_simple");

    /* Create 2nd attribute for the dataset */
    attr = H5Acreate2(dataset, ATTR2_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(attr, FAIL, "H5Acreate2");
#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
    /* Try to create the same attribute again (should fail) */
    H5E_BEGIN_TRY
    {
        ret_id = H5Acreate2(dataset, ATTR2_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
    }
    H5E_END_TRY;
    VERIFY(ret_id, FAIL, "H5Acreate2");
#endif
    /* Write 2nd attribute information */
    ret = H5Awrite(attr, H5T_NATIVE_INT, attr_data2);
    CHECK(ret, FAIL, "H5Awrite");

    /* Close 2nd attribute */
    ret = H5Aclose(attr);
    CHECK(ret, FAIL, "H5Aclose");

    /* Close 2nd attribute's dataspace */
    ret = H5Sclose(sid2);
    CHECK(ret, FAIL, "H5Sclose");

    /* Create dataspace for 3rd attribute */
    sid2 = H5Screate_simple(ATTR3_RANK, dims4, NULL);
    CHECK(sid2, FAIL, "H5Screate_simple");

    /* Create 3rd attribute for the dataset */
    attr = H5Acreate2(dataset, ATTR3_NAME, H5T_NATIVE_DOUBLE, sid2, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(attr, FAIL, "H5Acreate2");
#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
    /* Try to create the same attribute again (should fail) */
    H5E_BEGIN_TRY
    {
        ret_id = H5Acreate2(dataset, ATTR3_NAME, H5T_NATIVE_DOUBLE, sid2, H5P_DEFAULT, H5P_DEFAULT);
    }
    H5E_END_TRY;
    VERIFY(ret_id, FAIL, "H5Acreate2");
#endif
    /* Write 3rd attribute information */
    ret = H5Awrite(attr, H5T_NATIVE_DOUBLE, attr_data3);
    CHECK(ret, FAIL, "H5Awrite");

    /* Close 3rd attribute */
    ret = H5Aclose(attr);
    CHECK(ret, FAIL, "H5Aclose");

    /* Close 3rd attribute's dataspace */
    ret = H5Sclose(sid2);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close file */
    ret = H5Fclose(fid1);
    CHECK(ret, FAIL, "H5Fclose");
} /* test_attr_mult_write() */
+
+/****************************************************************
+**
+** test_attr_mult_read(): Test basic H5A (attribute) code.
+**
+****************************************************************/
static void
test_attr_mult_read(hid_t fapl)
{
    hid_t       fid1;                     /* HDF5 File ID */
    hid_t       dataset;                  /* Dataset ID */
    hid_t       space;                    /* Attribute dataspace */
    hid_t       type;                     /* Attribute datatype */
    hid_t       attr;                     /* Attribute ID */
    char        attr_name[ATTR_NAME_LEN]; /* Buffer for attribute names */
    char        temp_name[ATTR_NAME_LEN]; /* Buffer for mangling attribute names */
    int         rank;                     /* Attribute rank */
    hsize_t     dims[ATTR_MAX_DIMS];      /* Attribute dimensions */
    H5T_class_t t_class;                  /* Attribute datatype class */
    H5T_order_t order;                    /* Attribute datatype order */
    size_t      size;                     /* Attribute datatype size as stored in file */
    int         read_data1[ATTR1_DIM1]             = {0};     /* Buffer for reading 1st attribute */
    int         read_data2[ATTR2_DIM1][ATTR2_DIM2] = {{0}};   /* Buffer for reading 2nd attribute */
    double      read_data3[ATTR3_DIM1][ATTR3_DIM2][ATTR3_DIM3] = {{{0}}}; /* Buffer for reading 3rd attribute */
    ssize_t     name_len;                 /* Length of attribute name */
    H5O_info2_t oinfo;                    /* Object info */
    int         i, j, k;                  /* Local index values */
    herr_t      ret;                      /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Basic Attribute Functions\n"));

    /* Open file (written by test_attr_mult_write()) */
    fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
    CHECK(fid1, FAIL, "H5Fopen");

    /* Open the dataset */
    dataset = H5Dopen2(fid1, DSET1_NAME, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dopen2");

    /* Verify the correct number of attributes */
    ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
    CHECK(ret, FAIL, "H5Oget_info3");
    VERIFY(oinfo.num_attrs, 3, "H5Oget_info3");

    /* Open 1st attribute for the dataset */
    attr =
        H5Aopen_by_idx(dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)0, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(attr, FAIL, "H5Aopen_by_idx");

    /* Verify Dataspace */
    space = H5Aget_space(attr);
    CHECK(space, FAIL, "H5Aget_space");
    rank = H5Sget_simple_extent_ndims(space);
    VERIFY(rank, ATTR1_RANK, "H5Sget_simple_extent_ndims");
    ret = H5Sget_simple_extent_dims(space, dims, NULL);
    CHECK(ret, FAIL, "H5Sget_simple_extent_dims");
    if (dims[0] != ATTR1_DIM1)
        TestErrPrintf("attribute dimensions different: dims[0]=%d, should be %d\n", (int)dims[0], ATTR1_DIM1);
    H5Sclose(space);

    /* Verify Datatype */
    type = H5Aget_type(attr);
    CHECK(type, FAIL, "H5Aget_type");
    t_class = H5Tget_class(type);
    VERIFY(t_class, H5T_INTEGER, "H5Tget_class");
    order = H5Tget_order(type);
    VERIFY_TYPE(order, H5Tget_order(H5T_NATIVE_INT), H5T_order_t, "%d", "H5Tget_order");
    size = H5Tget_size(type);
    VERIFY(size, H5Tget_size(H5T_NATIVE_INT), "H5Tget_size");
    H5Tclose(type);

    /* Read attribute information */
    ret = H5Aread(attr, H5T_NATIVE_INT, read_data1);
    CHECK(ret, FAIL, "H5Aread");

    /* Verify values read in */
    for (i = 0; i < ATTR1_DIM1; i++)
        if (attr_data1[i] != read_data1[i])
            TestErrPrintf("%d: attribute data different: attr_data1[%d]=%d, read_data1[%d]=%d\n", __LINE__, i,
                          attr_data1[i], i, read_data1[i]);

    /* Verify Name */
    name_len = H5Aget_name(attr, (size_t)ATTR_NAME_LEN, attr_name);
    VERIFY(name_len, HDstrlen(ATTR1_NAME), "H5Aget_name");
    if (HDstrcmp(attr_name, ATTR1_NAME) != 0)
        TestErrPrintf("attribute name different: attr_name=%s, should be %s\n", attr_name, ATTR1_NAME);

    /* Verify Name with too small of a buffer.  H5Aget_name still returns the
     * full name length but the returned string is truncated. */
    name_len = H5Aget_name(attr, HDstrlen(ATTR1_NAME), attr_name);
    VERIFY(name_len, HDstrlen(ATTR1_NAME), "H5Aget_name");
    HDstrcpy(temp_name, ATTR1_NAME);            /* make a copy of the name */
    temp_name[HDstrlen(ATTR1_NAME) - 1] = '\0'; /* truncate it to match the one retrieved */
    if (HDstrcmp(attr_name, temp_name) != 0)
        TestErrPrintf("attribute name different: attr_name=%s, should be %s\n", attr_name, temp_name);

    /* Close attribute */
    ret = H5Aclose(attr);
    CHECK(ret, FAIL, "H5Aclose");

    /* Open 2nd attribute for the dataset */
    attr =
        H5Aopen_by_idx(dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)1, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(attr, FAIL, "H5Aopen_by_idx");

    /* Verify Dataspace */
    space = H5Aget_space(attr);
    CHECK(space, FAIL, "H5Aget_space");
    rank = H5Sget_simple_extent_ndims(space);
    VERIFY(rank, ATTR2_RANK, "H5Sget_simple_extent_ndims");
    ret = H5Sget_simple_extent_dims(space, dims, NULL);
    CHECK(ret, FAIL, "H5Sget_simple_extent_dims");
    if (dims[0] != ATTR2_DIM1)
        TestErrPrintf("attribute dimensions different: dims[0]=%d, should be %d\n", (int)dims[0], ATTR2_DIM1);
    if (dims[1] != ATTR2_DIM2)
        TestErrPrintf("attribute dimensions different: dims[1]=%d, should be %d\n", (int)dims[1], ATTR2_DIM2);
    H5Sclose(space);

    /* Verify Datatype */
    type = H5Aget_type(attr);
    CHECK(type, FAIL, "H5Aget_type");
    t_class = H5Tget_class(type);
    VERIFY(t_class, H5T_INTEGER, "H5Tget_class");
    order = H5Tget_order(type);
    VERIFY_TYPE(order, H5Tget_order(H5T_NATIVE_INT), H5T_order_t, "%d", "H5Tget_order");
    size = H5Tget_size(type);
    VERIFY(size, H5Tget_size(H5T_NATIVE_INT), "H5Tget_size");
    H5Tclose(type);

    /* Read attribute information */
    ret = H5Aread(attr, H5T_NATIVE_INT, read_data2);
    CHECK(ret, FAIL, "H5Aread");

    /* Verify values read in */
    for (i = 0; i < ATTR2_DIM1; i++)
        for (j = 0; j < ATTR2_DIM2; j++)
            if (attr_data2[i][j] != read_data2[i][j])
                TestErrPrintf("%d: attribute data different: attr_data2[%d][%d]=%d, read_data2[%d][%d]=%d\n",
                              __LINE__, i, j, attr_data2[i][j], i, j, read_data2[i][j]);

    /* Verify Name */
    name_len = H5Aget_name(attr, (size_t)ATTR_NAME_LEN, attr_name);
    VERIFY(name_len, HDstrlen(ATTR2_NAME), "H5Aget_name");
    if (HDstrcmp(attr_name, ATTR2_NAME) != 0)
        TestErrPrintf("attribute name different: attr_name=%s, should be %s\n", attr_name, ATTR2_NAME);

    /* Verify Name with too small of a buffer */
    name_len = H5Aget_name(attr, HDstrlen(ATTR2_NAME), attr_name);
    VERIFY(name_len, HDstrlen(ATTR2_NAME), "H5Aget_name");
    HDstrcpy(temp_name, ATTR2_NAME);            /* make a copy of the name */
    temp_name[HDstrlen(ATTR2_NAME) - 1] = '\0'; /* truncate it to match the one retrieved */
    if (HDstrcmp(attr_name, temp_name) != 0)
        TestErrPrintf("attribute name different: attr_name=%s, should be %s\n", attr_name, temp_name);

    /* Close attribute */
    ret = H5Aclose(attr);
    CHECK(ret, FAIL, "H5Aclose");

    /* Open 3rd attribute for the dataset */
    attr =
        H5Aopen_by_idx(dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)2, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(attr, FAIL, "H5Aopen_by_idx");

    /* Verify Dataspace */
    space = H5Aget_space(attr);
    CHECK(space, FAIL, "H5Aget_space");
    rank = H5Sget_simple_extent_ndims(space);
    VERIFY(rank, ATTR3_RANK, "H5Sget_simple_extent_ndims");
    ret = H5Sget_simple_extent_dims(space, dims, NULL);
    CHECK(ret, FAIL, "H5Sget_simple_extent_dims");
    if (dims[0] != ATTR3_DIM1)
        TestErrPrintf("attribute dimensions different: dims[0]=%d, should be %d\n", (int)dims[0], ATTR3_DIM1);
    if (dims[1] != ATTR3_DIM2)
        TestErrPrintf("attribute dimensions different: dims[1]=%d, should be %d\n", (int)dims[1], ATTR3_DIM2);
    if (dims[2] != ATTR3_DIM3)
        TestErrPrintf("attribute dimensions different: dims[2]=%d, should be %d\n", (int)dims[2], ATTR3_DIM3);
    H5Sclose(space);

    /* Verify Datatype */
    type = H5Aget_type(attr);
    CHECK(type, FAIL, "H5Aget_type");
    t_class = H5Tget_class(type);
    VERIFY(t_class, H5T_FLOAT, "H5Tget_class");
    order = H5Tget_order(type);
    VERIFY_TYPE(order, H5Tget_order(H5T_NATIVE_DOUBLE), H5T_order_t, "%d", "H5Tget_order");
    size = H5Tget_size(type);
    VERIFY(size, H5Tget_size(H5T_NATIVE_DOUBLE), "H5Tget_size");
    H5Tclose(type);

    /* Read attribute information */
    ret = H5Aread(attr, H5T_NATIVE_DOUBLE, read_data3);
    CHECK(ret, FAIL, "H5Aread");

    /* Verify values read in */
    for (i = 0; i < ATTR3_DIM1; i++)
        for (j = 0; j < ATTR3_DIM2; j++)
            for (k = 0; k < ATTR3_DIM3; k++)
                if (!H5_DBL_ABS_EQUAL(attr_data3[i][j][k], read_data3[i][j][k]))
                    TestErrPrintf("%d: attribute data different: attr_data3[%d][%d][%d]=%f, "
                                  "read_data3[%d][%d][%d]=%f\n",
                                  __LINE__, i, j, k, attr_data3[i][j][k], i, j, k, read_data3[i][j][k]);

    /* Verify Name */
    name_len = H5Aget_name(attr, (size_t)ATTR_NAME_LEN, attr_name);
    VERIFY(name_len, HDstrlen(ATTR3_NAME), "H5Aget_name");
    if (HDstrcmp(attr_name, ATTR3_NAME) != 0)
        TestErrPrintf("attribute name different: attr_name=%s, should be %s\n", attr_name, ATTR3_NAME);

    /* Verify Name with too small of a buffer */
    name_len = H5Aget_name(attr, HDstrlen(ATTR3_NAME), attr_name);
    VERIFY(name_len, HDstrlen(ATTR3_NAME), "H5Aget_name");
    HDstrcpy(temp_name, ATTR3_NAME);            /* make a copy of the name */
    temp_name[HDstrlen(ATTR3_NAME) - 1] = '\0'; /* truncate it to match the one retrieved */
    if (HDstrcmp(attr_name, temp_name) != 0)
        TestErrPrintf("attribute name different: attr_name=%s, should be %s\n", attr_name, temp_name);

    /* Close attribute */
    ret = H5Aclose(attr);
    CHECK(ret, FAIL, "H5Aclose");

    /* Close dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close file */
    ret = H5Fclose(fid1);
    CHECK(ret, FAIL, "H5Fclose");
} /* test_attr_mult_read() */
+
+/****************************************************************
+**
+** attr_op1(): Attribute operator
+**
+****************************************************************/
+static herr_t
+attr_op1(hid_t H5_ATTR_UNUSED loc_id, const char *name, const H5A_info_t H5_ATTR_UNUSED *ainfo, void *op_data)
+{
+ int *count = (int *)op_data;
+ herr_t ret = 0;
+
+ switch (*count) {
+ case 0:
+ if (HDstrcmp(name, ATTR1_NAME) != 0)
+ TestErrPrintf("attribute name different: name=%s, should be %s\n", name, ATTR1_NAME);
+ (*count)++;
+ break;
+
+ case 1:
+ if (HDstrcmp(name, ATTR2_NAME) != 0)
+ TestErrPrintf("attribute name different: name=%s, should be %s\n", name, ATTR2_NAME);
+ (*count)++;
+ break;
+
+ case 2:
+ if (HDstrcmp(name, ATTR3_NAME) != 0)
+ TestErrPrintf("attribute name different: name=%s, should be %s\n", name, ATTR3_NAME);
+ (*count)++;
+ break;
+
+ default:
+ ret = -1;
+ break;
+ } /* end switch() */
+
+ return (ret);
+} /* end attr_op1() */
+
+/****************************************************************
+**
+** test_attr_iterate(): Test H5A (attribute) iterator code.
+**
+****************************************************************/
+static void
+test_attr_iterate(hid_t fapl)
+{
+ hid_t file; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ int count; /* operator data for the iterator */
+ H5O_info2_t oinfo; /* Object info */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Basic Attribute Functions\n"));
+
+ /* Open file */
+ file = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(file, FAIL, "H5Fopen");
+
+ /* Create a dataspace */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Create a new dataset */
+ dataset = H5Dcreate2(file, DSET2_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Verify the correct number of attributes */
+ ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
+ CHECK(ret, FAIL, "H5Oget_info3");
+ VERIFY(oinfo.num_attrs, 0, "H5Oget_info3");
+
+ /* Iterate over attributes on dataset */
+ count = 0;
+ ret = H5Aiterate2(dataset, H5_INDEX_NAME, H5_ITER_INC, NULL, attr_op1, &count);
+ VERIFY(ret, 0, "H5Aiterate2");
+
+ /* Close dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Open existing dataset w/attributes */
+ dataset = H5Dopen2(file, DSET1_NAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+
+ /* Verify the correct number of attributes */
+ ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
+ CHECK(ret, FAIL, "H5Oget_info3");
+ VERIFY(oinfo.num_attrs, 3, "H5Oget_info3");
+
+ /* Iterate over attributes on dataset */
+ count = 0;
+ ret = H5Aiterate2(dataset, H5_INDEX_NAME, H5_ITER_INC, NULL, attr_op1, &count);
+ VERIFY(ret, 0, "H5Aiterate2");
+
+ /* Close dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_iterate() */
+
+/****************************************************************
+**
+** test_attr_delete(): Test H5A (attribute) code for deleting objects.
+**
+****************************************************************/
static void
test_attr_delete(hid_t fapl)
{
    hid_t       fid1;                     /* HDF5 File ID */
    hid_t       dataset;                  /* Dataset ID */
    hid_t       attr;                     /* Attribute ID */
    char        attr_name[ATTR_NAME_LEN]; /* Buffer for attribute names */
    ssize_t     name_len;                 /* Length of attribute name */
    H5O_info2_t oinfo;                    /* Object info */
    herr_t      ret;                      /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Basic Attribute Deletion Functions\n"));

    /* Open file (expects the three attributes left by test_attr_mult_write()) */
    fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
    CHECK(fid1, FAIL, "H5Fopen");

    /* Open the dataset */
    dataset = H5Dopen2(fid1, DSET1_NAME, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dopen2");

    /* Verify the correct number of attributes */
    ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
    CHECK(ret, FAIL, "H5Oget_info3");
    VERIFY(oinfo.num_attrs, 3, "H5Oget_info3");
#ifndef NO_DELETE_NONEXISTENT_ATTRIBUTE
    /* Try to delete bogus attribute (should fail) */
    H5E_BEGIN_TRY
    {
        ret = H5Adelete(dataset, "Bogus");
    }
    H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Adelete");
#endif
    /* Verify the correct number of attributes (failed delete must not change it) */
    ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
    CHECK(ret, FAIL, "H5Oget_info3");
    VERIFY(oinfo.num_attrs, 3, "H5Oget_info3");

    /* Delete middle (2nd) attribute */
    ret = H5Adelete(dataset, ATTR2_NAME);
    CHECK(ret, FAIL, "H5Adelete");

    /* Verify the correct number of attributes */
    ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
    CHECK(ret, FAIL, "H5Oget_info3");
    VERIFY(oinfo.num_attrs, 2, "H5Oget_info3");

    /* Open 1st attribute for the dataset */
    attr =
        H5Aopen_by_idx(dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)0, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(attr, FAIL, "H5Aopen_by_idx");

    /* Verify Name */
    name_len = H5Aget_name(attr, (size_t)ATTR_NAME_LEN, attr_name);
    VERIFY(name_len, HDstrlen(ATTR1_NAME), "H5Aget_name");
    if (HDstrcmp(attr_name, ATTR1_NAME) != 0)
        TestErrPrintf("attribute name different: attr_name=%s, should be %s\n", attr_name, ATTR1_NAME);

    /* Close attribute */
    ret = H5Aclose(attr);
    CHECK(ret, FAIL, "H5Aclose");

    /* Open last (formerly 3rd) attribute for the dataset */
    attr =
        H5Aopen_by_idx(dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)1, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(attr, FAIL, "H5Aopen_by_idx");

    /* Verify Name */
    name_len = H5Aget_name(attr, (size_t)ATTR_NAME_LEN, attr_name);
    VERIFY(name_len, HDstrlen(ATTR3_NAME), "H5Aget_name");
    if (HDstrcmp(attr_name, ATTR3_NAME) != 0)
        TestErrPrintf("attribute name different: attr_name=%s, should be %s\n", attr_name, ATTR3_NAME);

    /* Close attribute */
    ret = H5Aclose(attr);
    CHECK(ret, FAIL, "H5Aclose");

    /* Delete first attribute */
    ret = H5Adelete(dataset, ATTR1_NAME);
    CHECK(ret, FAIL, "H5Adelete");

    /* Verify the correct number of attributes */
    ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
    CHECK(ret, FAIL, "H5Oget_info3");
    VERIFY(oinfo.num_attrs, 1, "H5Oget_info3");

    /* Open last (formerly 3rd) attribute for the dataset; it is now at index 0 */
    attr =
        H5Aopen_by_idx(dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)0, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(attr, FAIL, "H5Aopen_by_idx");

    /* Verify Name */
    name_len = H5Aget_name(attr, (size_t)ATTR_NAME_LEN, attr_name);
    VERIFY(name_len, HDstrlen(ATTR3_NAME), "H5Aget_name");
    if (HDstrcmp(attr_name, ATTR3_NAME) != 0)
        TestErrPrintf("attribute name different: attr_name=%s, should be %s\n", attr_name, ATTR3_NAME);

    /* Close attribute */
    ret = H5Aclose(attr);
    CHECK(ret, FAIL, "H5Aclose");

    /* Delete last remaining attribute */
    ret = H5Adelete(dataset, ATTR3_NAME);
    CHECK(ret, FAIL, "H5Adelete");

    /* Verify the correct number of attributes */
    ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
    CHECK(ret, FAIL, "H5Oget_info3");
    VERIFY(oinfo.num_attrs, 0, "H5Oget_info3");

    /* Close dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close file */
    ret = H5Fclose(fid1);
    CHECK(ret, FAIL, "H5Fclose");
} /* test_attr_delete() */
+
+/****************************************************************
+**
+** test_attr_dtype_shared(): Test H5A (attribute) code for using
+** shared datatypes in attributes.
+**
+****************************************************************/
+static void
+test_attr_dtype_shared(hid_t fapl)
+{
+#ifndef NO_SHARED_DATATYPES
+ hid_t file_id; /* File ID */
+ hid_t dset_id; /* Dataset ID */
+ hid_t space_id; /* Dataspace ID for dataset & attribute */
+ hid_t type_id; /* Datatype ID for named datatype */
+ hid_t attr_id; /* Attribute ID */
+ int data = 8; /* Data to write */
+ int rdata = 0; /* Read read in */
+ H5O_info2_t oinfo; /* Object's information */
+#if 0
+ h5_stat_size_t empty_filesize; /* Size of empty file */
+ h5_stat_size_t filesize; /* Size of file after modifications */
+#endif
+ herr_t ret; /* Generic return value */
+#else
+ (void)fapl;
+#endif
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Shared Datatypes with Attributes - SKIPPED for now due to no support for shared "
+ "datatypes\n"));
+#ifndef NO_SHARED_DATATYPES
+ /* Create a file */
+ file_id = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(file_id, FAIL, "H5Fopen");
+
+ /* Close file */
+ ret = H5Fclose(file_id);
+ CHECK(ret, FAIL, "H5Fclose");
+#if 0
+ /* Get size of file */
+ empty_filesize = h5_get_file_size(FILENAME, fapl);
+ if (empty_filesize < 0)
+ TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
+#endif
+
+ /* Re-open file */
+ file_id = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(file_id, FAIL, "H5Fopen");
+
+ /* Create a datatype to commit and use */
+ type_id = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(type_id, FAIL, "H5Tcopy");
+
+ /* Commit datatype to file */
+ ret = H5Tcommit2(file_id, TYPE1_NAME, type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /* Check reference count on named datatype */
+ ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 1, "H5Oget_info_by_name3");
+
+ /* Create dataspace for dataset */
+ space_id = H5Screate(H5S_SCALAR);
+ CHECK(space_id, FAIL, "H5Screate");
+
+ /* Create dataset */
+ dset_id = H5Dcreate2(file_id, DSET1_NAME, type_id, space_id, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dcreate2");
+
+ /* Check reference count on named datatype */
+ ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 2, "H5Oget_info_by_name3");
+
+ /* Create attribute on dataset */
+ attr_id = H5Acreate2(dset_id, ATTR1_NAME, type_id, space_id, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr_id, FAIL, "H5Acreate2");
+
+ /* Check reference count on named datatype */
+ ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 3, "H5Oget_info_by_name3");
+
+ /* Close attribute */
+ ret = H5Aclose(attr_id);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Delete attribute */
+ ret = H5Adelete(dset_id, ATTR1_NAME);
+ CHECK(ret, FAIL, "H5Adelete");
+
+ /* Check reference count on named datatype */
+ ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 2, "H5Oget_info_by_name3");
+
+ /* Create attribute on dataset */
+ attr_id = H5Acreate2(dset_id, ATTR1_NAME, type_id, space_id, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr_id, FAIL, "H5Acreate2");
+
+ /* Check reference count on named datatype */
+ ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 3, "H5Oget_info_by_name3");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(attr_id, H5T_NATIVE_INT, &data);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr_id);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close dataset */
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close dataspace */
+ ret = H5Sclose(space_id);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close datatype */
+ ret = H5Tclose(type_id);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close file */
+ ret = H5Fclose(file_id);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open file */
+ file_id = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(file_id, FAIL, "H5Fopen");
+
+ /* Open dataset */
+ dset_id = H5Dopen2(file_id, DSET1_NAME, H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dopen2");
+
+ /* Open attribute */
+ attr_id = H5Aopen(dset_id, ATTR1_NAME, H5P_DEFAULT);
+ CHECK(attr_id, FAIL, "H5Aopen");
+
+ /* Read data from the attribute */
+ ret = H5Aread(attr_id, H5T_NATIVE_INT, &rdata);
+ CHECK(ret, FAIL, "H5Aread");
+ VERIFY(data, rdata, "H5Aread");
+
+ /* Close attribute */
+ ret = H5Aclose(attr_id);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close dataset */
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Check reference count on named datatype */
+ ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 3, "H5Oget_info_by_name3");
+
+ /* Unlink the dataset */
+ ret = H5Ldelete(file_id, DSET1_NAME, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Ldelete");
+
+ /* Check reference count on named datatype */
+ ret = H5Oget_info_by_name3(file_id, TYPE1_NAME, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 1, "H5Oget_info_by_name3");
+
+ /* Unlink the named datatype */
+ ret = H5Ldelete(file_id, TYPE1_NAME, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Ldelete");
+
+ /* Close file */
+ ret = H5Fclose(file_id);
+ CHECK(ret, FAIL, "H5Fclose");
+#if 0
+ /* Check size of file */
+ filesize = h5_get_file_size(FILENAME, fapl);
+ VERIFY(filesize, empty_filesize, "h5_get_file_size");
+#endif
+#endif
+} /* test_attr_dtype_shared() */
+
+/****************************************************************
+**
+** test_attr_duplicate_ids(): Test operations with more than
+** one ID handles.
+**
+****************************************************************/
+static void
+test_attr_duplicate_ids(hid_t fapl)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t gid1, gid2; /* Group ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hid_t attr, attr2; /* Attribute ID */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {ATTR1_DIM1};
+ int read_data1[ATTR1_DIM1] = {0}; /* Buffer for reading 1st attribute */
+ int rewrite_data[ATTR1_DIM1] = {1234, -423, 9907256}; /* Test data for rewrite */
+ int i;
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing operations with two ID handles\n"));
+
+ /*-----------------------------------------------------------------------------------
+ * Create an attribute in a new file and fill it with fill value.
+ */
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, DSET1_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Create dataspace for attribute */
+ sid2 = H5Screate_simple(ATTR1_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Try to create an attribute on the dataset */
+ attr = H5Acreate2(dataset, ATTR1_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Open the attribute just created and get a second ID */
+ attr2 = H5Aopen(dataset, ATTR1_NAME, H5P_DEFAULT);
+ CHECK(attr2, FAIL, "H5Aopen");
+
+ /* Close attribute */
+ ret = H5Aclose(attr2);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /*-----------------------------------------------------------------------------------
+ * Reopen the file and verify the fill value for attribute. Also write
+ * some real data.
+ */
+
+ /* Open file */
+ fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid1, FAIL, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid1, DSET1_NAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+
+ /* Open first attribute for the dataset */
+ attr = H5Aopen(dataset, ATTR1_NAME, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Read attribute with fill value */
+ ret = H5Aread(attr, H5T_NATIVE_INT, read_data1);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR1_DIM1; i++)
+ if (0 != read_data1[i])
+ TestErrPrintf("%d: attribute data different: read_data1[%d]=%d\n", __LINE__, i, read_data1[i]);
+
+ /* Open attribute for the second time */
+ attr2 = H5Aopen(dataset, ATTR1_NAME, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Write attribute information */
+ ret = H5Awrite(attr2, H5T_NATIVE_INT, attr_data1);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr2);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /*-----------------------------------------------------------------------------------
+ * Reopen the file and verify the data. Also rewrite the data and verify it.
+ */
+
+ /* Open file */
+ fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid1, FAIL, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid1, DSET1_NAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+
+ /* Open first attribute for the dataset */
+ attr = H5Aopen(dataset, ATTR1_NAME, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Read attribute information */
+ ret = H5Aread(attr, H5T_NATIVE_INT, read_data1);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR1_DIM1; i++)
+ if (attr_data1[i] != read_data1[i])
+ TestErrPrintf("%d: attribute data different: attr_data1[%d]=%d, read_data1[%d]=%d\n", __LINE__, i,
+ attr_data1[i], i, read_data1[i]);
+
+ /* Open attribute for the second time */
+ attr2 = H5Aopen(dataset, ATTR1_NAME, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Write attribute information */
+ ret = H5Awrite(attr2, H5T_NATIVE_INT, rewrite_data);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Read attribute information */
+ ret = H5Aread(attr, H5T_NATIVE_INT, read_data1);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR1_DIM1; i++)
+ if (read_data1[i] != rewrite_data[i])
+ TestErrPrintf("%d: attribute data different: read_data1[%d]=%d, rewrite_data[%d]=%d\n", __LINE__,
+ i, read_data1[i], i, rewrite_data[i]);
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ ret = H5Aclose(attr2);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /*-----------------------------------------------------------------------------------
+ * Verify that the attribute being pointed to by different paths shares
+ * the same data.
+ */
+ /* Open file */
+ fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid1, FAIL, "H5Fopen");
+
+ /* Create a group */
+ gid1 = H5Gcreate2(fid1, GROUP1_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(gid1, FAIL, "H5Gcreate2");
+
+ /* Create hard link to the first group */
+ ret = H5Lcreate_hard(gid1, GROUP1_NAME, H5L_SAME_LOC, GROUP2_NAME, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lcreate_hard");
+
+ /* Try to create an attribute on the group */
+ attr = H5Acreate2(gid1, ATTR2_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Open the hard link just created */
+ gid2 = H5Gopen2(fid1, GROUP2_NAME, H5P_DEFAULT);
+ CHECK(gid2, FAIL, "H5Gopen2");
+
+ /* Open the attribute of the group for the second time */
+ attr2 = H5Aopen(gid2, ATTR2_NAME, H5P_DEFAULT);
+ CHECK(attr2, FAIL, "H5Aopen");
+
+ /* Write attribute information with the first attribute handle */
+ ret = H5Awrite(attr, H5T_NATIVE_INT, attr_data1);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Read attribute information with the second attribute handle */
+ ret = H5Aread(attr2, H5T_NATIVE_INT, read_data1);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR1_DIM1; i++)
+ if (attr_data1[i] != read_data1[i])
+ TestErrPrintf("%d: attribute data different: attr_data1[%d]=%d, read_data1[%d]=%d\n", __LINE__, i,
+ attr_data1[i], i, read_data1[i]);
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ ret = H5Aclose(attr2);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close group */
+ ret = H5Gclose(gid1);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ ret = H5Gclose(gid2);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close Attribute dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_duplicate_ids() */
+
+/****************************************************************
+**
+** test_attr_dense_verify(): Test basic H5A (attribute) code.
+** Verify attributes on object
+**
+****************************************************************/
+static int
+test_attr_dense_verify(hid_t loc_id, unsigned max_attr)
+{
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ hid_t attr; /* Attribute ID */
+ unsigned value; /* Attribute value */
+ unsigned u; /* Local index variable */
+ int old_nerrs; /* Number of errors when entering this check */
+ herr_t ret; /* Generic return value */
+
+ /* Retrieve the current # of reported errors */
+ old_nerrs = nerrors;
+
+ /* Re-open all the attributes by name and verify the data */
+ for (u = 0; u < max_attr; u++) {
+ /* Open attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr = H5Aopen(loc_id, attrname, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Read data from the attribute */
+ ret = H5Aread(attr, H5T_NATIVE_UINT, &value);
+ CHECK(ret, FAIL, "H5Aread");
+ VERIFY(value, u, "H5Aread");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+ } /* end for */
+
+ /* Re-open all the attributes by index and verify the data */
+ for (u = 0; u < max_attr; u++) {
+ ssize_t name_len; /* Length of attribute name */
+ char check_name[ATTR_NAME_LEN]; /* Buffer for checking attribute names */
+
+ /* Open attribute */
+ attr = H5Aopen_by_idx(loc_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)u, H5P_DEFAULT,
+ H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen_by_idx");
+
+ /* Verify Name */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ name_len = H5Aget_name(attr, (size_t)ATTR_NAME_LEN, check_name);
+ VERIFY(name_len, HDstrlen(attrname), "H5Aget_name");
+ if (HDstrcmp(check_name, attrname) != 0)
+ TestErrPrintf("attribute name different: attrname = '%s', should be '%s'\n", check_name,
+ attrname);
+
+ /* Read data from the attribute */
+ ret = H5Aread(attr, H5T_NATIVE_UINT, &value);
+ CHECK(ret, FAIL, "H5Aread");
+ VERIFY(value, u, "H5Aread");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+ } /* end for */
+
+ /* Retrieve current # of errors */
+ if (old_nerrs == nerrors)
+ return (0);
+ else
+ return (-1);
+} /* test_attr_dense_verify() */
+
+/****************************************************************
+**
+** test_attr_dense_create(): Test basic H5A (attribute) code.
+** Tests "dense" attribute storage creation
+**
+****************************************************************/
/*
 * Test creation of "dense" attribute storage: adds attributes one at a time
 * up to the compact-storage limit (max_compact from the DCPL), then one more
 * to force the transition to dense storage, and finally checks that creating
 * a duplicate-named attribute fails.
 *
 * Parameters:
 *      fcpl - file creation property list for the test file
 *      fapl - file access property list for the test file
 *
 * NOTE: the H5O__is_attr_dense_test() probes and file-size checks are
 * compiled out (#if 0) — they rely on library-internal test hooks not
 * available to the API tests.
 */
static void
test_attr_dense_create(hid_t fcpl, hid_t fapl)
{
    hid_t    fid;                   /* HDF5 File ID */
    hid_t    dataset;               /* Dataset ID */
    hid_t    sid;                   /* Dataspace ID */
    hid_t    attr;                  /* Attribute ID */
    hid_t    dcpl;                  /* Dataset creation property list ID */
    char     attrname[NAME_BUF_SIZE]; /* Name of attribute */
    unsigned max_compact;           /* Maximum # of attributes to store compactly */
    unsigned min_dense;             /* Minimum # of attributes to store "densely" */
#if 0
    htri_t is_dense; /* Are attributes stored densely? */
#endif
    unsigned u; /* Local index variable */
#if 0
    h5_stat_size_t empty_filesize; /* Size of empty file */
    h5_stat_size_t filesize;       /* Size of file after modifications */
#endif
    herr_t ret; /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Dense Attribute Storage Creation\n"));

    /* Create file */
    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
    CHECK(fid, FAIL, "H5Fcreate");

    /* Close file */
    ret = H5Fclose(fid);
    CHECK(ret, FAIL, "H5Fclose");
#if 0
    /* Get size of file */
    empty_filesize = h5_get_file_size(FILENAME, fapl);
    if (empty_filesize < 0)
        TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
#endif
    /* Re-open file */
    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
    CHECK(fid, FAIL, "H5Fopen");

    /* Create dataspace for dataset */
    sid = H5Screate(H5S_SCALAR);
    CHECK(sid, FAIL, "H5Screate");

    /* need DCPL to query the group creation properties; copy the global
     * DCPL when one was configured, otherwise start from a fresh default */
    if (dcpl_g == H5P_DEFAULT) {
        dcpl = H5Pcreate(H5P_DATASET_CREATE);
        CHECK(dcpl, FAIL, "H5Pcreate");
    }
    else {
        dcpl = H5Pcopy(dcpl_g);
        CHECK(dcpl, FAIL, "H5Pcopy");
    }

    /* Create a dataset */
    dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dcreate2");

    /* Retrieve limits for compact/dense attribute storage; these drive how
     * many attributes this test creates */
    ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
    CHECK(ret, FAIL, "H5Pget_attr_phase_change");

    /* Close property list */
    ret = H5Pclose(dcpl);
    CHECK(ret, FAIL, "H5Pclose");
#if 0
    /* Check on dataset's attribute storage status */
    is_dense = H5O__is_attr_dense_test(dataset);
    VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
#endif
    /* Add attributes, until just before converting to dense storage */
    for (u = 0; u < max_compact; u++) {
        /* Create attribute; each one stores its own index as its value */
        HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
        attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
        CHECK(attr, FAIL, "H5Acreate2");

        /* Write data into the attribute */
        ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
        CHECK(ret, FAIL, "H5Awrite");

        /* Close attribute */
        ret = H5Aclose(attr);
        CHECK(ret, FAIL, "H5Aclose");
    } /* end for */
#if 0
    /* Check on dataset's attribute storage status */
    is_dense = H5O__is_attr_dense_test(dataset);
    VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
#endif
    /* Add one more attribute, to push into "dense" storage */
    /* Create attribute (u == max_compact after the loop above) */
    HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
    attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(attr, FAIL, "H5Acreate2");
#if 0
    /* Check on dataset's attribute storage status */
    is_dense = H5O__is_attr_dense_test(dataset);
    VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
#endif
    /* Write data into the attribute */
    ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
    CHECK(ret, FAIL, "H5Awrite");

    /* Close attribute */
    ret = H5Aclose(attr);
    CHECK(ret, FAIL, "H5Aclose");
#ifndef NO_PREVENT_CREATE_SAME_ATTRIBUTE_TWICE
    /* Attempt to add attribute again, which should fail (duplicate name);
     * expected-error output is suppressed with H5E_BEGIN/END_TRY */
    H5E_BEGIN_TRY
    {
        attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
    }
    H5E_END_TRY;
    VERIFY(attr, FAIL, "H5Acreate2");
#endif
    /* Close dataspace */
    ret = H5Sclose(sid);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Unlink dataset with attributes */
    ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Ldelete");

    /* Close file */
    ret = H5Fclose(fid);
    CHECK(ret, FAIL, "H5Fclose");
#if 0
    if (h5_using_default_driver(NULL)) {
        /* Check size of file */
        filesize = h5_get_file_size(FILENAME, fapl);
        VERIFY(filesize, empty_filesize, "h5_get_file_size");
    }
#endif
} /* test_attr_dense_create() */
+
+/****************************************************************
+**
+** test_attr_dense_open(): Test basic H5A (attribute) code.
+** Tests opening attributes in "dense" storage
+**
+****************************************************************/
+static void
+test_attr_dense_open(hid_t fcpl, hid_t fapl)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ unsigned max_compact; /* Maximum # of attributes to store compactly */
+ unsigned min_dense; /* Minimum # of attributes to store "densely" */
+#if 0
+ htri_t is_dense; /* Are attributes stored densely? */
+#endif
+ unsigned u; /* Local index variable */
+#if 0
+ h5_stat_size_t empty_filesize; /* Size of empty file */
+ h5_stat_size_t filesize; /* Size of file after modifications */
+#endif
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Opening Attributes in Dense Storage\n"));
+
+ /* Create file */
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+#if 0
+ /* Get size of file */
+ empty_filesize = h5_get_file_size(FILENAME, fapl);
+ if (empty_filesize < 0)
+ TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
+#endif
+ /* Re-open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Create dataspace for dataset */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* need DCPL to query the group creation properties */
+ if (dcpl_g == H5P_DEFAULT) {
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, FAIL, "H5Pcreate");
+ }
+ else {
+ dcpl = H5Pcopy(dcpl_g);
+ CHECK(dcpl, FAIL, "H5Pcopy");
+ }
+
+ /* Enable creation order tracking on attributes, so creation order tests work */
+ ret = H5Pset_attr_creation_order(dcpl, H5P_CRT_ORDER_TRACKED);
+ CHECK(ret, FAIL, "H5Pset_attr_creation_order");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Retrieve limits for compact/dense attribute storage */
+ ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+ /* Close property list */
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+ /* Add attributes, until just before converting to dense storage */
+ for (u = 0; u < max_compact; u++) {
+ /* Create attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Verify attributes written so far */
+ ret = test_attr_dense_verify(dataset, u);
+ CHECK(ret, FAIL, "test_attr_dense_verify");
+ } /* end for */
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+ /* Add one more attribute, to push into "dense" storage */
+ /* Create attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+ /* Write data into the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Verify all the attributes written */
+ ret = test_attr_dense_verify(dataset, (u + 1));
+ CHECK(ret, FAIL, "test_attr_dense_verify");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Unlink dataset with attributes */
+ ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Ldelete");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+#if 0
+ if (h5_using_default_driver(NULL)) {
+ /* Check size of file */
+ filesize = h5_get_file_size(FILENAME, fapl);
+ VERIFY(filesize, empty_filesize, "h5_get_file_size");
+ }
+#endif
+} /* test_attr_dense_open() */
+
+/****************************************************************
+**
+** test_attr_dense_delete(): Test basic H5A (attribute) code.
+** Tests deleting attributes in "dense" storage
+**
+****************************************************************/
/*
 * Test deleting attributes stored in "dense" storage: creates 2*max_compact
 * attributes (well into dense storage), reopens the file, then deletes
 * attributes one at a time — highest index first — until the storage should
 * revert to compact, verifying the survivors after each step.
 *
 * Parameters:
 *      fcpl - file creation property list for the test file
 *      fapl - file access property list for the test file
 *
 * Only runs with the sec2/default VFD.  The H5O__is_attr_dense_test()
 * probes and file-size checks are compiled out (#if 0).
 */
static void
test_attr_dense_delete(hid_t fcpl, hid_t fapl)
{
    hid_t    fid;                   /* HDF5 File ID */
    hid_t    dataset;               /* Dataset ID */
    hid_t    sid;                   /* Dataspace ID */
    hid_t    attr;                  /* Attribute ID */
    hid_t    dcpl;                  /* Dataset creation property list ID */
    char     attrname[NAME_BUF_SIZE]; /* Name of attribute */
    unsigned max_compact;           /* Maximum # of attributes to store compactly */
    unsigned min_dense;             /* Minimum # of attributes to store "densely" */
#if 0
    htri_t is_dense; /* Are attributes stored densely? */
#endif
    unsigned u; /* Local index variable */
#if 0
    h5_stat_size_t empty_filesize; /* Size of empty file */
    h5_stat_size_t filesize;       /* Size of file after modifications */
#endif
    H5O_info2_t oinfo;                                 /* Object info */
    int         use_min_dset_oh = (dcpl_g != H5P_DEFAULT); /* Nonzero when minimized dataset object headers are in use */
    herr_t      ret;                                   /* Generic return value */

    /* Only run this test for sec2/default driver */
    if (!h5_using_default_driver(NULL))
        return;

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Deleting Attributes in Dense Storage\n"));

    if (use_min_dset_oh) { /* using minimized dataset headers */
        /* modify fcpl...
         * sidestep "bug" where file space is lost with minimized dset ohdrs
         */
        fcpl = H5Pcopy(fcpl); /* local copy only; caller's fcpl is untouched */
        CHECK(fcpl, FAIL, "H5Pcopy");
        ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_FSM_AGGR, TRUE, 1);
        CHECK(ret, FAIL, "H5Pset_file_space_strategy");
    }
    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
    CHECK(fid, FAIL, "H5Fcreate");
    if (use_min_dset_oh)
        CHECK(H5Pclose(fcpl), FAIL, "H5Pclose");

    /* Close file */
    ret = H5Fclose(fid);
    CHECK(ret, FAIL, "H5Fclose");
#if 0
    /* Get size of file */
    empty_filesize = h5_get_file_size(FILENAME, fapl);
    if (empty_filesize < 0)
        TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
#endif
    /* Re-open file */
    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
    CHECK(fid, FAIL, "H5Fopen");

    /* Create dataspace for dataset */
    sid = H5Screate(H5S_SCALAR);
    CHECK(sid, FAIL, "H5Screate");

    /* need DCPL to query the group creation properties */
    if (use_min_dset_oh) {
        dcpl = H5Pcopy(dcpl_g);
        CHECK(dcpl, FAIL, "H5Pcopy");
    }
    else {
        dcpl = H5Pcreate(H5P_DATASET_CREATE);
        CHECK(dcpl, FAIL, "H5Pcreate");
    }

    /* Enable creation order tracking on attributes, so creation order tests work
     * (test_attr_dense_verify() opens attributes by H5_INDEX_CRT_ORDER) */
    ret = H5Pset_attr_creation_order(dcpl, H5P_CRT_ORDER_TRACKED);
    CHECK(ret, FAIL, "H5Pset_attr_creation_order");

    /* Create a dataset */
    dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dcreate2");

    /* Retrieve limits for compact/dense attribute storage */
    ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
    CHECK(ret, FAIL, "H5Pget_attr_phase_change");

    /* Close property list */
    ret = H5Pclose(dcpl);
    CHECK(ret, FAIL, "H5Pclose");
#if 0
    /* Check on dataset's attribute storage status */
    is_dense = H5O__is_attr_dense_test(dataset);
    VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
#endif
    /* Add attributes, until well into dense storage (twice the compact limit) */
    for (u = 0; u < (max_compact * 2); u++) {
        /* Create attribute; each one stores its own index as its value */
        HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
        attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
        CHECK(attr, FAIL, "H5Acreate2");

        /* Write data into the attribute */
        ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
        CHECK(ret, FAIL, "H5Awrite");

        /* Close attribute */
        ret = H5Aclose(attr);
        CHECK(ret, FAIL, "H5Aclose");

        /* Check # of attributes */
        ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
        CHECK(ret, FAIL, "H5Oget_info3");
        VERIFY(oinfo.num_attrs, (u + 1), "H5Oget_info3");
    } /* end for */
#if 0
    /* Check on dataset's attribute storage status */
    is_dense = H5O__is_attr_dense_test(dataset);
    VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
#endif
    /* Close dataspace */
    ret = H5Sclose(sid);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close file */
    ret = H5Fclose(fid);
    CHECK(ret, FAIL, "H5Fclose");

    /* Re-open file */
    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
    CHECK(fid, FAIL, "H5Fopen");

    /* Open dataset */
    dataset = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dopen2");

    /* Delete attributes until the attributes revert to compact storage again.
     * The initial u-- moves u from the loop-exit value (2*max_compact) to the
     * highest existing attribute index.
     * NOTE(review): u is unsigned, so this countdown assumes min_dense > 0
     * (true for the library's default phase-change values) — confirm if fcpl
     * variants can set min_dense to 0. */
    for (u--; u >= min_dense; u--) {
        /* Delete attribute */
        HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
        ret = H5Adelete(dataset, attrname);
        CHECK(ret, FAIL, "H5Adelete");

        /* Verify attributes still left (attrs 0 .. u-1 remain) */
        ret = test_attr_dense_verify(dataset, u);
        CHECK(ret, FAIL, "test_attr_dense_verify");
    } /* end for */
#if 0
    /* Check on dataset's attribute storage status */
    is_dense = H5O__is_attr_dense_test(dataset);
    VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
#endif
    /* Delete one more attribute, which should cause reversion to compact storage
     * (u == min_dense - 1 after the loop above) */
    HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
    ret = H5Adelete(dataset, attrname);
    CHECK(ret, FAIL, "H5Adelete");
#if 0
    /* Check on dataset's attribute storage status */
    is_dense = H5O__is_attr_dense_test(dataset);
    VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
#endif
    /* Verify attributes still left.
     * NOTE(review): u attributes (0..u-1) remain here but only the first u-1
     * are verified — appears to be a deliberate, benign under-check; confirm
     * against the upstream tattr.c test. */
    ret = test_attr_dense_verify(dataset, (u - 1));
    CHECK(ret, FAIL, "test_attr_dense_verify");

    /* Delete another attribute, to verify deletion in compact storage */
    HDsnprintf(attrname, sizeof(attrname), "attr %02u", (u - 1));
    ret = H5Adelete(dataset, attrname);
    CHECK(ret, FAIL, "H5Adelete");
#if 0
    /* Check on dataset's attribute storage status */
    is_dense = H5O__is_attr_dense_test(dataset);
    VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
#endif
    /* Verify attributes still left */
    ret = test_attr_dense_verify(dataset, (u - 2));
    CHECK(ret, FAIL, "test_attr_dense_verify");

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Unlink dataset with attributes */
    ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Ldelete");

    /* Close file */
    ret = H5Fclose(fid);
    CHECK(ret, FAIL, "H5Fclose");
#if 0
    if (h5_using_default_driver(NULL)) {
        /* Check size of file */
        filesize = h5_get_file_size(FILENAME, fapl);
        VERIFY(filesize, empty_filesize, "h5_get_file_size");
    }
#endif
} /* test_attr_dense_delete() */
+
+/****************************************************************
+**
+** test_attr_dense_rename(): Test basic H5A (attribute) code.
+** Tests renaming attributes in "dense" storage
+**
+****************************************************************/
+static void
+test_attr_dense_rename(hid_t fcpl, hid_t fapl)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char new_attrname[NAME_BUF_SIZE]; /* New name of attribute */
+ unsigned max_compact; /* Maximum # of attributes to store compactly */
+ unsigned min_dense; /* Minimum # of attributes to store "densely" */
+#if 0
+ htri_t is_dense; /* Are attributes stored densely? */
+ h5_stat_size_t empty_filesize; /* Size of empty file */
+ h5_stat_size_t filesize; /* Size of file after modifications */
+#endif
+ H5O_info2_t oinfo; /* Object info */
+ unsigned u; /* Local index variable */
+ int use_min_dset_oh = (dcpl_g != H5P_DEFAULT); /* Using minimized dataset object headers? */
+ unsigned use_corder; /* Track creation order or not */
+ herr_t ret; /* Generic return value */
+
+ /* Only run this test for sec2/default driver */
+ if (!h5_using_default_driver(NULL))
+ return;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Renaming Attributes in Dense Storage\n"));
+
+ if (use_min_dset_oh) { /* using minimized dataset headers */
+ /* modify fcpl...
+ * sidestep "bug" where file space is lost with minimized dset ohdrs
+ */
+ fcpl = H5Pcopy(fcpl);
+ CHECK(fcpl, FAIL, "H5Pcopy");
+ ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_FSM_AGGR, TRUE, 1);
+ CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+ }
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+ CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+ if (use_min_dset_oh)
+ CHECK(H5Pclose(fcpl), FAIL, "H5Pclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+#if 0
+ /* Get size of file */
+ empty_filesize = h5_get_file_size(FILENAME, fapl);
+ if (empty_filesize < 0)
+ TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
+#endif
+ /* Re-open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, H5I_INVALID_HID, "H5Fopen");
+
+ /* Create dataspace for dataset */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, H5I_INVALID_HID, "H5Screate");
+
+ /* Need a DCPL to query the dataset's attribute phase change properties */
+ if (use_min_dset_oh) {
+ dcpl = H5Pcopy(dcpl_g);
+ CHECK(dcpl, H5I_INVALID_HID, "H5Pcopy");
+ }
+ else {
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, H5I_INVALID_HID, "H5Pcreate");
+ }
+
+ /* Retrieve limits for compact/dense attribute storage */
+ ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+ /* Run the rename test twice: without, then with, attribute creation order tracking */
+ for (use_corder = FALSE; use_corder <= TRUE; use_corder++) {
+
+ if (use_corder) {
+ ret = H5Pset_attr_creation_order(dcpl, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED);
+ CHECK(ret, FAIL, "H5Pset_attr_creation_order");
+ }
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+ /* Add attributes, until well into dense storage */
+ for (u = 0; u < (max_compact * 2); u++) {
+ /* Create attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, H5I_INVALID_HID, "H5Acreate2");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Build the new attribute name */
+ HDsnprintf(new_attrname, sizeof(new_attrname), "new attr %02u", u);
+
+ /* Rename attribute */
+ ret = H5Arename_by_name(fid, DSET1_NAME, attrname, new_attrname, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Arename_by_name");
+
+ /* Check # of attributes */
+ ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
+ CHECK(ret, FAIL, "H5Oget_info3");
+ VERIFY(oinfo.num_attrs, (u + 1), "H5Oget_info3");
+ } /* end for */
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ if (!use_corder) {
+ /* Unlink dataset with attributes */
+ ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Ldelete");
+ }
+
+ } /* end for use_corder */
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close property list */
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, H5I_INVALID_HID, "H5Fopen");
+
+ /* Open dataset (from the creation-order pass, which was not unlinked above) */
+ dataset = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dopen2");
+
+ /* Verify renamed attributes */
+ for (u = 0; u < (max_compact * 2); u++) {
+ unsigned value; /* Attribute value */
+
+ /* Open attribute */
+ HDsnprintf(attrname, sizeof(attrname), "new attr %02u", u);
+ attr = H5Aopen(dataset, attrname, H5P_DEFAULT);
+ CHECK(attr, H5I_INVALID_HID, "H5Aopen");
+
+ /* Read data from the attribute */
+ ret = H5Aread(attr, H5T_NATIVE_UINT, &value);
+ CHECK(ret, FAIL, "H5Aread");
+ VERIFY(value, u, "H5Aread");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+ } /* end for */
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Unlink dataset with attributes */
+ ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Ldelete");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+#if 0
+ if (h5_using_default_driver(NULL)) {
+ /* Check size of file */
+ filesize = h5_get_file_size(FILENAME, fapl);
+ VERIFY(filesize, empty_filesize, "h5_get_file_size");
+ }
+#endif
+} /* test_attr_dense_rename() */
+
+/****************************************************************
+**
+** test_attr_dense_unlink(): Test basic H5A (attribute) code.
+** Tests unlinking object with attributes in "dense" storage
+**
+****************************************************************/
+static void
+test_attr_dense_unlink(hid_t fcpl, hid_t fapl)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ unsigned max_compact; /* Maximum # of attributes to store compactly */
+ unsigned min_dense; /* Minimum # of attributes to store "densely" */
+#if 0
+ htri_t is_dense; /* Are attributes stored densely? */
+ size_t mesg_count; /* # of shared messages */
+ h5_stat_size_t empty_filesize; /* Size of empty file */
+ h5_stat_size_t filesize; /* Size of file after modifications */
+#endif
+ H5O_info2_t oinfo; /* Object info */
+ unsigned u; /* Local index variable */
+ int use_min_dset_oh = (dcpl_g != H5P_DEFAULT); /* Using minimized dataset object headers? */
+ herr_t ret; /* Generic return value */
+
+ /* Only run this test for sec2/default driver */
+ if (!h5_using_default_driver(NULL))
+ return;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Unlinking Object with Attributes in Dense Storage\n"));
+
+ if (use_min_dset_oh) { /* using minimized dataset headers */
+ /* modify fcpl...
+ * sidestep "bug" where file space is lost with minimized dset ohdrs
+ */
+ fcpl = H5Pcopy(fcpl);
+ CHECK(fcpl, FAIL, "H5Pcopy");
+ ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_FSM_AGGR, TRUE, 1);
+ CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+ }
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+ CHECK(fid, FAIL, "H5Fcreate");
+ if (use_min_dset_oh)
+ CHECK(H5Pclose(fcpl), FAIL, "H5Pclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+#if 0
+ /* Get size of empty file */
+ empty_filesize = h5_get_file_size(FILENAME, fapl);
+ if (empty_filesize < 0)
+ TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
+#endif
+ /* Re-open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Create dataspace for dataset */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Need a DCPL to query the dataset's attribute phase change properties */
+ if (use_min_dset_oh) {
+ dcpl = H5Pcopy(dcpl_g);
+ CHECK(dcpl, FAIL, "H5Pcopy");
+ }
+ else {
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, FAIL, "H5Pcreate");
+ }
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Retrieve limits for compact/dense attribute storage */
+ ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+ /* Close property list */
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+ /* Add attributes, until well into dense storage */
+ for (u = 0; u < (max_compact * 2); u++) {
+ /* Create attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Check # of attributes */
+ ret = H5Oget_info3(dataset, &oinfo, H5O_INFO_NUM_ATTRS);
+ CHECK(ret, FAIL, "H5Oget_info3");
+ VERIFY(oinfo.num_attrs, (u + 1), "H5Oget_info3");
+ } /* end for */
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Unlink dataset */
+ ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Ldelete");
+#if 0
+ /* Check that no shared attribute messages remain in the file after the unlink */
+ ret = H5F__get_sohm_mesg_count_test(fid, H5O_ATTR_ID, &mesg_count);
+ CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+ VERIFY(mesg_count, 0, "H5F__get_sohm_mesg_count_test");
+#endif
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+#if 0
+ if (h5_using_default_driver(NULL)) {
+ /* Check size of file */
+ filesize = h5_get_file_size(FILENAME, fapl);
+ VERIFY(filesize, empty_filesize, "h5_get_file_size");
+ }
+#endif
+} /* test_attr_dense_unlink() */
+
+/****************************************************************
+**
+** test_attr_dense_limits(): Test basic H5A (attribute) code.
+** Tests attribute in "dense" storage limits
+**
+****************************************************************/
+static void
+test_attr_dense_limits(hid_t fcpl, hid_t fapl)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ unsigned max_compact, rmax_compact; /* Maximum # of attributes to store compactly */
+ unsigned min_dense, rmin_dense; /* Minimum # of attributes to store "densely" */
+#if 0
+ htri_t is_dense; /* Are attributes stored densely? */
+#endif
+ unsigned u; /* Local index variable */
+#if 0
+ h5_stat_size_t empty_filesize; /* Size of empty file */
+ h5_stat_size_t filesize; /* Size of file after modifications */
+#endif
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Phase Change Limits For Attributes in Dense Storage\n"));
+
+ /* Create file */
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+#if 0
+ /* Get size of file */
+ empty_filesize = h5_get_file_size(FILENAME, fapl);
+ if (empty_filesize < 0)
+ TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
+#endif
+ /* Re-open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Create dataspace for dataset */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Need a DCPL to set/query the dataset's attribute phase change properties */
+ if (dcpl_g == H5P_DEFAULT) {
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, FAIL, "H5Pcreate");
+ }
+ else {
+ dcpl = H5Pcopy(dcpl_g);
+ CHECK(dcpl, FAIL, "H5Pcopy");
+ }
+
+ /* Change limits on compact/dense attribute storage.
+ * max_compact == 0 forces every attribute straight into dense storage;
+ * min_dense == 0 means deleting the last attribute reverts to compact.
+ */
+ max_compact = 0;
+ min_dense = 0;
+ ret = H5Pset_attr_phase_change(dcpl, max_compact, min_dense);
+ CHECK(ret, FAIL, "H5Pset_attr_phase_change");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Retrieve limits for compact/dense attribute storage and verify the round-trip */
+ ret = H5Pget_attr_phase_change(dcpl, &rmax_compact, &rmin_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+ VERIFY(rmax_compact, max_compact, "H5Pget_attr_phase_change");
+ VERIFY(rmin_dense, min_dense, "H5Pget_attr_phase_change");
+
+ /* Close property list */
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+
+ /* Add first attribute, which should be immediately in dense storage */
+
+ /* Create attribute */
+ u = 0;
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+
+ /* Add second attribute, to allow deletions to be checked easily */
+
+ /* Create attribute */
+ u = 1;
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+
+ /* Delete second attribute, attributes should still be stored densely */
+
+ /* Delete attribute */
+ ret = H5Adelete(dataset, attrname);
+ CHECK(ret, FAIL, "H5Adelete");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+
+ /* Delete first attribute, attributes should not be stored densely */
+
+ /* Delete attribute */
+ u = 0;
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ ret = H5Adelete(dataset, attrname);
+ CHECK(ret, FAIL, "H5Adelete");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Unlink dataset */
+ ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Ldelete");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+#if 0
+ if (h5_using_default_driver(NULL)) {
+ /* Check size of file */
+ filesize = h5_get_file_size(FILENAME, fapl);
+ VERIFY(filesize, empty_filesize, "h5_get_file_size");
+ }
+#endif
+} /* test_attr_dense_limits() */
+
+/****************************************************************
+**
+** test_attr_dense_dup_ids(): Test operations with multiple ID
+** handles with "dense" attribute storage creation
+**
+****************************************************************/
+static void
+test_attr_dense_dup_ids(hid_t fcpl, hid_t fapl)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t gid1, gid2; /* Group ID */
+ hid_t sid, sid2; /* Dataspace ID */
+ hid_t attr, attr2, add_attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ hsize_t dims[] = {ATTR1_DIM1};
+ int read_data1[ATTR1_DIM1] = {0}; /* Buffer for reading attribute */
+ int rewrite_data[ATTR1_DIM1] = {1234, -423, 9907256}; /* Test data for rewrite */
+ unsigned scalar_data = 1317; /* scalar data for attribute */
+ unsigned read_scalar; /* variable for reading attribute*/
+ unsigned max_compact; /* Maximum # of attributes to store compactly */
+ unsigned min_dense; /* Minimum # of attributes to store "densely" */
+#if 0
+ htri_t is_dense; /* Are attributes stored densely? */
+#endif
+ unsigned u, i; /* Local index variable */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing operations with two IDs for Dense Storage\n"));
+
+ /*-----------------------------------------------------------------------------------
+ * Create an attribute in dense storage and fill it with fill value.
+ */
+ /* Create file */
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Create dataspace for dataset */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* need DCPL to query the group creation properties */
+ if (dcpl_g == H5P_DEFAULT) {
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, FAIL, "H5Pcreate");
+ }
+ else {
+ dcpl = H5Pcopy(dcpl_g);
+ CHECK(dcpl, FAIL, "H5Pcopy");
+ }
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Retrieve limits for compact/dense attribute storage */
+ ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+ /* Close property list */
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+ /* Add attributes, until just before converting to dense storage */
+ for (u = 0; u < max_compact; u++) {
+ /* Create attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+ } /* end for */
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+ /* Add one more attribute, to push into "dense" storage */
+ /* Create dataspace for attribute */
+ sid2 = H5Screate_simple(ATTR1_RANK, dims, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Create attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr = H5Acreate2(dataset, attrname, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+ /* Open the attribute just created and get a second ID */
+ attr2 = H5Aopen(dataset, attrname, H5P_DEFAULT);
+ CHECK(attr2, FAIL, "H5Aopen");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ ret = H5Aclose(attr2);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /*-----------------------------------------------------------------------------------
+ * Reopen the file and verify the fill value for attribute. Also write
+ * some real data.
+ */
+ /* Open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+ /* Open first attribute for the dataset */
+ attr = H5Aopen(dataset, attrname, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Read attribute with fill value */
+ ret = H5Aread(attr, H5T_NATIVE_INT, read_data1);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR1_DIM1; i++)
+ if (0 != read_data1[i])
+ TestErrPrintf("%d: attribute data different: read_data1[%d]=%d\n", __LINE__, i, read_data1[i]);
+
+ /* Open attribute for the second time */
+ attr2 = H5Aopen(dataset, attrname, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Write attribute information */
+ ret = H5Awrite(attr2, H5T_NATIVE_INT, attr_data1);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr2);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /*-----------------------------------------------------------------------------------
+ * Reopen the file and verify the data. Also rewrite the data and verify it.
+ */
+ /* Open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+ /* Open first attribute for the dataset */
+ attr = H5Aopen(dataset, attrname, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Read attribute information */
+ ret = H5Aread(attr, H5T_NATIVE_INT, read_data1);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR1_DIM1; i++)
+ if (attr_data1[i] != read_data1[i])
+ TestErrPrintf("%d: attribute data different: attr_data1[%d]=%d, read_data1[%d]=%d\n", __LINE__, i,
+ attr_data1[i], i, read_data1[i]);
+
+ /* Open attribute for the second time */
+ attr2 = H5Aopen(dataset, attrname, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Write attribute information with the second ID */
+ ret = H5Awrite(attr2, H5T_NATIVE_INT, rewrite_data);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Read attribute information with the first ID */
+ ret = H5Aread(attr, H5T_NATIVE_INT, read_data1);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR1_DIM1; i++)
+ if (read_data1[i] != rewrite_data[i])
+ TestErrPrintf("%d: attribute data different: read_data1[%d]=%d, rewrite_data[%d]=%d\n", __LINE__,
+ i, read_data1[i], i, rewrite_data[i]);
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ ret = H5Aclose(attr2);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /*-----------------------------------------------------------------------------------
+ * Open the attribute by index. Verify the data is shared when the attribute
+ * is opened twice.
+ */
+ /* Open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+ /* Open first attribute for the dataset */
+ attr = H5Aopen_by_idx(dataset, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)4, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Open attribute for the second time */
+ attr2 = H5Aopen_by_idx(dataset, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)4, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Write attribute information with the second ID */
+ ret = H5Awrite(attr2, H5T_NATIVE_UINT, &scalar_data);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Read attribute information with the first ID */
+ ret = H5Aread(attr, H5T_NATIVE_INT, &read_scalar);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ if (read_scalar != scalar_data)
+ TestErrPrintf("%d: attribute data different: read_scalar=%d, scalar_data=%d\n", __LINE__, read_scalar,
+ scalar_data);
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ ret = H5Aclose(attr2);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /*-----------------------------------------------------------------------------------
+ * Open one attribute. As it remains open, delete some attributes. The
+ * attribute storage should switch from dense to compact. Then open the
+ * same attribute for the second time and verify that the attribute data
+ * is shared.
+ */
+ /* Open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+ /* Open attribute of the dataset for the first time */
+ attr = H5Aopen_by_idx(dataset, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)2, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Delete a few attributes until the storage switches to compact */
+ for (u = max_compact; u >= min_dense - 1; u--) {
+ ret = H5Adelete_by_idx(dataset, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)u, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Adelete_by_idx");
+ }
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+ /* Open attribute for the second time */
+ attr2 = H5Aopen_by_idx(dataset, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)2, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Write attribute information with the second ID */
+ ret = H5Awrite(attr2, H5T_NATIVE_UINT, &scalar_data);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Read attribute information with the first ID */
+ ret = H5Aread(attr, H5T_NATIVE_INT, &read_scalar);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ if (read_scalar != scalar_data)
+ TestErrPrintf("%d: attribute data different: read_scalar=%d, scalar_data=%d\n", __LINE__, read_scalar,
+ scalar_data);
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ ret = H5Aclose(attr2);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /*-----------------------------------------------------------------------------------
+ * Open one attribute. As it remains open, create some attributes. The
+ * attribute storage should switch from compact to dense. Then open the
+ * same attribute for the second time and verify that the attribute data
+ * is shared.
+ */
+ /* Open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+ /* Open attribute of the dataset for the first time */
+ attr = H5Aopen_by_idx(dataset, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)3, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Delete a few attributes until the storage switches to compact */
+ for (u = min_dense - 1; u <= max_compact; u++) {
+ /* Create attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ add_attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(add_attr, FAIL, "H5Acreate2");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(add_attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(add_attr);
+ CHECK(ret, FAIL, "H5Aclose");
+ }
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+ /* Open attribute for the second time */
+ attr2 = H5Aopen_by_idx(dataset, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)3, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Write attribute information with the second ID */
+ ret = H5Awrite(attr2, H5T_NATIVE_UINT, &scalar_data);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Read attribute information with the first ID */
+ ret = H5Aread(attr, H5T_NATIVE_INT, &read_scalar);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ if (read_scalar != scalar_data)
+ TestErrPrintf("%d: attribute data different: read_scalar=%d, scalar_data=%d\n", __LINE__, read_scalar,
+ scalar_data);
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ ret = H5Aclose(attr2);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /*-----------------------------------------------------------------------------------
+ * Verify that the attribute being pointed to by different paths shares
+ * the same data.
+ */
+ /* Open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Create a group */
+ gid1 = H5Gcreate2(fid, GROUP1_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(gid1, FAIL, "H5Gcreate2");
+
+ /* Create hard link to the first group */
+ ret = H5Lcreate_hard(gid1, GROUP1_NAME, H5L_SAME_LOC, GROUP2_NAME, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lcreate_hard");
+
+ /* Add attributes, until just before converting to dense storage */
+ for (u = 0; u < max_compact; u++) {
+ /* Create attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr = H5Acreate2(gid1, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+ } /* end for */
+
+ /* Try to create another attribute to make dense storage */
+ attr = H5Acreate2(gid1, ATTR2_NAME, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+ /* Check on group's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(gid1);
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+ /* Open the hard link just created */
+ gid2 = H5Gopen2(fid, GROUP2_NAME, H5P_DEFAULT);
+ CHECK(gid2, FAIL, "H5Gopen2");
+
+ /* Open the attribute of the group for the second time */
+ attr2 = H5Aopen(gid2, ATTR2_NAME, H5P_DEFAULT);
+ CHECK(attr2, FAIL, "H5Aopen");
+
+ /* Write attribute information with the first attribute handle */
+ ret = H5Awrite(attr, H5T_NATIVE_INT, attr_data1);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Read attribute information with the second attribute handle */
+ ret = H5Aread(attr2, H5T_NATIVE_INT, read_data1);
+ CHECK(ret, FAIL, "H5Aread");
+
+ /* Verify values read in */
+ for (i = 0; i < ATTR1_DIM1; i++)
+ if (attr_data1[i] != read_data1[i])
+ TestErrPrintf("%d: attribute data different: attr_data1[%d]=%d, read_data1[%d]=%d\n", __LINE__, i,
+ attr_data1[i], i, read_data1[i]);
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ ret = H5Aclose(attr2);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close group */
+ ret = H5Gclose(gid1);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ ret = H5Gclose(gid2);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close Attribute dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_dense_dup_ids() */
+
+/****************************************************************
+**
+** test_attr_big(): Test basic H5A (attribute) code.
+** Tests storing "big" attribute in dense storage immediately, if available
+** (A "big" attribute uses the ATTR6_DIM1 x ATTR6_DIM2 x ATTR6_DIM3
+** dataspace; creating one is expected to push the object's attribute
+** storage straight into dense form, unless attribute messages are
+** shared or the file-format low bound is too old to support it.)
+**
+****************************************************************/
+static void
+test_attr_big(hid_t fcpl, hid_t fapl)
+{
+    hid_t   fid;     /* HDF5 File ID */
+    hid_t   dataset; /* Dataset ID */
+    hid_t   sid;     /* Dataspace ID */
+    hid_t   big_sid; /* "Big" dataspace ID */
+    hsize_t dims[ATTR6_RANK] = {ATTR6_DIM1, ATTR6_DIM2, ATTR6_DIM3}; /* Attribute dimensions */
+    hid_t        attr;                    /* Attribute ID */
+    hid_t        dcpl;                    /* Dataset creation property list ID */
+    char         attrname[NAME_BUF_SIZE]; /* Name of attribute */
+    unsigned     max_compact;             /* Maximum # of attributes to store compactly */
+    unsigned     min_dense;               /* Minimum # of attributes to store "densely" */
+    unsigned     nshared_indices;         /* # of shared message indices */
+    H5F_libver_t low, high;               /* File format bounds */
+#if 0
+    htri_t is_empty;   /* Are there any attributes? */
+    htri_t is_dense;   /* Are attributes stored densely? */
+#endif
+    unsigned u; /* Local index variable */
+#if 0
+    h5_stat_size_t empty_filesize;        /* Size of empty file */
+    h5_stat_size_t filesize;              /* Size of file after modifications */
+#endif
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Storing 'Big' Attributes in Dense Storage\n"));
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+#if 0
+    /* Get size of file */
+    empty_filesize = h5_get_file_size(FILENAME, fapl);
+    if (empty_filesize < 0)
+        TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
+#endif
+    /* Re-open file */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Create dataspace for dataset & "small" attributes */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create "big" dataspace for "big" attributes */
+    big_sid = H5Screate_simple(ATTR6_RANK, dims, NULL);
+    CHECK(big_sid, FAIL, "H5Screate_simple");
+
+    /* need DCPL to query the group creation properties */
+    /* (dcpl_g is a file-scope DCPL presumably configured by the test
+     *  harness; H5P_DEFAULT means no special creation properties apply) */
+    if (dcpl_g == H5P_DEFAULT) {
+        dcpl = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(dcpl, FAIL, "H5Pcreate");
+    }
+    else {
+        dcpl = H5Pcopy(dcpl_g);
+        CHECK(dcpl, FAIL, "H5Pcopy");
+    }
+
+    /* Retrieve limits for compact/dense attribute storage */
+    ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+    CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+    /* Retrieve # of shared message indices (ie. whether attributes are shared or not) */
+    ret = H5Pget_shared_mesg_nindexes(fcpl, &nshared_indices);
+    CHECK(ret, FAIL, "H5Pget_shared_mesg_nindexes");
+
+    /* Retrieve the format bounds for creating objects in the file */
+    ret = H5Pget_libver_bounds(fapl, &low, &high);
+    CHECK(ret, FAIL, "H5Pget_libver_bounds");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Close property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+#if 0
+    /* Check on dataset's attribute storage status */
+    is_empty = H5O__is_attr_empty_test(dataset);
+    VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+    is_dense = H5O__is_attr_dense_test(dataset);
+    VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+
+    /* Add first "small" attribute, which should be in compact storage */
+
+    /* Create attribute */
+    u = 0;
+    HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+    attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, FAIL, "H5Acreate2");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+#if 0
+    /* Check on dataset's attribute storage status */
+    is_empty = H5O__is_attr_empty_test(dataset);
+    VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+    is_dense = H5O__is_attr_dense_test(dataset);
+    VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+
+    /* Add second "small" attribute, which should stay in compact storage */
+
+    /* Create attribute */
+    u = 1;
+    HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+    attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, FAIL, "H5Acreate2");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+#if 0
+    /* Check on dataset's attribute storage status */
+    is_empty = H5O__is_attr_empty_test(dataset);
+    VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+    is_dense = H5O__is_attr_dense_test(dataset);
+    VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+
+    /* Add first "big" attribute, which should push storage into dense form */
+
+    /* Create attribute */
+    u = 2;
+    HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+    attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, big_sid, H5P_DEFAULT, H5P_DEFAULT);
+    /* Creating a "big" attribute can fail when the file-format low bound is
+     * older than the latest format; only run the dense-storage checks when
+     * creation succeeded (always expected for H5F_LIBVER_LATEST) */
+    if (low == H5F_LIBVER_LATEST || attr >= 0) {
+        CHECK(attr, FAIL, "H5Acreate2");
+
+        /* Close attribute */
+        ret = H5Aclose(attr);
+        CHECK(ret, FAIL, "H5Aclose");
+
+        /* Check on dataset's attribute storage status */
+        /* (when attributes are shared, the "big" attribute goes into the shared
+         * message heap instead of forcing the attribute storage into the dense
+         * form - QAK)
+         */
+#if 0
+        is_empty = H5O__is_attr_empty_test(dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(dataset);
+        VERIFY(is_dense, (nshared_indices ? FALSE : TRUE), "H5O__is_attr_dense_test");
+#endif
+
+        /* Add second "big" attribute, which should leave storage in dense form */
+
+        /* Create attribute */
+        u = 3;
+        HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+        attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, big_sid, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(attr, FAIL, "H5Acreate2");
+
+        /* Close attribute */
+        ret = H5Aclose(attr);
+        CHECK(ret, FAIL, "H5Aclose");
+
+        /* Check on dataset's attribute storage status */
+        /* (when attributes are shared, the "big" attribute goes into the shared
+         * message heap instead of forcing the attribute storage into the dense
+         * form - QAK)
+         */
+#if 0
+        is_empty = H5O__is_attr_empty_test(dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(dataset);
+        VERIFY(is_dense, (nshared_indices ? FALSE : TRUE), "H5O__is_attr_dense_test");
+#endif
+
+        /* Delete second "small" attribute, attributes should still be stored densely */
+
+        /* Delete attribute */
+        u = 1;
+        HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+        ret = H5Adelete(dataset, attrname);
+        CHECK(ret, FAIL, "H5Adelete");
+#if 0
+        /* Check on dataset's attribute storage status */
+        is_empty = H5O__is_attr_empty_test(dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(dataset);
+        VERIFY(is_dense, (nshared_indices ? FALSE : TRUE), "H5O__is_attr_dense_test");
+#endif
+
+        /* Delete second "big" attribute, attributes should still be stored densely */
+
+        /* Delete attribute */
+        u = 3;
+        HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+        ret = H5Adelete(dataset, attrname);
+        CHECK(ret, FAIL, "H5Adelete");
+#if 0
+        /* Check on dataset's attribute storage status */
+        is_empty = H5O__is_attr_empty_test(dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(dataset);
+        VERIFY(is_dense, (nshared_indices ? FALSE : TRUE), "H5O__is_attr_dense_test");
+#endif
+
+        /* Delete first "big" attribute, attributes should _not_ be stored densely */
+
+        /* Delete attribute */
+        u = 2;
+        HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+        ret = H5Adelete(dataset, attrname);
+        CHECK(ret, FAIL, "H5Adelete");
+#if 0
+        /* Check on dataset's attribute storage status */
+        is_empty = H5O__is_attr_empty_test(dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(dataset);
+        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+
+        /* Delete first "small" attribute, should be no attributes now */
+
+        /* Delete attribute */
+        u = 0;
+        HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+        ret = H5Adelete(dataset, attrname);
+        CHECK(ret, FAIL, "H5Adelete");
+#if 0
+        /* Check on dataset's attribute storage status */
+        is_empty = H5O__is_attr_empty_test(dataset);
+        VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+#endif
+    } /* end if */
+#if 0
+    else {
+        /* Shouldn't be able to create "big" attributes with older version of format */
+        VERIFY(attr, FAIL, "H5Acreate2");
+
+        /* Check on dataset's attribute storage status */
+        /* (when attributes are shared, the "big" attribute goes into the shared
+         * message heap instead of forcing the attribute storage into the dense
+         * form - QAK)
+         */
+        is_empty = H5O__is_attr_empty_test(dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(dataset);
+        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+    } /* end else */
+#endif
+
+    /* Close dataspaces */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Sclose(big_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Unlink dataset */
+    ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Ldelete");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+#if 0
+    if (h5_using_default_driver(NULL)) {
+        /* Check size of file */
+        filesize = h5_get_file_size(FILENAME, fapl);
+        VERIFY(filesize, empty_filesize, "h5_get_file_size");
+    }
+#endif
+} /* test_attr_big() */
+
+/****************************************************************
+**
+** test_attr_null_space(): Test basic H5A (attribute) code.
+** Tests storing attribute with "null" dataspace
+** (A null dataspace holds no elements, so H5Aread/H5Awrite on such
+** an attribute must succeed while leaving the user buffer untouched.)
+**
+****************************************************************/
+static void
+test_attr_null_space(hid_t fcpl, hid_t fapl)
+{
+    hid_t    fid;                     /* HDF5 File ID */
+    hid_t    dataset;                 /* Dataset ID */
+    hid_t    sid;                     /* Dataspace ID */
+    hid_t    null_sid;                /* "null" dataspace ID */
+    hid_t    attr_sid;                /* Attribute's dataspace ID */
+    hid_t    attr;                    /* Attribute ID */
+    char     attrname[NAME_BUF_SIZE]; /* Name of attribute */
+    unsigned value;                   /* Attribute value */
+    htri_t   cmp;                     /* Results of comparison */
+#if 0
+    hsize_t storage_size;   /* Size of storage for attribute */
+#endif
+    H5A_info_t ainfo; /* Attribute info */
+#if 0
+    h5_stat_size_t empty_filesize;  /* Size of empty file */
+    h5_stat_size_t filesize;        /* Size of file after modifications */
+#endif
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Storing Attributes with 'null' dataspace\n"));
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+#if 0
+    /* Get size of file */
+    empty_filesize = h5_get_file_size(FILENAME, fapl);
+    if (empty_filesize < 0)
+        TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
+#endif
+    /* Re-open file */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Create dataspace for dataset attributes */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create "null" dataspace for attribute */
+    null_sid = H5Screate(H5S_NULL);
+    CHECK(null_sid, FAIL, "H5Screate");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Add attribute with 'null' dataspace */
+
+    /* Create attribute */
+    HDstrcpy(attrname, "null attr");
+    attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, null_sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, FAIL, "H5Acreate2");
+
+    /* Try to read data from the attribute */
+    /* (shouldn't fail, but should leave buffer alone) */
+    value = 23;
+    ret   = H5Aread(attr, H5T_NATIVE_UINT, &value);
+    CHECK(ret, FAIL, "H5Aread");
+    VERIFY(value, 23, "H5Aread");
+
+    /* Get the dataspace for the attribute and make certain it's 'null' */
+    attr_sid = H5Aget_space(attr);
+    CHECK(attr_sid, FAIL, "H5Aget_space");
+
+    /* Compare the dataspaces */
+    cmp = H5Sextent_equal(attr_sid, null_sid);
+    CHECK(cmp, FAIL, "H5Sextent_equal");
+    VERIFY(cmp, TRUE, "H5Sextent_equal");
+
+    /* Close dataspace */
+    ret = H5Sclose(attr_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+#if 0
+    /* Check the storage size for the attribute */
+    storage_size = H5Aget_storage_size(attr);
+    VERIFY(storage_size, 0, "H5Aget_storage_size");
+#endif
+    /* Get the attribute info */
+    ret = H5Aget_info(attr, &ainfo);
+    CHECK(ret, FAIL, "H5Aget_info");
+#if 0
+    VERIFY(ainfo.data_size, storage_size, "H5Aget_info");
+#endif
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Add another attribute with 'null' dataspace */
+
+    /* Create attribute */
+    HDstrcpy(attrname, "null attr #2");
+    attr = H5Acreate2(dataset, attrname, H5T_NATIVE_UINT, null_sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, FAIL, "H5Acreate2");
+
+    /* Try to write data to the attribute */
+    /* (shouldn't fail, but should leave buffer alone) */
+    value = 23;
+    ret   = H5Awrite(attr, H5T_NATIVE_UINT, &value);
+    CHECK(ret, FAIL, "H5Awrite");
+    VERIFY(value, 23, "H5Awrite");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open the file and check on the attributes */
+
+    /* Re-open file */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open dataset */
+    dataset = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+    /* Open the second attribute created ("null attr #2") */
+    HDstrcpy(attrname, "null attr #2");
+    attr = H5Aopen(dataset, attrname, H5P_DEFAULT);
+    CHECK(attr, FAIL, "H5Aopen");
+
+    /* Try to read data from the attribute */
+    /* (shouldn't fail, but should leave buffer alone) */
+    value = 23;
+    ret   = H5Aread(attr, H5T_NATIVE_UINT, &value);
+    CHECK(ret, FAIL, "H5Aread");
+    VERIFY(value, 23, "H5Aread");
+
+    /* Get the dataspace for the attribute and make certain it's 'null' */
+    attr_sid = H5Aget_space(attr);
+    CHECK(attr_sid, FAIL, "H5Aget_space");
+
+    /* Compare the dataspaces */
+    cmp = H5Sextent_equal(attr_sid, null_sid);
+    CHECK(cmp, FAIL, "H5Sextent_equal");
+    VERIFY(cmp, TRUE, "H5Sextent_equal");
+
+    /* Close dataspace */
+    ret = H5Sclose(attr_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+#if 0
+    /* Check the storage size for the attribute */
+    storage_size = H5Aget_storage_size(attr);
+    VERIFY(storage_size, 0, "H5Aget_storage_size");
+#endif
+    /* Get the attribute info */
+    ret = H5Aget_info(attr, &ainfo);
+    CHECK(ret, FAIL, "H5Aget_info");
+#if 0
+    VERIFY(ainfo.data_size, storage_size, "H5Aget_info");
+#endif
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Open the first attribute created ("null attr") */
+    HDstrcpy(attrname, "null attr");
+    attr = H5Aopen(dataset, attrname, H5P_DEFAULT);
+    CHECK(attr, FAIL, "H5Aopen");
+
+    /* Try to write data to the attribute */
+    /* (shouldn't fail, but should leave buffer alone) */
+    value = 23;
+    ret   = H5Awrite(attr, H5T_NATIVE_UINT, &value);
+    CHECK(ret, FAIL, "H5Awrite");
+    VERIFY(value, 23, "H5Awrite");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Unlink dataset */
+    ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Ldelete");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Close dataspaces */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Sclose(null_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+#if 0
+    if (h5_using_default_driver(NULL)) {
+        /* Check size of file */
+        filesize = h5_get_file_size(FILENAME, fapl);
+        VERIFY(filesize, empty_filesize, "h5_get_file_size");
+    }
+#endif
+} /* test_attr_null_space() */
+
+/****************************************************************
+**
+** test_attr_deprec(): Test basic H5A (attribute) code.
+** Tests deprecated API routines
+** (H5Acreate1, H5Aopen_idx, H5Aopen_name; skipped entirely when
+** the library is built with H5_NO_DEPRECATED_SYMBOLS.)
+**
+****************************************************************/
+static void
+test_attr_deprec(hid_t fcpl, hid_t fapl)
+{
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+    hid_t  fid;     /* HDF5 File ID */
+    hid_t  dataset; /* Dataset ID */
+    hid_t  sid;     /* Dataspace ID */
+    hid_t  attr;    /* Attribute ID */
+    herr_t ret;     /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Deprecated Attribute Routines\n"));
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create dataspace for dataset attributes */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Add attribute to dataset */
+
+    /* Create attribute with the deprecated H5Acreate1 routine */
+    attr = H5Acreate1(dataset, "attr", H5T_NATIVE_UINT, sid, H5P_DEFAULT);
+    CHECK(attr, FAIL, "H5Acreate1");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close dataspaces */
+    ret = H5Sclose(sid);
+    /* (previously unchecked -- verify the close succeeded, like every
+     *  other H5Sclose call in this file) */
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open the file and operate on the attribute */
+
+    /* Re-open file */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open dataset */
+    dataset = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+#if 0
+    /* Get number of attributes with bad ID */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Aget_num_attrs((hid_t)-1);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Aget_num_attrs");
+
+    /* Get number of attributes */
+    ret = H5Aget_num_attrs(dataset);
+    VERIFY(ret, 1, "H5Aget_num_attrs");
+#endif
+    /* Open the attribute by index, with the deprecated H5Aopen_idx routine */
+    attr = H5Aopen_idx(dataset, 0);
+    CHECK(attr, FAIL, "H5Aopen_idx");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Open the attribute by name, with the deprecated H5Aopen_name routine */
+    attr = H5Aopen_name(dataset, "attr");
+    CHECK(attr, FAIL, "H5Aopen_name");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+#else  /* H5_NO_DEPRECATED_SYMBOLS */
+    /* Shut compiler up */
+    (void)fcpl;
+    (void)fapl;
+
+    /* Output message about test being skipped */
+    MESSAGE(5, ("Skipping Test On Deprecated Attribute Routines\n"));
+
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+} /* test_attr_deprec() */
+
+/****************************************************************
+**
+** test_attr_many(): Test basic H5A (attribute) code.
+** Tests storing lots of attributes
+** (Creates NATTR_MANY_NEW or NATTR_MANY_OLD scalar attributes on a
+** group, checking H5Aexists/H5Aexists_by_name around each operation,
+** then re-opens the file read-only and verifies every value.)
+**
+****************************************************************/
+static void
+test_attr_many(hbool_t new_format, hid_t fcpl, hid_t fapl)
+{
+    hid_t    file_id;                 /* HDF5 File ID */
+    hid_t    group_id;                /* Group holding the attributes */
+    hid_t    space_id;                /* Scalar dataspace shared by every attribute */
+    hid_t    attr_id;                 /* Attribute ID */
+    char     name_buf[NAME_BUF_SIZE]; /* Attribute name buffer */
+    unsigned num_attrs = (new_format ? NATTR_MANY_NEW : NATTR_MANY_OLD); /* Number of attributes */
+    htri_t   found;                   /* Result of the H5Aexists* probes */
+    unsigned i;                       /* Loop counter */
+    herr_t   status;                  /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Storing Many Attributes\n"));
+
+    /* Set up: file, shared scalar dataspace, and the target group */
+    file_id = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(file_id, FAIL, "H5Fcreate");
+
+    space_id = H5Screate(H5S_SCALAR);
+    CHECK(space_id, FAIL, "H5Screate");
+
+    group_id = H5Gcreate2(file_id, GROUP1_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(group_id, FAIL, "H5Gcreate2");
+
+    /* Create each attribute, probing for existence before creation,
+     * after creation, and again after the handle is closed */
+    for (i = 0; i < num_attrs; i++) {
+        HDsnprintf(name_buf, sizeof(name_buf), "a-%06u", i);
+
+        found = H5Aexists(group_id, name_buf);
+        VERIFY(found, FALSE, "H5Aexists");
+
+        found = H5Aexists_by_name(file_id, GROUP1_NAME, name_buf, H5P_DEFAULT);
+        VERIFY(found, FALSE, "H5Aexists_by_name");
+
+        attr_id = H5Acreate2(group_id, name_buf, H5T_NATIVE_UINT, space_id, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(attr_id, FAIL, "H5Acreate2");
+
+        found = H5Aexists(group_id, name_buf);
+        VERIFY(found, TRUE, "H5Aexists");
+
+        found = H5Aexists_by_name(file_id, GROUP1_NAME, name_buf, H5P_DEFAULT);
+        VERIFY(found, TRUE, "H5Aexists_by_name");
+
+        /* Store the loop index as the attribute's value */
+        status = H5Awrite(attr_id, H5T_NATIVE_UINT, &i);
+        CHECK(status, FAIL, "H5Awrite");
+
+        status = H5Aclose(attr_id);
+        CHECK(status, FAIL, "H5Aclose");
+
+        found = H5Aexists(group_id, name_buf);
+        VERIFY(found, TRUE, "H5Aexists");
+
+        found = H5Aexists_by_name(file_id, GROUP1_NAME, name_buf, H5P_DEFAULT);
+        VERIFY(found, TRUE, "H5Aexists_by_name");
+    } /* end for */
+
+    status = H5Gclose(group_id);
+    CHECK(status, FAIL, "H5Gclose");
+
+    status = H5Fclose(file_id);
+    CHECK(status, FAIL, "H5Fclose");
+
+    /* Re-open the file read-only and confirm every attribute survived
+     * with the value that was written into it */
+    file_id = H5Fopen(FILENAME, H5F_ACC_RDONLY, fapl);
+    CHECK(file_id, FAIL, "H5Fopen");
+
+    group_id = H5Gopen2(file_id, GROUP1_NAME, H5P_DEFAULT);
+    CHECK(group_id, FAIL, "H5Gopen2");
+
+    for (i = 0; i < num_attrs; i++) {
+        unsigned value; /* Value read back from the attribute */
+
+        HDsnprintf(name_buf, sizeof(name_buf), "a-%06u", i);
+
+        found = H5Aexists(group_id, name_buf);
+        VERIFY(found, TRUE, "H5Aexists");
+
+        found = H5Aexists_by_name(file_id, GROUP1_NAME, name_buf, H5P_DEFAULT);
+        VERIFY(found, TRUE, "H5Aexists_by_name");
+
+        attr_id = H5Aopen(group_id, name_buf, H5P_DEFAULT);
+        CHECK(attr_id, FAIL, "H5Aopen");
+
+        found = H5Aexists(group_id, name_buf);
+        VERIFY(found, TRUE, "H5Aexists");
+
+        found = H5Aexists_by_name(file_id, GROUP1_NAME, name_buf, H5P_DEFAULT);
+        VERIFY(found, TRUE, "H5Aexists_by_name");
+
+        status = H5Aread(attr_id, H5T_NATIVE_UINT, &value);
+        CHECK(status, FAIL, "H5Aread");
+        VERIFY(value, i, "H5Aread");
+
+        status = H5Aclose(attr_id);
+        CHECK(status, FAIL, "H5Aclose");
+    } /* end for */
+
+    /* Tear down */
+    status = H5Gclose(group_id);
+    CHECK(status, FAIL, "H5Gclose");
+
+    status = H5Fclose(file_id);
+    CHECK(status, FAIL, "H5Fclose");
+
+    status = H5Sclose(space_id);
+    CHECK(status, FAIL, "H5Sclose");
+} /* test_attr_many() */
+
+/****************************************************************
+**
+** test_attr_corder_create_basic(): Test basic H5A (attribute) code.
+** Tests basic code to create objects with attribute creation order info
+** (Verifies that indexing-without-tracking is rejected, that the
+** tracked+indexed flags round-trip through a DCPL, and that they
+** persist on a dataset across a file close/re-open.)
+**
+****************************************************************/
+static void
+test_attr_corder_create_basic(hid_t fcpl, hid_t fapl)
+{
+    hid_t    fid;             /* HDF5 File ID */
+    hid_t    dataset;         /* Dataset ID */
+    hid_t    sid;             /* Dataspace ID */
+    hid_t    dcpl;            /* Dataset creation property list ID */
+    unsigned crt_order_flags; /* Creation order flags */
+#if 0
+    htri_t is_empty;   /* Are there any attributes? */
+    htri_t is_dense;   /* Are attributes stored densely? */
+#endif
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Basic Code for Attributes with Creation Order Info\n"));
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create dataset creation property list */
+    /* (dcpl_g is a file-scope DCPL presumably configured by the test
+     *  harness; H5P_DEFAULT means start from a fresh one) */
+    if (dcpl_g == H5P_DEFAULT) {
+        dcpl = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(dcpl, FAIL, "H5Pcreate");
+    }
+    else {
+        dcpl = H5Pcopy(dcpl_g);
+        CHECK(dcpl, FAIL, "H5Pcopy");
+    }
+#if 0
+    /* Get creation order indexing on object */
+    ret = H5Pget_attr_creation_order(dcpl, &crt_order_flags);
+    CHECK(ret, FAIL, "H5Pget_attr_creation_order");
+    VERIFY(crt_order_flags, 0, "H5Pget_attr_creation_order");
+#endif
+    /* Setting creation-order indexing without tracking is an invalid
+     * combination of attribute creation order flags and should fail */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Pset_attr_creation_order(dcpl, H5P_CRT_ORDER_INDEXED);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Pset_attr_creation_order");
+
+#if 0
+    ret = H5Pget_attr_creation_order(dcpl, &crt_order_flags);
+    CHECK(ret, FAIL, "H5Pget_attr_creation_order");
+    VERIFY(crt_order_flags, 0, "H5Pget_attr_creation_order");
+#endif
+
+    /* Set attribute creation order tracking & indexing for object */
+    ret = H5Pset_attr_creation_order(dcpl, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED));
+    CHECK(ret, FAIL, "H5Pset_attr_creation_order");
+    ret = H5Pget_attr_creation_order(dcpl, &crt_order_flags);
+    CHECK(ret, FAIL, "H5Pget_attr_creation_order");
+    VERIFY(crt_order_flags, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED), "H5Pget_attr_creation_order");
+
+    /* Create dataspace for dataset */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Close dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+#if 0
+    /* Check on dataset's attribute storage status */
+    is_empty = H5O__is_attr_empty_test(dataset);
+    VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+    is_dense = H5O__is_attr_dense_test(dataset);
+    VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open file */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open dataset created */
+    dataset = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+#if 0
+    /* Check on dataset's attribute storage status */
+    is_empty = H5O__is_attr_empty_test(dataset);
+    VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+    is_dense = H5O__is_attr_dense_test(dataset);
+    VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+
+    /* Retrieve dataset creation property list for group */
+    dcpl = H5Dget_create_plist(dataset);
+    CHECK(dcpl, FAIL, "H5Dget_create_plist");
+
+    /* Query the attribute creation properties -- the flags set at
+     * creation time should have persisted in the file */
+    ret = H5Pget_attr_creation_order(dcpl, &crt_order_flags);
+    CHECK(ret, FAIL, "H5Pget_attr_creation_order");
+    VERIFY(crt_order_flags, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED), "H5Pget_attr_creation_order");
+
+    /* Close property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_corder_create_basic() */
+
+/****************************************************************
+**
+** test_attr_corder_create_compact(): Test basic H5A (attribute) code.
+** Tests compact attribute storage on objects with attribute creation order info
+**
+****************************************************************/
+static void
+test_attr_corder_create_compact(hid_t fcpl, hid_t fapl)
+{
+    hid_t    fid;                 /* HDF5 File ID            */
+    hid_t    dset1, dset2, dset3; /* Dataset IDs            */
+    hid_t    my_dataset;          /* Current dataset ID        */
+    hid_t    sid;                 /* Dataspace ID            */
+    hid_t    attr;                /* Attribute ID            */
+    hid_t    dcpl;                /* Dataset creation property list ID */
+    unsigned max_compact;         /* Maximum # of links to store in group compactly */
+    unsigned min_dense;           /* Minimum # of links to store in group "densely" */
+#if 0
+    htri_t  is_empty;    /* Are there any attributes? */
+    htri_t  is_dense;    /* Are attributes stored densely? */
+    hsize_t nattrs;      /* Number of attributes on object */
+#endif
+    char     attrname[NAME_BUF_SIZE]; /* Name of attribute */
+    unsigned curr_dset;               /* Current dataset to work on */
+    unsigned u;                       /* Local index variable */
+    herr_t   ret;                     /* Generic return value    */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Compact Storage of Attributes with Creation Order Info\n"));
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create dataset creation property list */
+    /* (dcpl_g is a file-scope DCPL that may have been pre-configured by the
+     *  test harness; fall back to a fresh DCPL when it is the default) */
+    if (dcpl_g == H5P_DEFAULT) {
+        dcpl = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(dcpl, FAIL, "H5Pcreate");
+    }
+    else {
+        dcpl = H5Pcopy(dcpl_g);
+        CHECK(dcpl, FAIL, "H5Pcopy");
+    }
+
+    /* Set attribute creation order tracking & indexing for object */
+    ret = H5Pset_attr_creation_order(dcpl, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED));
+    CHECK(ret, FAIL, "H5Pset_attr_creation_order");
+
+    /* Query the attribute creation properties */
+    /* (max_compact bounds the attribute count below so storage stays compact) */
+    ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+    CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+    /* Create dataspace for dataset & attributes */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create datasets */
+    dset1 = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dset1, FAIL, "H5Dcreate2");
+    dset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dset2, FAIL, "H5Dcreate2");
+    dset3 = H5Dcreate2(fid, DSET3_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dset3, FAIL, "H5Dcreate2");
+
+    /* Work on all the datasets */
+    for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+        switch (curr_dset) {
+            case 0:
+                my_dataset = dset1;
+                break;
+
+            case 1:
+                my_dataset = dset2;
+                break;
+
+            case 2:
+                my_dataset = dset3;
+                break;
+
+            default:
+                HDassert(0 && "Too many datasets!");
+        } /* end switch */
+/* NOTE(review): the disabled checks below call internal H5O__* test routines,
+ * presumably unavailable to this API-level test build -- confirm before re-enabling */
+#if 0
+        /* Check on dataset's attribute storage status */
+        is_empty = H5O__is_attr_empty_test(my_dataset);
+        VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(my_dataset);
+        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+        /* Create several attributes, but keep storage in compact form */
+        for (u = 0; u < max_compact; u++) {
+            /* Create attribute */
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+            attr = H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+            CHECK(attr, FAIL, "H5Acreate2");
+
+            /* Write data into the attribute */
+            /* (each attribute stores its own index, matching its creation order) */
+            ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+            CHECK(ret, FAIL, "H5Awrite");
+
+            /* Close attribute */
+            ret = H5Aclose(attr);
+            CHECK(ret, FAIL, "H5Aclose");
+#if 0
+            /* Verify state of object */
+            ret = H5O__num_attrs_test(my_dataset, &nattrs);
+            CHECK(ret, FAIL, "H5O__num_attrs_test");
+            VERIFY(nattrs, (u + 1), "H5O__num_attrs_test");
+            is_empty = H5O__is_attr_empty_test(my_dataset);
+            VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+            is_dense = H5O__is_attr_dense_test(my_dataset);
+            VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+        } /* end for */
+    }     /* end for */
+
+    /* Close Datasets */
+    ret = H5Dclose(dset1);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset2);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset3);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open file */
+    /* (verifies the creation-order info survives a round trip to disk) */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open datasets created */
+    dset1 = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
+    CHECK(dset1, FAIL, "H5Dopen2");
+    dset2 = H5Dopen2(fid, DSET2_NAME, H5P_DEFAULT);
+    CHECK(dset2, FAIL, "H5Dopen2");
+    dset3 = H5Dopen2(fid, DSET3_NAME, H5P_DEFAULT);
+    CHECK(dset3, FAIL, "H5Dopen2");
+
+    /* Work on all the datasets */
+    for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+        switch (curr_dset) {
+            case 0:
+                my_dataset = dset1;
+                break;
+
+            case 1:
+                my_dataset = dset2;
+                break;
+
+            case 2:
+                my_dataset = dset3;
+                break;
+
+            default:
+                HDassert(0 && "Too many datasets!");
+        } /* end switch */
+#if 0
+        /* Check on dataset's attribute storage status */
+        ret = H5O__num_attrs_test(my_dataset, &nattrs);
+        CHECK(ret, FAIL, "H5O__num_attrs_test");
+        VERIFY(nattrs, max_compact, "H5O__num_attrs_test");
+        is_empty = H5O__is_attr_empty_test(my_dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(my_dataset);
+        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+        /* Loop through attributes, checking their creation order values */
+        /* (the name index is used, but the creation order value is in the same order) */
+        for (u = 0; u < max_compact; u++) {
+            H5A_info_t ainfo; /* Attribute information */
+
+            /* Retrieve information for attribute */
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+            ret = H5Aget_info_by_name(my_dataset, ".", attrname, &ainfo, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Aget_info_by_name");
+
+            /* Verify creation order of attribute */
+            VERIFY(ainfo.corder_valid, TRUE, "H5Aget_info_by_name");
+            VERIFY(ainfo.corder, u, "H5Aget_info_by_name");
+        } /* end for */
+    }     /* end for */
+
+    /* Close Datasets */
+    ret = H5Dclose(dset1);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset2);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset3);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_corder_create_compact() */
+
+/****************************************************************
+**
+** test_attr_corder_create_dense(): Test basic H5A (attribute) code.
+** Tests dense attribute storage on objects with attribute creation order info
+**
+****************************************************************/
+static void
+test_attr_corder_create_dense(hid_t fcpl, hid_t fapl)
+{
+    hid_t    fid;                 /* HDF5 File ID            */
+    hid_t    dset1, dset2, dset3; /* Dataset IDs            */
+    hid_t    my_dataset;          /* Current dataset ID        */
+    hid_t    sid;                 /* Dataspace ID            */
+    hid_t    attr;                /* Attribute ID            */
+    hid_t    dcpl;                /* Dataset creation property list ID */
+    unsigned max_compact;         /* Maximum # of links to store in group compactly */
+    unsigned min_dense;           /* Minimum # of links to store in group "densely" */
+#if 0
+    htri_t  is_empty;     /* Are there any attributes? */
+    htri_t  is_dense;     /* Are attributes stored densely? */
+    hsize_t nattrs;       /* Number of attributes on object */
+    hsize_t name_count;   /* # of records in name index */
+    hsize_t corder_count; /* # of records in creation order index */
+#endif
+    char     attrname[NAME_BUF_SIZE]; /* Name of attribute */
+    unsigned curr_dset;               /* Current dataset to work on */
+    unsigned u;                       /* Local index variable */
+    herr_t   ret;                     /* Generic return value    */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Dense Storage of Attributes with Creation Order Info\n"));
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create dataset creation property list */
+    /* (dcpl_g is a file-scope DCPL that may have been pre-configured by the
+     *  test harness; fall back to a fresh DCPL when it is the default) */
+    if (dcpl_g == H5P_DEFAULT) {
+        dcpl = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(dcpl, FAIL, "H5Pcreate");
+    }
+    else {
+        dcpl = H5Pcopy(dcpl_g);
+        CHECK(dcpl, FAIL, "H5Pcopy");
+    }
+
+    /* Set attribute creation order tracking & indexing for object */
+    ret = H5Pset_attr_creation_order(dcpl, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED));
+    CHECK(ret, FAIL, "H5Pset_attr_creation_order");
+
+    /* Query the attribute creation properties */
+    /* (creating max_compact + 1 attributes below forces the dense transition) */
+    ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+    CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+    /* Create dataspace for dataset & attributes */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create datasets */
+    dset1 = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dset1, FAIL, "H5Dcreate2");
+    dset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dset2, FAIL, "H5Dcreate2");
+    dset3 = H5Dcreate2(fid, DSET3_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dset3, FAIL, "H5Dcreate2");
+
+    /* Work on all the datasets */
+    for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+        switch (curr_dset) {
+            case 0:
+                my_dataset = dset1;
+                break;
+
+            case 1:
+                my_dataset = dset2;
+                break;
+
+            case 2:
+                my_dataset = dset3;
+                break;
+
+            default:
+                HDassert(0 && "Too many datasets!");
+        } /* end switch */
+/* NOTE(review): the disabled checks below call internal H5O__* test routines,
+ * presumably unavailable to this API-level test build -- confirm before re-enabling */
+#if 0
+        /* Check on dataset's attribute storage status */
+        is_empty = H5O__is_attr_empty_test(my_dataset);
+        VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(my_dataset);
+        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+        /* Create several attributes, but keep storage in compact form */
+        for (u = 0; u < max_compact; u++) {
+            /* Create attribute */
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+            attr = H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+            CHECK(attr, FAIL, "H5Acreate2");
+
+            /* Write data into the attribute */
+            /* (each attribute stores its own index, matching its creation order) */
+            ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+            CHECK(ret, FAIL, "H5Awrite");
+
+            /* Close attribute */
+            ret = H5Aclose(attr);
+            CHECK(ret, FAIL, "H5Aclose");
+#if 0
+            /* Verify state of object */
+            ret = H5O__num_attrs_test(my_dataset, &nattrs);
+            CHECK(ret, FAIL, "H5O__num_attrs_test");
+            VERIFY(nattrs, (u + 1), "H5O__num_attrs_test");
+            is_empty = H5O__is_attr_empty_test(my_dataset);
+            VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+            is_dense = H5O__is_attr_dense_test(my_dataset);
+            VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+        } /* end for */
+
+        /* Create another attribute, to push into dense storage */
+        /* (u == max_compact here, so the written value still matches the name) */
+        HDsnprintf(attrname, sizeof(attrname), "attr %02u", max_compact);
+        attr = H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(attr, FAIL, "H5Acreate2");
+
+        /* Write data into the attribute */
+        ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+        CHECK(ret, FAIL, "H5Awrite");
+
+        /* Close attribute */
+        ret = H5Aclose(attr);
+        CHECK(ret, FAIL, "H5Aclose");
+#if 0
+        /* Verify state of object */
+        ret = H5O__num_attrs_test(my_dataset, &nattrs);
+        CHECK(ret, FAIL, "H5O__num_attrs_test");
+        VERIFY(nattrs, (max_compact + 1), "H5O__num_attrs_test");
+        is_empty = H5O__is_attr_empty_test(my_dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(my_dataset);
+        VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+
+        /* Retrieve & verify # of records in the name & creation order indices */
+        ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
+        CHECK(ret, FAIL, "H5O__attr_dense_info_test");
+        VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
+#endif
+    } /* end for */
+
+    /* Close Datasets */
+    ret = H5Dclose(dset1);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset2);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset3);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open file */
+    /* (verifies the creation-order info survives a round trip to disk) */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open datasets created */
+    dset1 = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
+    CHECK(dset1, FAIL, "H5Dopen2");
+    dset2 = H5Dopen2(fid, DSET2_NAME, H5P_DEFAULT);
+    CHECK(dset2, FAIL, "H5Dopen2");
+    dset3 = H5Dopen2(fid, DSET3_NAME, H5P_DEFAULT);
+    CHECK(dset3, FAIL, "H5Dopen2");
+
+    /* Work on all the datasets */
+    for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+        switch (curr_dset) {
+            case 0:
+                my_dataset = dset1;
+                break;
+
+            case 1:
+                my_dataset = dset2;
+                break;
+
+            case 2:
+                my_dataset = dset3;
+                break;
+
+            default:
+                HDassert(0 && "Too many datasets!");
+        } /* end switch */
+#if 0
+        /* Check on dataset's attribute storage status */
+        ret = H5O__num_attrs_test(my_dataset, &nattrs);
+        CHECK(ret, FAIL, "H5O__num_attrs_test");
+        VERIFY(nattrs, (max_compact + 1), "H5O__num_attrs_test");
+        is_empty = H5O__is_attr_empty_test(my_dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(my_dataset);
+        VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+        /* Loop through attributes, checking their creation order values */
+        /* (the name index is used, but the creation order value is in the same order) */
+        for (u = 0; u < (max_compact + 1); u++) {
+            H5A_info_t ainfo; /* Attribute information */
+
+            /* Retrieve information for attribute */
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+            ret = H5Aget_info_by_name(my_dataset, ".", attrname, &ainfo, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Aget_info_by_name");
+
+            /* Verify creation order of attribute */
+            VERIFY(ainfo.corder_valid, TRUE, "H5Aget_info_by_name");
+            VERIFY(ainfo.corder, u, "H5Aget_info_by_name");
+        } /* end for */
+    }     /* end for */
+
+    /* Close Datasets */
+    ret = H5Dclose(dset1);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset2);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset3);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_corder_create_dense() */
+
+/****************************************************************
+**
+** test_attr_corder_create_reopen(): Test basic H5A (attribute) code.
+** Test creating attributes w/reopening file from using new format
+** to using old format
+**
+****************************************************************/
+static void
+test_attr_corder_create_reopen(hid_t fcpl, hid_t fapl)
+{
+    hid_t  fid     = -1; /* File ID */
+    hid_t  gcpl_id = -1; /* Group creation property list ID */
+    hid_t  gid     = -1; /* Group ID */
+    hid_t  sid     = -1; /* Dataspace ID */
+    hid_t  aid     = -1; /* Attribute ID */
+    int    buf;          /* Attribute data */
+    herr_t ret;          /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Creating Attributes w/New & Old Format\n"));
+
+    /* Create dataspace for attributes */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create group, with attribute creation order tracked & indexed */
+    gcpl_id = H5Pcreate(H5P_GROUP_CREATE);
+    CHECK(gcpl_id, FAIL, "H5Pcreate");
+    ret = H5Pset_attr_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED);
+    CHECK(ret, FAIL, "H5Pset_attr_creation_order");
+    gid = H5Gcreate2(fid, GROUP1_NAME, H5P_DEFAULT, gcpl_id, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gcreate2");
+
+    /* Create a couple of attributes */
+    aid = H5Acreate2(gid, "attr-003", H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, FAIL, "H5Acreate2");
+    buf = 3;
+    ret = H5Awrite(aid, H5T_NATIVE_INT, &buf);
+    CHECK(ret, FAIL, "H5Awrite");
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    aid = H5Acreate2(gid, "attr-004", H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, FAIL, "H5Acreate2");
+    buf = 4;
+    ret = H5Awrite(aid, H5T_NATIVE_INT, &buf);
+    CHECK(ret, FAIL, "H5Awrite");
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /***** Close group & GCPL *****/
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+    ret = H5Pclose(gcpl_id);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open file, without "use the latest format" flag */
+    /* (exercises modifying creation-order-tracked attributes via the old format path) */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Re-open group */
+    gid = H5Gopen2(fid, GROUP1_NAME, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gopen2");
+
+    /* Delete attribute */
+    ret = H5Adelete(gid, "attr-003");
+    /* Fixed: check the H5Adelete return value ('ret'), not the stale
+     * attribute ID 'aid' -- the old check would mask a delete failure */
+    CHECK(ret, FAIL, "H5Adelete");
+
+    /* Create some additional attributes */
+    aid = H5Acreate2(gid, "attr-008", H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, FAIL, "H5Acreate2");
+    buf = 8;
+    ret = H5Awrite(aid, H5T_NATIVE_INT, &buf);
+    CHECK(ret, FAIL, "H5Awrite");
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    aid = H5Acreate2(gid, "attr-006", H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, FAIL, "H5Acreate2");
+    buf = 6;
+    ret = H5Awrite(aid, H5T_NATIVE_INT, &buf);
+    CHECK(ret, FAIL, "H5Awrite");
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /***** Close group *****/
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Close attribute dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_attr_corder_create_reopen() */
+
+/****************************************************************
+**
+** test_attr_corder_transition(): Test basic H5A (attribute) code.
+** Tests attribute storage transitions on objects with attribute creation order info
+**
+****************************************************************/
+static void
+test_attr_corder_transition(hid_t fcpl, hid_t fapl)
+{
+    hid_t    fid;                 /* HDF5 File ID            */
+    hid_t    dset1, dset2, dset3; /* Dataset IDs            */
+    hid_t    my_dataset;          /* Current dataset ID        */
+    hid_t    sid;                 /* Dataspace ID            */
+    hid_t    attr;                /* Attribute ID            */
+    hid_t    dcpl;                /* Dataset creation property list ID */
+    unsigned max_compact;         /* Maximum # of links to store in group compactly */
+    unsigned min_dense;           /* Minimum # of links to store in group "densely" */
+#if 0
+    htri_t  is_empty;     /* Are there any attributes? */
+    htri_t  is_dense;     /* Are attributes stored densely? */
+    hsize_t nattrs;       /* Number of attributes on object */
+    hsize_t name_count;   /* # of records in name index */
+    hsize_t corder_count; /* # of records in creation order index */
+#endif
+    char     attrname[NAME_BUF_SIZE]; /* Name of attribute */
+    unsigned curr_dset;               /* Current dataset to work on */
+    unsigned u;                       /* Local index variable */
+    herr_t   ret;                     /* Generic return value    */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Storage Transitions of Attributes with Creation Order Info\n"));
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create dataset creation property list */
+    /* (dcpl_g is a file-scope DCPL that may have been pre-configured by the
+     *  test harness; fall back to a fresh DCPL when it is the default) */
+    if (dcpl_g == H5P_DEFAULT) {
+        dcpl = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(dcpl, FAIL, "H5Pcreate");
+    }
+    else {
+        dcpl = H5Pcopy(dcpl_g);
+        CHECK(dcpl, FAIL, "H5Pcopy");
+    }
+
+    /* Set attribute creation order tracking & indexing for object */
+    ret = H5Pset_attr_creation_order(dcpl, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED));
+    CHECK(ret, FAIL, "H5Pset_attr_creation_order");
+
+    /* Query the attribute creation properties */
+    /* NOTE(review): the countdown loops below assume min_dense > 0 -- 'u' is
+     * unsigned, so "u >= min_dense" would never terminate for min_dense == 0 */
+    ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+    CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+    /* Create dataspace for dataset & attributes */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* XXX: Try to find a way to resize dataset's object header so that the object
+     *      header can have one chunk, then retrieve "empty" file size and check
+     *      that size after everything is deleted -QAK
+     */
+    /* Create datasets */
+    dset1 = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dset1, FAIL, "H5Dcreate2");
+    dset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dset2, FAIL, "H5Dcreate2");
+    dset3 = H5Dcreate2(fid, DSET3_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dset3, FAIL, "H5Dcreate2");
+
+    /* Work on all the datasets */
+    for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+        switch (curr_dset) {
+            case 0:
+                my_dataset = dset1;
+                break;
+
+            case 1:
+                my_dataset = dset2;
+                break;
+
+            case 2:
+                my_dataset = dset3;
+                break;
+
+            default:
+                HDassert(0 && "Too many datasets!");
+        } /* end switch */
+/* NOTE(review): the disabled checks below call internal H5O__* test routines,
+ * presumably unavailable to this API-level test build -- confirm before re-enabling */
+#if 0
+        /* Check on dataset's attribute storage status */
+        is_empty = H5O__is_attr_empty_test(my_dataset);
+        VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(my_dataset);
+        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+    } /* end for */
+
+    /* Close Datasets */
+    ret = H5Dclose(dset1);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset2);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset3);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open file */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open datasets created */
+    dset1 = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
+    CHECK(dset1, FAIL, "H5Dopen2");
+    dset2 = H5Dopen2(fid, DSET2_NAME, H5P_DEFAULT);
+    CHECK(dset2, FAIL, "H5Dopen2");
+    dset3 = H5Dopen2(fid, DSET3_NAME, H5P_DEFAULT);
+    CHECK(dset3, FAIL, "H5Dopen2");
+
+    /* Work on all the datasets */
+    /* (first pass: grow compact -> dense, shrink back to compact, then grow again) */
+    for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+        switch (curr_dset) {
+            case 0:
+                my_dataset = dset1;
+                break;
+
+            case 1:
+                my_dataset = dset2;
+                break;
+
+            case 2:
+                my_dataset = dset3;
+                break;
+
+            default:
+                HDassert(0 && "Too many datasets!");
+        } /* end switch */
+
+        /* Create several attributes, but keep storage in compact form */
+        for (u = 0; u < max_compact; u++) {
+            /* Create attribute */
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+            attr = H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+            CHECK(attr, FAIL, "H5Acreate2");
+
+            /* Write data into the attribute */
+            /* (each attribute stores its own index, matching its creation order) */
+            ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+            CHECK(ret, FAIL, "H5Awrite");
+
+            /* Close attribute */
+            ret = H5Aclose(attr);
+            CHECK(ret, FAIL, "H5Aclose");
+#if 0
+            /* Verify state of object */
+            ret = H5O__num_attrs_test(my_dataset, &nattrs);
+            CHECK(ret, FAIL, "H5O__num_attrs_test");
+            VERIFY(nattrs, (u + 1), "H5O__num_attrs_test");
+            is_empty = H5O__is_attr_empty_test(my_dataset);
+            VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+            is_dense = H5O__is_attr_dense_test(my_dataset);
+            VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+        } /* end for */
+
+        /* Create another attribute, to push into dense storage */
+        /* (u == max_compact here, so the written value still matches the name) */
+        HDsnprintf(attrname, sizeof(attrname), "attr %02u", max_compact);
+        attr = H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(attr, FAIL, "H5Acreate2");
+
+        /* Write data into the attribute */
+        ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+        CHECK(ret, FAIL, "H5Awrite");
+
+        /* Close attribute */
+        ret = H5Aclose(attr);
+        CHECK(ret, FAIL, "H5Aclose");
+#if 0
+        /* Verify state of object */
+        ret = H5O__num_attrs_test(my_dataset, &nattrs);
+        CHECK(ret, FAIL, "H5O__num_attrs_test");
+        VERIFY(nattrs, (max_compact + 1), "H5O__num_attrs_test");
+        is_empty = H5O__is_attr_empty_test(my_dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(my_dataset);
+        VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+
+        /* Retrieve & verify # of records in the name & creation order indices */
+        ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
+        CHECK(ret, FAIL, "H5O__attr_dense_info_test");
+        VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
+#endif
+        /* Delete several attributes from object, until attribute storage resumes compact form */
+        for (u = max_compact; u >= min_dense; u--) {
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+            ret = H5Adelete(my_dataset, attrname);
+            CHECK(ret, FAIL, "H5Adelete");
+#if 0
+            /* Verify state of object */
+            ret = H5O__num_attrs_test(my_dataset, &nattrs);
+            CHECK(ret, FAIL, "H5O__num_attrs_test");
+            VERIFY(nattrs, u, "H5O__num_attrs_test");
+            is_empty = H5O__is_attr_empty_test(my_dataset);
+            VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+            is_dense = H5O__is_attr_dense_test(my_dataset);
+            VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+
+            /* Retrieve & verify # of records in the name & creation order indices */
+            ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
+            CHECK(ret, FAIL, "H5O__attr_dense_info_test");
+            VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
+#endif
+        } /* end for */
+
+        /* Delete another attribute, to push attribute storage into compact form */
+        HDsnprintf(attrname, sizeof(attrname), "attr %02u", (min_dense - 1));
+        ret = H5Adelete(my_dataset, attrname);
+        CHECK(ret, FAIL, "H5Adelete");
+#if 0
+        /* Verify state of object */
+        ret = H5O__num_attrs_test(my_dataset, &nattrs);
+        CHECK(ret, FAIL, "H5O__num_attrs_test");
+        VERIFY(nattrs, (min_dense - 1), "H5O__num_attrs_test");
+        is_empty = H5O__is_attr_empty_test(my_dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(my_dataset);
+        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+        /* Re-add attributes to get back into dense form */
+        for (u = (min_dense - 1); u < (max_compact + 1); u++) {
+            /* Create attribute */
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+            attr = H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+            CHECK(attr, FAIL, "H5Acreate2");
+
+            /* Write data into the attribute */
+            ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+            CHECK(ret, FAIL, "H5Awrite");
+
+            /* Close attribute */
+            ret = H5Aclose(attr);
+            CHECK(ret, FAIL, "H5Aclose");
+        } /* end for */
+#if 0
+        /* Verify state of object */
+        ret = H5O__num_attrs_test(my_dataset, &nattrs);
+        CHECK(ret, FAIL, "H5O__num_attrs_test");
+        VERIFY(nattrs, (max_compact + 1), "H5O__num_attrs_test");
+        is_empty = H5O__is_attr_empty_test(my_dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(my_dataset);
+        VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+
+        /* Retrieve & verify # of records in the name & creation order indices */
+        ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
+        CHECK(ret, FAIL, "H5O__attr_dense_info_test");
+        VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
+#endif
+    } /* end for */
+
+    /* Close Datasets */
+    ret = H5Dclose(dset1);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset2);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset3);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open file */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open datasets created */
+    dset1 = H5Dopen2(fid, DSET1_NAME, H5P_DEFAULT);
+    CHECK(dset1, FAIL, "H5Dopen2");
+    dset2 = H5Dopen2(fid, DSET2_NAME, H5P_DEFAULT);
+    CHECK(dset2, FAIL, "H5Dopen2");
+    dset3 = H5Dopen2(fid, DSET3_NAME, H5P_DEFAULT);
+    CHECK(dset3, FAIL, "H5Dopen2");
+
+    /* Work on all the datasets */
+    /* (second pass, after file re-open: repeat the shrink/grow cycle, then
+     *  delete every attribute) */
+    for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+        switch (curr_dset) {
+            case 0:
+                my_dataset = dset1;
+                break;
+
+            case 1:
+                my_dataset = dset2;
+                break;
+
+            case 2:
+                my_dataset = dset3;
+                break;
+
+            default:
+                HDassert(0 && "Too many datasets!");
+        } /* end switch */
+#if 0
+        /* Check on dataset's attribute storage status */
+        ret = H5O__num_attrs_test(my_dataset, &nattrs);
+        CHECK(ret, FAIL, "H5O__num_attrs_test");
+        VERIFY(nattrs, (max_compact + 1), "H5O__num_attrs_test");
+        is_empty = H5O__is_attr_empty_test(my_dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(my_dataset);
+        VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+
+        /* Retrieve & verify # of records in the name & creation order indices */
+        ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
+        CHECK(ret, FAIL, "H5O__attr_dense_info_test");
+        VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
+#endif
+        /* Delete several attributes from object, until attribute storage resumes compact form */
+        for (u = max_compact; u >= min_dense; u--) {
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+            ret = H5Adelete(my_dataset, attrname);
+            CHECK(ret, FAIL, "H5Adelete");
+#if 0
+            /* Verify state of object */
+            ret = H5O__num_attrs_test(my_dataset, &nattrs);
+            CHECK(ret, FAIL, "H5O__num_attrs_test");
+            VERIFY(nattrs, u, "H5O__num_attrs_test");
+            is_empty = H5O__is_attr_empty_test(my_dataset);
+            VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+            is_dense = H5O__is_attr_dense_test(my_dataset);
+            VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+
+            /* Retrieve & verify # of records in the name & creation order indices */
+            ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
+            CHECK(ret, FAIL, "H5O__attr_dense_info_test");
+            VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
+#endif
+        } /* end for */
+
+        /* Delete another attribute, to push attribute storage into compact form */
+        HDsnprintf(attrname, sizeof(attrname), "attr %02u", (min_dense - 1));
+        ret = H5Adelete(my_dataset, attrname);
+        CHECK(ret, FAIL, "H5Adelete");
+#if 0
+        /* Verify state of object */
+        ret = H5O__num_attrs_test(my_dataset, &nattrs);
+        CHECK(ret, FAIL, "H5O__num_attrs_test");
+        VERIFY(nattrs, (min_dense - 1), "H5O__num_attrs_test");
+        is_empty = H5O__is_attr_empty_test(my_dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(my_dataset);
+        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+        /* Re-add attributes to get back into dense form */
+        for (u = (min_dense - 1); u < (max_compact + 1); u++) {
+            /* Create attribute */
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+            attr = H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+            CHECK(attr, FAIL, "H5Acreate2");
+
+            /* Write data into the attribute */
+            ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+            CHECK(ret, FAIL, "H5Awrite");
+
+            /* Close attribute */
+            ret = H5Aclose(attr);
+            CHECK(ret, FAIL, "H5Aclose");
+        } /* end for */
+#if 0
+        /* Verify state of object */
+        ret = H5O__num_attrs_test(my_dataset, &nattrs);
+        CHECK(ret, FAIL, "H5O__num_attrs_test");
+        VERIFY(nattrs, (max_compact + 1), "H5O__num_attrs_test");
+        is_empty = H5O__is_attr_empty_test(my_dataset);
+        VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+        is_dense = H5O__is_attr_dense_test(my_dataset);
+        VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+
+        /* Retrieve & verify # of records in the name & creation order indices */
+        ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
+        CHECK(ret, FAIL, "H5O__attr_dense_info_test");
+        VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
+#endif
+        /* Delete all attributes */
+        for (u = max_compact; u > 0; u--) {
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+            ret = H5Adelete(my_dataset, attrname);
+            CHECK(ret, FAIL, "H5Adelete");
+        } /* end for */
+        /* (attribute 0 is deleted outside the loop because 'u' is unsigned
+         *  and "u >= 0" would never become false) */
+        HDsnprintf(attrname, sizeof(attrname), "attr %02u", 0);
+        ret = H5Adelete(my_dataset, attrname);
+        CHECK(ret, FAIL, "H5Adelete");
+    } /* end for */
+
+    /* Close Datasets */
+    ret = H5Dclose(dset1);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset2);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset3);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Close dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_attr_corder_transition() */
+
+/****************************************************************
+**
+** test_attr_corder_delete(): Test basic H5A (attribute) code.
+** Tests deleting object w/dense attribute storage on objects with attribute creation order info
+**
+****************************************************************/
+static void
+test_attr_corder_delete(hid_t fcpl, hid_t fapl)
+{
+    hid_t    fid;                 /* HDF5 File ID */
+    hid_t    dset1, dset2, dset3; /* Dataset IDs */
+    hid_t    my_dataset;          /* Current dataset ID */
+    hid_t    sid;                 /* Dataspace ID */
+    hid_t    attr;                /* Attribute ID */
+    hid_t    dcpl;                /* Dataset creation property list ID */
+    unsigned max_compact;         /* Maximum # of attributes to store compactly */
+    unsigned min_dense;           /* Minimum # of attributes to store "densely" */
+#if 0
+    htri_t  is_empty;     /* Are there any attributes? */
+    htri_t  is_dense;     /* Are attributes stored densely? */
+    hsize_t nattrs;       /* Number of attributes on object */
+    hsize_t name_count;   /* # of records in name index */
+    hsize_t corder_count; /* # of records in creation order index */
+#endif
+    unsigned reopen_file;             /* Whether to re-open the file before deleting group */
+    char     attrname[NAME_BUF_SIZE]; /* Name of attribute */
+#ifdef LATER
+    h5_stat_size_t empty_size; /* Size of empty file */
+    h5_stat_size_t file_size;  /* Size of file after operating on it */
+#endif /* LATER */
+    unsigned curr_dset; /* Current dataset to work on */
+    unsigned u;         /* Local index variable */
+    herr_t   ret;       /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Deleting Object w/Dense Attribute Storage and Creation Order Info\n"));
+
+    /* Create dataspace for dataset & attributes */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create dataset creation property list */
+    if (dcpl_g == H5P_DEFAULT) {
+        dcpl = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(dcpl, FAIL, "H5Pcreate");
+    }
+    else {
+        dcpl = H5Pcopy(dcpl_g);
+        CHECK(dcpl, FAIL, "H5Pcopy");
+    }
+
+    /* Set attribute creation order tracking & indexing for object */
+    ret = H5Pset_attr_creation_order(dcpl, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED));
+    CHECK(ret, FAIL, "H5Pset_attr_creation_order");
+
+    /* Query the attribute creation properties */
+    ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+    CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+/* XXX: Try to find a way to resize dataset's object header so that the object
+ * header can have one chunk, then retrieve "empty" file size and check
+ * that size after everything is deleted -QAK
+ */
+#ifdef LATER
+    /* Create empty file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Get the size of an empty file */
+    empty_size = h5_get_file_size(FILENAME);
+    CHECK(empty_size, FAIL, "h5_get_file_size");
+#endif /* LATER */
+
+    /* Loop to leave file open when deleting dataset, or to close & re-open file
+     * before deleting dataset */
+    for (reopen_file = FALSE; reopen_file <= TRUE; reopen_file++) {
+        /* Create test file */
+        fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+        CHECK(fid, FAIL, "H5Fcreate");
+
+        /* Create datasets */
+        dset1 = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dset1, FAIL, "H5Dcreate2");
+        dset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dset2, FAIL, "H5Dcreate2");
+        dset3 = H5Dcreate2(fid, DSET3_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dset3, FAIL, "H5Dcreate2");
+
+        /* Work on all the datasets */
+        for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+            switch (curr_dset) {
+                case 0:
+                    my_dataset = dset1;
+                    break;
+
+                case 1:
+                    my_dataset = dset2;
+                    break;
+
+                case 2:
+                    my_dataset = dset3;
+                    break;
+
+                default:
+                    HDassert(0 && "Too many datasets!");
+            } /* end switch */
+#if 0
+            /* Check on dataset's attribute storage status */
+            is_empty = H5O__is_attr_empty_test(my_dataset);
+            VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+            is_dense = H5O__is_attr_dense_test(my_dataset);
+            VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+            /* Create attributes, until attribute storage is in dense form
+             * (2 * max_compact is safely past the compact->dense transition) */
+            for (u = 0; u < max_compact * 2; u++) {
+                /* Create attribute */
+                HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+                attr = H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(attr, FAIL, "H5Acreate2");
+
+                /* Write data into the attribute */
+                ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+                CHECK(ret, FAIL, "H5Awrite");
+
+                /* Close attribute */
+                ret = H5Aclose(attr);
+                CHECK(ret, FAIL, "H5Aclose");
+            } /* end for */
+#if 0
+            /* Verify state of object */
+            ret = H5O__num_attrs_test(my_dataset, &nattrs);
+            CHECK(ret, FAIL, "H5O__num_attrs_test");
+            VERIFY(nattrs, (max_compact * 2), "H5O__num_attrs_test");
+            is_empty = H5O__is_attr_empty_test(my_dataset);
+            VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+            is_dense = H5O__is_attr_dense_test(my_dataset);
+            VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+
+            /* Retrieve & verify # of records in the name & creation order indices */
+            ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
+            CHECK(ret, FAIL, "H5O__attr_dense_info_test");
+            VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
+#endif
+        } /* end for */
+
+        /* Close Datasets */
+        ret = H5Dclose(dset1);
+        CHECK(ret, FAIL, "H5Dclose");
+        ret = H5Dclose(dset2);
+        CHECK(ret, FAIL, "H5Dclose");
+        ret = H5Dclose(dset3);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* Check for deleting datasets without re-opening file */
+        if (!reopen_file) {
+            ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Ldelete");
+            ret = H5Ldelete(fid, DSET2_NAME, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Ldelete");
+            ret = H5Ldelete(fid, DSET3_NAME, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Ldelete");
+        } /* end if */
+
+        /* Close file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+
+        /* Check for deleting dataset after re-opening file */
+        if (reopen_file) {
+            /* Re-open file */
+            fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+            CHECK(fid, FAIL, "H5Fopen");
+
+            /* Delete the datasets */
+            ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Ldelete");
+            ret = H5Ldelete(fid, DSET2_NAME, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Ldelete");
+            ret = H5Ldelete(fid, DSET3_NAME, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Ldelete");
+
+            /* Close file */
+            ret = H5Fclose(fid);
+            CHECK(ret, FAIL, "H5Fclose");
+        } /* end if */
+
+#ifdef LATER
+        /* Get the size of the file now */
+        file_size = h5_get_file_size(FILENAME);
+        CHECK(file_size, FAIL, "h5_get_file_size");
+        VERIFY(file_size, empty_size, "h5_get_file_size");
+#endif /* LATER */
+    } /* end for */
+
+    /* Close property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_attr_corder_delete() */
+
+/*-------------------------------------------------------------------------
+ * Function: attr_info_by_idx_check
+ *
+ * Purpose: Support routine for attr_info_by_idx, to verify the attribute
+ * info is correct for a attribute
+ *
+ * Note: This routine assumes that the attributes have been added to the
+ * object in alphabetical order.
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Quincey Koziol
+ *              Tuesday, February 13, 2007
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+attr_info_by_idx_check(hid_t obj_id, const char *attrname, hsize_t n, hbool_t use_index)
+{
+    char       tmpname[NAME_BUF_SIZE]; /* Temporary attribute name */
+    H5A_info_t ainfo;                  /* Attribute info struct */
+    int        old_nerrs;              /* Number of errors when entering this check */
+    herr_t     ret;                    /* Generic return value */
+
+    /* Retrieve the current # of reported errors */
+    old_nerrs = nerrors;
+
+    /* Verify the information for first attribute, in increasing creation order */
+    HDmemset(&ainfo, 0, sizeof(ainfo));
+    ret = H5Aget_info_by_idx(obj_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)0, &ainfo, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Aget_info_by_idx");
+    VERIFY(ainfo.corder, 0, "H5Aget_info_by_idx");
+
+    /* Verify the information for new attribute, in increasing creation order */
+    HDmemset(&ainfo, 0, sizeof(ainfo));
+    ret = H5Aget_info_by_idx(obj_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, n, &ainfo, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Aget_info_by_idx");
+    VERIFY(ainfo.corder, n, "H5Aget_info_by_idx");
+
+    /* Verify the name for new attribute, in increasing creation order */
+    HDmemset(tmpname, 0, (size_t)NAME_BUF_SIZE);
+    ret = (herr_t)H5Aget_name_by_idx(obj_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, n, tmpname,
+                                     (size_t)NAME_BUF_SIZE, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Aget_name_by_idx");
+    if (HDstrcmp(attrname, tmpname) != 0)
+        TestErrPrintf("Line %d: attribute name wrong!\n", __LINE__);
+
+    /* Don't test "native" order if there is no creation order index, since
+     * there's not a good way to easily predict the attribute's order in the name
+     * index.
+     */
+    if (use_index) {
+        /* Verify the information for first attribute, in native creation order */
+        HDmemset(&ainfo, 0, sizeof(ainfo));
+        ret = H5Aget_info_by_idx(obj_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC /* H5_ITER_NATIVE */,
+                                 (hsize_t)0, &ainfo, H5P_DEFAULT);
+        CHECK(ret, FAIL, "H5Aget_info_by_idx");
+        VERIFY(ainfo.corder, 0, "H5Aget_info_by_idx");
+
+        /* Verify the information for new attribute, in native creation order */
+        HDmemset(&ainfo, 0, sizeof(ainfo));
+        ret = H5Aget_info_by_idx(obj_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC /* H5_ITER_NATIVE */, n, &ainfo,
+                                 H5P_DEFAULT);
+        CHECK(ret, FAIL, "H5Aget_info_by_idx");
+        VERIFY(ainfo.corder, n, "H5Aget_info_by_idx");
+
+        /* Verify the name for new attribute, in increasing native order */
+        HDmemset(tmpname, 0, (size_t)NAME_BUF_SIZE);
+        ret = (herr_t)H5Aget_name_by_idx(obj_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC /* H5_ITER_NATIVE */, n,
+                                         tmpname, (size_t)NAME_BUF_SIZE, H5P_DEFAULT);
+        CHECK(ret, FAIL, "H5Aget_name_by_idx");
+        if (HDstrcmp(attrname, tmpname) != 0)
+            TestErrPrintf("Line %d: attribute name wrong!\n", __LINE__);
+    } /* end if */
+
+    /* Verify the information for first attribute, in decreasing creation order
+     * (with H5_ITER_DEC, index n maps back to creation order 0) */
+    HDmemset(&ainfo, 0, sizeof(ainfo));
+    ret = H5Aget_info_by_idx(obj_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, n, &ainfo, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Aget_info_by_idx");
+    VERIFY(ainfo.corder, 0, "H5Aget_info_by_idx");
+
+    /* Verify the information for new attribute, in decreasing creation order */
+    HDmemset(&ainfo, 0, sizeof(ainfo));
+    ret = H5Aget_info_by_idx(obj_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, (hsize_t)0, &ainfo, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Aget_info_by_idx");
+    VERIFY(ainfo.corder, n, "H5Aget_info_by_idx");
+
+    /* Verify the name for new attribute, in decreasing creation order */
+    HDmemset(tmpname, 0, (size_t)NAME_BUF_SIZE);
+    ret = (herr_t)H5Aget_name_by_idx(obj_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, (hsize_t)0, tmpname,
+                                     (size_t)NAME_BUF_SIZE, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Aget_name_by_idx");
+    if (HDstrcmp(attrname, tmpname) != 0)
+        TestErrPrintf("Line %d: attribute name wrong!\n", __LINE__);
+
+    /* Verify the information for first attribute, in increasing name order
+     * (valid because attributes were added in alphabetical order) */
+    HDmemset(&ainfo, 0, sizeof(ainfo));
+    ret = H5Aget_info_by_idx(obj_id, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)0, &ainfo, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Aget_info_by_idx");
+    VERIFY(ainfo.corder, 0, "H5Aget_info_by_idx");
+
+    /* Verify the information for new attribute, in increasing name order */
+    HDmemset(&ainfo, 0, sizeof(ainfo));
+    ret = H5Aget_info_by_idx(obj_id, ".", H5_INDEX_NAME, H5_ITER_INC, n, &ainfo, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Aget_info_by_idx");
+    VERIFY(ainfo.corder, n, "H5Aget_info_by_idx");
+
+    /* Verify the name for new attribute, in increasing name order */
+    HDmemset(tmpname, 0, (size_t)NAME_BUF_SIZE);
+    ret = (herr_t)H5Aget_name_by_idx(obj_id, ".", H5_INDEX_NAME, H5_ITER_INC, n, tmpname,
+                                     (size_t)NAME_BUF_SIZE, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Aget_name_by_idx");
+    if (HDstrcmp(attrname, tmpname) != 0)
+        TestErrPrintf("Line %d: attribute name wrong!\n", __LINE__);
+
+    /* Don't test "native" order queries on link name order, since there's not
+     * a good way to easily predict the order of the links in the name index.
+     */
+
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+    /* Verify the information for first attribute, in decreasing name order */
+    HDmemset(&ainfo, 0, sizeof(ainfo));
+    ret = H5Aget_info_by_idx(obj_id, ".", H5_INDEX_NAME, H5_ITER_DEC, n, &ainfo, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Aget_info_by_idx");
+    VERIFY(ainfo.corder, 0, "H5Aget_info_by_idx");
+
+    /* Verify the information for new attribute, in decreasing name order */
+    HDmemset(&ainfo, 0, sizeof(ainfo));
+    ret = H5Aget_info_by_idx(obj_id, ".", H5_INDEX_NAME, H5_ITER_DEC, (hsize_t)0, &ainfo, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Aget_info_by_idx");
+    VERIFY(ainfo.corder, n, "H5Aget_info_by_idx");
+
+    /* Verify the name for new attribute, in decreasing name order */
+    HDmemset(tmpname, 0, (size_t)NAME_BUF_SIZE);
+    ret = (herr_t)H5Aget_name_by_idx(obj_id, ".", H5_INDEX_NAME, H5_ITER_DEC, (hsize_t)0, tmpname,
+                                     (size_t)NAME_BUF_SIZE, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Aget_name_by_idx");
+    if (HDstrcmp(attrname, tmpname) != 0)
+        TestErrPrintf("Line %d: attribute name wrong!\n", __LINE__);
+#endif
+    /* Retrieve current # of errors; report failure if any were added above */
+    if (old_nerrs == nerrors)
+        return (0);
+    else
+        return (-1);
+} /* end attr_info_by_idx_check() */
+
+/****************************************************************
+**
+** test_attr_info_by_idx(): Test basic H5A (attribute) code.
+** Tests querying attribute info by index
+**
+****************************************************************/
+static void
+test_attr_info_by_idx(hbool_t new_format, hid_t fcpl, hid_t fapl)
+{
+    hid_t      fid;                 /* HDF5 File ID */
+    hid_t      dset1, dset2, dset3; /* Dataset IDs */
+    hid_t      my_dataset;          /* Current dataset ID */
+    hid_t      sid;                 /* Dataspace ID */
+    hid_t      attr;                /* Attribute ID */
+    hid_t      dcpl;                /* Dataset creation property list ID */
+    H5A_info_t ainfo;               /* Attribute information */
+    unsigned   max_compact;         /* Maximum # of attributes to store compactly */
+    unsigned   min_dense;           /* Minimum # of attributes to store "densely" */
+#if 0
+    htri_t  is_empty;     /* Are there any attributes? */
+    htri_t  is_dense;     /* Are attributes stored densely? */
+    hsize_t nattrs;       /* Number of attributes on object */
+    hsize_t name_count;   /* # of records in name index */
+    hsize_t corder_count; /* # of records in creation order index */
+#endif
+    unsigned use_index;               /* Use index on creation order values */
+    char     attrname[NAME_BUF_SIZE]; /* Name of attribute */
+    char     tmpname[NAME_BUF_SIZE];  /* Temporary attribute name */
+    unsigned curr_dset;               /* Current dataset to work on */
+    unsigned u;                       /* Local index variable */
+    herr_t   ret;                     /* Generic return value */
+
+    /* Create dataspace for dataset & attributes */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create dataset creation property list (copy the global one if set) */
+    if (dcpl_g == H5P_DEFAULT) {
+        dcpl = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(dcpl, FAIL, "H5Pcreate");
+    }
+    else {
+        dcpl = H5Pcopy(dcpl_g);
+        CHECK(dcpl, FAIL, "H5Pcopy");
+    }
+
+    /* Query the attribute creation properties */
+    ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+    CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+    /* Loop over using index for creation order value */
+    for (use_index = FALSE; use_index <= TRUE; use_index++) {
+        /* Output message about test being performed */
+        if (use_index)
+            MESSAGE(5, ("Testing Querying Attribute Info By Index w/Creation Order Index\n"))
+        else
+            MESSAGE(5, ("Testing Querying Attribute Info By Index w/o Creation Order Index\n"))
+
+        /* Create file */
+        fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+        CHECK(fid, FAIL, "H5Fcreate");
+
+        /* Set attribute creation order tracking & indexing for object */
+        if (new_format == TRUE) {
+            ret = H5Pset_attr_creation_order(
+                dcpl, (H5P_CRT_ORDER_TRACKED | (use_index ? H5P_CRT_ORDER_INDEXED : (unsigned)0)));
+            CHECK(ret, FAIL, "H5Pset_attr_creation_order");
+        } /* end if */
+
+        /* Create datasets */
+        dset1 = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dset1, FAIL, "H5Dcreate2");
+        dset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dset2, FAIL, "H5Dcreate2");
+        dset3 = H5Dcreate2(fid, DSET3_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dset3, FAIL, "H5Dcreate2");
+
+        /* Work on all the datasets */
+        for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+            switch (curr_dset) {
+                case 0:
+                    my_dataset = dset1;
+                    break;
+
+                case 1:
+                    my_dataset = dset2;
+                    break;
+
+                case 2:
+                    my_dataset = dset3;
+                    break;
+
+                default:
+                    HDassert(0 && "Too many datasets!");
+            } /* end switch */
+#if 0
+            /* Check on dataset's attribute storage status */
+            is_empty = H5O__is_attr_empty_test(my_dataset);
+            VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+            is_dense = H5O__is_attr_dense_test(my_dataset);
+            VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+            /* Check for query on non-existent attribute (object has none yet) */
+            H5E_BEGIN_TRY
+            {
+                ret = H5Aget_info_by_idx(my_dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)0, &ainfo,
+                                         H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret, FAIL, "H5Aget_info_by_idx");
+            H5E_BEGIN_TRY
+            {
+                ret = (herr_t)H5Aget_name_by_idx(my_dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)0,
+                                                 tmpname, (size_t)NAME_BUF_SIZE, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret, FAIL, "H5Aget_name_by_idx");
+
+            /* Create attributes, up to limit of compact form */
+            for (u = 0; u < max_compact; u++) {
+                /* Create attribute */
+                HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+                attr = H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(attr, FAIL, "H5Acreate2");
+
+                /* Write data into the attribute */
+                ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+                CHECK(ret, FAIL, "H5Awrite");
+
+                /* Close attribute */
+                ret = H5Aclose(attr);
+                CHECK(ret, FAIL, "H5Aclose");
+
+                /* Verify information for new attribute */
+                ret = attr_info_by_idx_check(my_dataset, attrname, (hsize_t)u, use_index);
+                CHECK(ret, FAIL, "attr_info_by_idx_check");
+            } /* end for */
+#if 0
+            /* Verify state of object */
+            ret = H5O__num_attrs_test(my_dataset, &nattrs);
+            CHECK(ret, FAIL, "H5O__num_attrs_test");
+            VERIFY(nattrs, max_compact, "H5O__num_attrs_test");
+            is_empty = H5O__is_attr_empty_test(my_dataset);
+            VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+            is_dense = H5O__is_attr_dense_test(my_dataset);
+            VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+            /* Check for out of bound offset queries (u == max_compact here) */
+            H5E_BEGIN_TRY
+            {
+                ret = H5Aget_info_by_idx(my_dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)u, &ainfo,
+                                         H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret, FAIL, "H5Aget_info_by_idx");
+            H5E_BEGIN_TRY
+            {
+                ret = H5Aget_info_by_idx(my_dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, (hsize_t)u, &ainfo,
+                                         H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret, FAIL, "H5Aget_info_by_idx");
+            H5E_BEGIN_TRY
+            {
+                ret = (herr_t)H5Aget_name_by_idx(my_dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)u,
+                                                 tmpname, (size_t)NAME_BUF_SIZE, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret, FAIL, "H5Aget_name_by_idx");
+
+            /* Create more attributes, to push into dense form */
+            for (; u < (max_compact * 2); u++) {
+                /* Create attribute */
+                HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+                attr = H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(attr, FAIL, "H5Acreate2");
+
+                /* Write data into the attribute */
+                ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+                CHECK(ret, FAIL, "H5Awrite");
+
+                /* Close attribute */
+                ret = H5Aclose(attr);
+                CHECK(ret, FAIL, "H5Aclose");
+#if 0
+                /* Verify state of object */
+                is_dense = H5O__is_attr_dense_test(my_dataset);
+                VERIFY(is_dense, (new_format ? TRUE : FALSE), "H5O__is_attr_dense_test");
+#endif
+                /* Verify information for new attribute */
+                ret = attr_info_by_idx_check(my_dataset, attrname, (hsize_t)u, use_index);
+                CHECK(ret, FAIL, "attr_info_by_idx_check");
+            } /* end for */
+#if 0
+            /* Verify state of object */
+            ret = H5O__num_attrs_test(my_dataset, &nattrs);
+            CHECK(ret, FAIL, "H5O__num_attrs_test");
+            VERIFY(nattrs, (max_compact * 2), "H5O__num_attrs_test");
+            is_empty = H5O__is_attr_empty_test(my_dataset);
+            VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+            is_dense = H5O__is_attr_dense_test(my_dataset);
+            VERIFY(is_dense, (new_format ? TRUE : FALSE), "H5O__is_attr_dense_test");
+
+            if (new_format) {
+                /* Retrieve & verify # of records in the name & creation order indices */
+                ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
+                CHECK(ret, FAIL, "H5O__attr_dense_info_test");
+                if (use_index)
+                    VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
+                VERIFY(name_count, (max_compact * 2), "H5O__attr_dense_info_test");
+            } /* end if */
+#endif
+            /* Check for out of bound offset queries (u == max_compact * 2 here) */
+            H5E_BEGIN_TRY
+            {
+                ret = H5Aget_info_by_idx(my_dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)u, &ainfo,
+                                         H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret, FAIL, "H5Aget_info_by_idx");
+            H5E_BEGIN_TRY
+            {
+                ret = H5Aget_info_by_idx(my_dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_DEC, (hsize_t)u, &ainfo,
+                                         H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret, FAIL, "H5Aget_info_by_idx");
+            H5E_BEGIN_TRY
+            {
+                ret = (herr_t)H5Aget_name_by_idx(my_dataset, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, (hsize_t)u,
+                                                 tmpname, (size_t)NAME_BUF_SIZE, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret, FAIL, "H5Aget_name_by_idx");
+        } /* end for */
+
+        /* Close Datasets */
+        ret = H5Dclose(dset1);
+        CHECK(ret, FAIL, "H5Dclose");
+        ret = H5Dclose(dset2);
+        CHECK(ret, FAIL, "H5Dclose");
+        ret = H5Dclose(dset3);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* Close file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+    } /* end for */
+
+    /* Close property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_attr_info_by_idx() */
+
+/***************************************************************
+**
+** test_attr_info_null_info_pointer(): A test to ensure that
+** passing a NULL attribute info pointer to H5Aget_info
+** (_by_name/_by_idx) doesn't cause bad behavior.
+**
+****************************************************************/
+static void
+test_attr_info_null_info_pointer(hid_t fcpl, hid_t fapl)
+{
+    hid_t  fid;      /* File ID */
+    hid_t  attr;     /* Attribute ID */
+    hid_t  sid;      /* Scalar dataspace ID */
+    herr_t ret = -1; /* Generic return value */
+
+    /* Set up a scalar dataspace for the attribute */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create the test file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Attach an attribute to the file's root group */
+    attr = H5Acreate2(fid, GET_INFO_NULL_POINTER_ATTR_NAME, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, FAIL, "H5Acreate2");
+
+    /* Each H5Aget_info* variant must fail cleanly (not crash) when handed a
+     * NULL attribute info pointer */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Aget_info(attr, NULL);
+    }
+    H5E_END_TRY;
+    CHECK(ret, SUCCEED, "H5Aget_info");
+
+    H5E_BEGIN_TRY
+    {
+        ret = H5Aget_info_by_name(fid, ".", GET_INFO_NULL_POINTER_ATTR_NAME, NULL, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    CHECK(ret, SUCCEED, "H5Aget_info_by_name");
+
+    H5E_BEGIN_TRY
+    {
+        ret = H5Aget_info_by_idx(fid, ".", H5_INDEX_NAME, H5_ITER_INC, 0, NULL, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    CHECK(ret, SUCCEED, "H5Aget_info_by_idx");
+
+    /* Release all resources */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+}
+
+/***************************************************************
+**
+** test_attr_rename_invalid_name(): A test to ensure that
+** passing a NULL or empty attribute name to
+** H5Arename(_by_name) doesn't cause bad behavior.
+**
+****************************************************************/
+static void
+test_attr_rename_invalid_name(hid_t fcpl, hid_t fapl)
+{
+    hid_t    fid;      /* File ID */
+    hid_t    attr;     /* Attribute ID */
+    hid_t    sid;      /* Scalar dataspace ID */
+    herr_t   ret = -1; /* Generic return value */
+    unsigned i;        /* Local index variable */
+
+    /* Invalid (old-name, new-name) pairs: NULL or empty on either side.
+     * Order matches the original straight-line checks. */
+    const char *bad_names[4][2] = {
+        {NULL, INVALID_RENAME_TEST_NEW_ATTR_NAME},
+        {"", INVALID_RENAME_TEST_NEW_ATTR_NAME},
+        {INVALID_RENAME_TEST_ATTR_NAME, NULL},
+        {INVALID_RENAME_TEST_ATTR_NAME, ""},
+    };
+
+    /* Set up a scalar dataspace for the attribute */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create the test file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Attach an attribute to the file's root group */
+    attr = H5Acreate2(fid, INVALID_RENAME_TEST_ATTR_NAME, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, FAIL, "H5Acreate2");
+
+    /* H5Arename must reject every invalid name combination */
+    for (i = 0; i < 4; i++) {
+        H5E_BEGIN_TRY
+        {
+            ret = H5Arename(fid, bad_names[i][0], bad_names[i][1]);
+        }
+        H5E_END_TRY;
+        CHECK(ret, SUCCEED, "H5Arename");
+    }
+
+    /* H5Arename_by_name must reject the same combinations */
+    for (i = 0; i < 4; i++) {
+        H5E_BEGIN_TRY
+        {
+            ret = H5Arename_by_name(fid, ".", bad_names[i][0], bad_names[i][1], H5P_DEFAULT);
+        }
+        H5E_END_TRY;
+        CHECK(ret, SUCCEED, "H5Arename_by_name");
+    }
+
+    /* Release all resources */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+}
+
+/***************************************************************
+**
+** test_attr_get_name_invalid_buf(): A test to ensure that
+** passing a NULL buffer to H5Aget_name(_by_idx) when
+** the 'size' parameter is non-zero doesn't cause bad
+** behavior.
+**
+****************************************************************/
+static void
+test_attr_get_name_invalid_buf(hid_t fcpl, hid_t fapl)
+{
+    ssize_t name_len = -1; /* Return value from the name queries */
+    hid_t   fid;           /* File ID */
+    hid_t   attr;          /* Attribute ID */
+    hid_t   sid;           /* Scalar dataspace ID */
+
+    /* Set up a scalar dataspace for the attribute */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create the test file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Attach an attribute to the file's root group */
+    attr =
+        H5Acreate2(fid, GET_NAME_INVALID_BUF_TEST_ATTR_NAME, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, FAIL, "H5Acreate2");
+
+    /* A NULL name buffer with a non-zero size must be rejected, not written to */
+    H5E_BEGIN_TRY
+    {
+        name_len = H5Aget_name(attr, 1, NULL);
+    }
+    H5E_END_TRY;
+    VERIFY(name_len, FAIL, "H5Aget_name");
+
+    H5E_BEGIN_TRY
+    {
+        name_len = H5Aget_name_by_idx(fid, ".", H5_INDEX_NAME, H5_ITER_INC, 0, NULL, 1, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(name_len, FAIL, "H5Aget_name_by_idx");
+
+    /* Release all resources */
+    name_len = H5Sclose(sid);
+    CHECK(name_len, FAIL, "H5Sclose");
+    name_len = H5Aclose(attr);
+    CHECK(name_len, FAIL, "H5Aclose");
+    name_len = H5Fclose(fid);
+    CHECK(name_len, FAIL, "H5Fclose");
+}
+
+/****************************************************************
+**
+** test_attr_delete_by_idx(): Test basic H5A (attribute) code.
+** Tests deleting attribute by index
+**
+****************************************************************/
+static void
+test_attr_delete_by_idx(hbool_t new_format, hid_t fcpl, hid_t fapl)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t dset1, dset2, dset3; /* Dataset IDs */
+ hid_t my_dataset; /* Current dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ H5A_info_t ainfo; /* Attribute information */
+ unsigned max_compact; /* Maximum # of links to store in group compactly */
+ unsigned min_dense; /* Minimum # of links to store in group "densely" */
+#if 0
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
+ hsize_t nattrs; /* Number of attributes on object */
+ hsize_t name_count; /* # of records in name index */
+ hsize_t corder_count; /* # of records in creation order index */
+#endif
+ H5_index_t idx_type; /* Type of index to operate on */
+ H5_iter_order_t order; /* Order within in the index */
+ unsigned use_index; /* Use index on creation order values */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char tmpname[NAME_BUF_SIZE]; /* Temporary attribute name */
+ unsigned curr_dset; /* Current dataset to work on */
+ unsigned u; /* Local index variable */
+ herr_t ret; /* Generic return value */
+
+ MESSAGE(5, ("Testing Deleting Attribute By Index\n"))
+
+ /* Create dataspace for dataset & attributes */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Create dataset creation property list */
+ if (dcpl_g == H5P_DEFAULT) {
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, FAIL, "H5Pcreate");
+ }
+ else {
+ dcpl = H5Pcopy(dcpl_g);
+ CHECK(dcpl, FAIL, "H5Pcopy");
+ }
+
+ /* Query the attribute creation properties */
+ ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+ /* Loop over operating on different indices on link fields */
+ for (idx_type = H5_INDEX_NAME; idx_type <= H5_INDEX_CRT_ORDER; idx_type++) {
+ /* Loop over operating in different orders */
+ for (order = H5_ITER_INC; order <= H5_ITER_DEC; order++) {
+ /* Loop over using index for creation order value */
+ for (use_index = FALSE; use_index <= TRUE; use_index++) {
+ /* Print appropriate test message */
+ if (idx_type == H5_INDEX_CRT_ORDER) {
+ if (order == H5_ITER_INC) {
+ if (use_index)
+ MESSAGE(5, ("Testing Deleting Attribute By Creation Order Index in Increasing "
+ "Order w/Creation Order Index\n"))
+ else
+ MESSAGE(5, ("Testing Deleting Attribute By Creation Order Index in Increasing "
+ "Order w/o Creation Order Index\n"))
+ } /* end if */
+ else {
+ if (use_index)
+ MESSAGE(5, ("Testing Deleting Attribute By Creation Order Index in Decreasing "
+ "Order w/Creation Order Index\n"))
+ else
+ MESSAGE(5, ("Testing Deleting Attribute By Creation Order Index in Decreasing "
+ "Order w/o Creation Order Index\n"))
+ } /* end else */
+ } /* end if */
+ else {
+ if (order == H5_ITER_INC) {
+ if (use_index)
+ MESSAGE(5, ("Testing Deleting Attribute By Name Index in Increasing Order "
+ "w/Creation Order Index\n"))
+ else
+ MESSAGE(5, ("Testing Deleting Attribute By Name Index in Increasing Order w/o "
+ "Creation Order Index\n"))
+ } /* end if */
+ else {
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ if (use_index)
+ MESSAGE(5, ("Testing Deleting Attribute By Name Index in Decreasing Order "
+ "w/Creation Order Index\n"))
+ else
+ MESSAGE(5, ("Testing Deleting Attribute By Name Index in Decreasing Order w/o "
+ "Creation Order Index\n"))
+#else
+ continue;
+#endif
+ } /* end else */
+ } /* end else */
+
+ /* Create file */
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Set attribute creation order tracking & indexing for object */
+ if (new_format == TRUE) {
+ ret = H5Pset_attr_creation_order(
+ dcpl, (H5P_CRT_ORDER_TRACKED | (use_index ? H5P_CRT_ORDER_INDEXED : (unsigned)0)));
+ CHECK(ret, FAIL, "H5Pset_attr_creation_order");
+ } /* end if */
+
+ /* Create datasets */
+ dset1 = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dset1, FAIL, "H5Dcreate2");
+ dset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dset2, FAIL, "H5Dcreate2");
+ dset3 = H5Dcreate2(fid, DSET3_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dset3, FAIL, "H5Dcreate2");
+
+ /* Work on all the datasets */
+ for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+ switch (curr_dset) {
+ case 0:
+ my_dataset = dset1;
+ break;
+
+ case 1:
+ my_dataset = dset2;
+ break;
+
+ case 2:
+ my_dataset = dset3;
+ break;
+
+ default:
+ HDassert(0 && "Too many datasets!");
+ } /* end switch */
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_empty = H5O__is_attr_empty_test(my_dataset);
+ VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+ is_dense = H5O__is_attr_dense_test(my_dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+ /* Check for deleting non-existent attribute */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Adelete_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Adelete_by_idx");
+
+ /* Create attributes, up to limit of compact form */
+ for (u = 0; u < max_compact; u++) {
+ /* Create attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr =
+ H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Verify information for new attribute */
+ ret = attr_info_by_idx_check(my_dataset, attrname, (hsize_t)u, use_index);
+ CHECK(ret, FAIL, "attr_info_by_idx_check");
+ } /* end for */
+#if 0
+ /* Verify state of object */
+ ret = H5O__num_attrs_test(my_dataset, &nattrs);
+ CHECK(ret, FAIL, "H5O__num_attrs_test");
+ VERIFY(nattrs, max_compact, "H5O__num_attrs_test");
+ is_empty = H5O__is_attr_empty_test(my_dataset);
+ VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+ is_dense = H5O__is_attr_dense_test(my_dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+ /* Check for out of bound deletions */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Adelete_by_idx(my_dataset, ".", idx_type, order, (hsize_t)u, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Adelete_by_idx");
+ } /* end for */
+
+ /* Work on all the datasets */
+ for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+ switch (curr_dset) {
+ case 0:
+ my_dataset = dset1;
+ break;
+
+ case 1:
+ my_dataset = dset2;
+ break;
+
+ case 2:
+ my_dataset = dset3;
+ break;
+
+ default:
+ HDassert(0 && "Too many datasets!");
+ } /* end switch */
+
+ /* Delete attributes from compact storage */
+ for (u = 0; u < (max_compact - 1); u++) {
+ /* Delete first attribute in appropriate order */
+ ret = H5Adelete_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Adelete_by_idx");
+
+ /* Verify the attribute information for first attribute in appropriate order */
+ HDmemset(&ainfo, 0, sizeof(ainfo));
+ ret = H5Aget_info_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0, &ainfo,
+ H5P_DEFAULT);
+ if (new_format) {
+ if (order == H5_ITER_INC) {
+ VERIFY(ainfo.corder, (u + 1), "H5Aget_info_by_idx");
+ } /* end if */
+ else {
+ VERIFY(ainfo.corder, (max_compact - (u + 2)), "H5Aget_info_by_idx");
+ } /* end else */
+ } /* end if */
+
+ /* Verify the name for first attribute in appropriate order */
+ HDmemset(tmpname, 0, (size_t)NAME_BUF_SIZE);
+ ret = (herr_t)H5Aget_name_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0,
+ tmpname, (size_t)NAME_BUF_SIZE, H5P_DEFAULT);
+ if (order == H5_ITER_INC)
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", (u + 1));
+ else
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", (max_compact - (u + 2)));
+ ret = HDstrcmp(attrname, tmpname);
+ VERIFY(ret, 0, "H5Aget_name_by_idx");
+ } /* end for */
+
+ /* Delete last attribute */
+ ret = H5Adelete_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Adelete_by_idx");
+#if 0
+ /* Verify state of attribute storage (empty) */
+ is_empty = H5O__is_attr_empty_test(my_dataset);
+ VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+#endif
+ } /* end for */
+
+ /* Work on all the datasets */
+ for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+ switch (curr_dset) {
+ case 0:
+ my_dataset = dset1;
+ break;
+
+ case 1:
+ my_dataset = dset2;
+ break;
+
+ case 2:
+ my_dataset = dset3;
+ break;
+
+ default:
+ HDassert(0 && "Too many datasets!");
+ } /* end switch */
+
+ /* Create more attributes, to push into dense form */
+ for (u = 0; u < (max_compact * 2); u++) {
+ /* Create attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr =
+ H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+#if 0
+ /* Verify state of object */
+ if (u >= max_compact) {
+ is_dense = H5O__is_attr_dense_test(my_dataset);
+ VERIFY(is_dense, (new_format ? TRUE : FALSE), "H5O__is_attr_dense_test");
+ } /* end if */
+#endif
+ /* Verify information for new attribute */
+ ret = attr_info_by_idx_check(my_dataset, attrname, (hsize_t)u, use_index);
+ CHECK(ret, FAIL, "attr_info_by_idx_check");
+ } /* end for */
+#if 0
+ /* Verify state of object */
+ ret = H5O__num_attrs_test(my_dataset, &nattrs);
+ CHECK(ret, FAIL, "H5O__num_attrs_test");
+ VERIFY(nattrs, (max_compact * 2), "H5O__num_attrs_test");
+ is_empty = H5O__is_attr_empty_test(my_dataset);
+ VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+ is_dense = H5O__is_attr_dense_test(my_dataset);
+ VERIFY(is_dense, (new_format ? TRUE : FALSE), "H5O__is_attr_dense_test");
+
+ if (new_format) {
+ /* Retrieve & verify # of records in the name & creation order indices */
+ ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
+ CHECK(ret, FAIL, "H5O__attr_dense_info_test");
+ if (use_index)
+ VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
+ VERIFY(name_count, (max_compact * 2), "H5O__attr_dense_info_test");
+ } /* end if */
+#endif
+ /* Check for out of bound deletion */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Adelete_by_idx(my_dataset, ".", idx_type, order, (hsize_t)u, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Adelete_by_idx");
+ } /* end for */
+
+ /* Work on all the datasets */
+ for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+ switch (curr_dset) {
+ case 0:
+ my_dataset = dset1;
+ break;
+
+ case 1:
+ my_dataset = dset2;
+ break;
+
+ case 2:
+ my_dataset = dset3;
+ break;
+
+ default:
+ HDassert(0 && "Too many datasets!");
+ } /* end switch */
+
+ /* Delete attributes from dense storage */
+ for (u = 0; u < ((max_compact * 2) - 1); u++) {
+ /* Delete first attribute in appropriate order */
+ ret = H5Adelete_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Adelete_by_idx");
+
+ /* Verify the attribute information for first attribute in appropriate order */
+ HDmemset(&ainfo, 0, sizeof(ainfo));
+ ret = H5Aget_info_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0, &ainfo,
+ H5P_DEFAULT);
+ if (new_format) {
+ if (order == H5_ITER_INC) {
+ VERIFY(ainfo.corder, (u + 1), "H5Aget_info_by_idx");
+ } /* end if */
+ else {
+ VERIFY(ainfo.corder, ((max_compact * 2) - (u + 2)), "H5Aget_info_by_idx");
+ } /* end else */
+ } /* end if */
+
+ /* Verify the name for first attribute in appropriate order */
+ HDmemset(tmpname, 0, (size_t)NAME_BUF_SIZE);
+ ret = (herr_t)H5Aget_name_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0,
+ tmpname, (size_t)NAME_BUF_SIZE, H5P_DEFAULT);
+ if (order == H5_ITER_INC)
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", (u + 1));
+ else
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u",
+ ((max_compact * 2) - (u + 2)));
+ ret = HDstrcmp(attrname, tmpname);
+ VERIFY(ret, 0, "H5Aget_name_by_idx");
+ } /* end for */
+
+ /* Delete last attribute */
+ ret = H5Adelete_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Adelete_by_idx");
+#if 0
+ /* Verify state of attribute storage (empty) */
+ is_empty = H5O__is_attr_empty_test(my_dataset);
+ VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+#endif
+ /* Check for deletion on empty attribute storage again */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Adelete_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Adelete_by_idx");
+ } /* end for */
+
+ /* Delete attributes in middle */
+
+ /* Work on all the datasets */
+ for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+ switch (curr_dset) {
+ case 0:
+ my_dataset = dset1;
+ break;
+
+ case 1:
+ my_dataset = dset2;
+ break;
+
+ case 2:
+ my_dataset = dset3;
+ break;
+
+ default:
+ HDassert(0 && "Too many datasets!");
+ } /* end switch */
+
+ /* Create attributes, to push into dense form */
+ for (u = 0; u < (max_compact * 2); u++) {
+ /* Create attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr =
+ H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+#if 0
+ /* Verify state of object */
+ if (u >= max_compact) {
+ is_dense = H5O__is_attr_dense_test(my_dataset);
+ VERIFY(is_dense, (new_format ? TRUE : FALSE), "H5O__is_attr_dense_test");
+ } /* end if */
+#endif
+ /* Verify information for new attribute */
+ ret = attr_info_by_idx_check(my_dataset, attrname, (hsize_t)u, use_index);
+ CHECK(ret, FAIL, "attr_info_by_idx_check");
+ } /* end for */
+ } /* end for */
+
+ /* Work on all the datasets */
+ for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+ switch (curr_dset) {
+ case 0:
+ my_dataset = dset1;
+ break;
+
+ case 1:
+ my_dataset = dset2;
+ break;
+
+ case 2:
+ my_dataset = dset3;
+ break;
+
+ default:
+ HDassert(0 && "Too many datasets!");
+ } /* end switch */
+
+ /* Delete every other attribute from dense storage, in appropriate order */
+ for (u = 0; u < max_compact; u++) {
+ /* Delete attribute */
+ ret = H5Adelete_by_idx(my_dataset, ".", idx_type, order, (hsize_t)u, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Adelete_by_idx");
+
+ /* Verify the attribute information for first attribute in appropriate order */
+ HDmemset(&ainfo, 0, sizeof(ainfo));
+ ret = H5Aget_info_by_idx(my_dataset, ".", idx_type, order, (hsize_t)u, &ainfo,
+ H5P_DEFAULT);
+ if (new_format) {
+ if (order == H5_ITER_INC) {
+ VERIFY(ainfo.corder, ((u * 2) + 1), "H5Aget_info_by_idx");
+ } /* end if */
+ else {
+ VERIFY(ainfo.corder, ((max_compact * 2) - ((u * 2) + 2)),
+ "H5Aget_info_by_idx");
+ } /* end else */
+ } /* end if */
+
+ /* Verify the name for first attribute in appropriate order */
+ HDmemset(tmpname, 0, (size_t)NAME_BUF_SIZE);
+ ret = (herr_t)H5Aget_name_by_idx(my_dataset, ".", idx_type, order, (hsize_t)u,
+ tmpname, (size_t)NAME_BUF_SIZE, H5P_DEFAULT);
+ if (order == H5_ITER_INC)
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", ((u * 2) + 1));
+ else
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u",
+ ((max_compact * 2) - ((u * 2) + 2)));
+ ret = HDstrcmp(attrname, tmpname);
+ VERIFY(ret, 0, "H5Aget_name_by_idx");
+ } /* end for */
+ } /* end for */
+
+ /* Work on all the datasets */
+ for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+ switch (curr_dset) {
+ case 0:
+ my_dataset = dset1;
+ break;
+
+ case 1:
+ my_dataset = dset2;
+ break;
+
+ case 2:
+ my_dataset = dset3;
+ break;
+
+ default:
+ HDassert(0 && "Too many datasets!");
+ } /* end switch */
+
+ /* Delete remaining attributes from dense storage, in appropriate order */
+ for (u = 0; u < (max_compact - 1); u++) {
+ /* Delete attribute */
+ ret = H5Adelete_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Adelete_by_idx");
+
+ /* Verify the attribute information for first attribute in appropriate order */
+ HDmemset(&ainfo, 0, sizeof(ainfo));
+ ret = H5Aget_info_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0, &ainfo,
+ H5P_DEFAULT);
+ if (new_format) {
+ if (order == H5_ITER_INC) {
+ VERIFY(ainfo.corder, ((u * 2) + 3), "H5Aget_info_by_idx");
+ } /* end if */
+ else {
+ VERIFY(ainfo.corder, ((max_compact * 2) - ((u * 2) + 4)),
+ "H5Aget_info_by_idx");
+ } /* end else */
+ } /* end if */
+
+ /* Verify the name for first attribute in appropriate order */
+ HDmemset(tmpname, 0, (size_t)NAME_BUF_SIZE);
+ ret = (herr_t)H5Aget_name_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0,
+ tmpname, (size_t)NAME_BUF_SIZE, H5P_DEFAULT);
+ if (order == H5_ITER_INC)
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", ((u * 2) + 3));
+ else
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u",
+ ((max_compact * 2) - ((u * 2) + 4)));
+ ret = HDstrcmp(attrname, tmpname);
+ VERIFY(ret, 0, "H5Aget_name_by_idx");
+ } /* end for */
+
+ /* Delete last attribute */
+ ret = H5Adelete_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Adelete_by_idx");
+#if 0
+ /* Verify state of attribute storage (empty) */
+ is_empty = H5O__is_attr_empty_test(my_dataset);
+ VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+#endif
+ /* Check for deletion on empty attribute storage again */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Adelete_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Adelete_by_idx");
+ } /* end for */
+
+ /* Close Datasets */
+ ret = H5Dclose(dset1);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Dclose(dset2);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Dclose(dset3);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+ } /* end for */
+ } /* end for */
+ } /* end for */
+
+ /* Close property list */
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* test_attr_delete_by_idx() */
+
+/****************************************************************
+**
+** attr_iterate2_cb(): Revised attribute operator
+**
+****************************************************************/
+static herr_t
+attr_iterate2_cb(hid_t loc_id, const char *attr_name, const H5A_info_t *info, void *_op_data)
+{
+ attr_iter_info_t *op_data = (attr_iter_info_t *)_op_data; /* User data */
+ char attrname[NAME_BUF_SIZE]; /* Object name */
+ H5A_info_t my_info; /* Local attribute info */
+
+ /* Increment # of times the callback was called */
+ op_data->ncalled++;
+
+ /* Get the attribute information directly to compare */
+ if (H5Aget_info_by_name(loc_id, ".", attr_name, &my_info, H5P_DEFAULT) < 0)
+ return (H5_ITER_ERROR);
+
+ /* Check more things for revised attribute iteration (vs. older attribute iteration) */
+ if (info) {
+ /* Check for correct order of iteration */
+ /* (if we are operating in increasing or decreasing order) */
+ if (op_data->order != H5_ITER_NATIVE)
+ if (info->corder != op_data->curr)
+ return (H5_ITER_ERROR);
+
+ /* Compare attribute info structs */
+ if (info->corder_valid != my_info.corder_valid)
+ return (H5_ITER_ERROR);
+ if (info->corder != my_info.corder)
+ return (H5_ITER_ERROR);
+ if (info->cset != my_info.cset)
+ return (H5_ITER_ERROR);
+ if (info->data_size != my_info.data_size)
+ return (H5_ITER_ERROR);
+ } /* end if */
+
+ /* Verify name of link */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", (unsigned)my_info.corder);
+ if (HDstrcmp(attr_name, attrname) != 0)
+ return (H5_ITER_ERROR);
+
+ /* Check if we've visited this link before */
+ if ((size_t)op_data->curr >= op_data->max_visit)
+ return (H5_ITER_ERROR);
+ if (op_data->visited[op_data->curr])
+ return (H5_ITER_ERROR);
+ op_data->visited[op_data->curr] = TRUE;
+
+ /* Advance to next value, in correct direction */
+ if (op_data->order != H5_ITER_DEC)
+ op_data->curr++;
+ else
+ op_data->curr--;
+
+ /* Check for stopping in the middle of iterating */
+ if (op_data->stop > 0)
+ if (--op_data->stop == 0)
+ return (CORDER_ITER_STOP);
+
+ return (H5_ITER_CONT);
+} /* end attr_iterate2_cb() */
+
#ifndef H5_NO_DEPRECATED_SYMBOLS

/****************************************************************
**
** attr_iterate1_cb(): Attribute operator, for the deprecated
**      H5Aiterate1() interface; forwards to attr_iterate2_cb()
**      with a NULL info pointer.
**
****************************************************************/
#if 0
static herr_t
attr_iterate1_cb(hid_t loc_id, const char *attr_name, void *_op_data)
{
    return (attr_iterate2_cb(loc_id, attr_name, NULL, _op_data));
} /* end attr_iterate1_cb() */
#endif
#endif /* H5_NO_DEPRECATED_SYMBOLS */
+
+#ifndef NO_ITERATION_RESTART
+/*-------------------------------------------------------------------------
+ * Function: attr_iterate2_fail_cb
+ *
+ * Purpose: Callback routine for iterating over attributes on object that
+ * always returns failure
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Quincey Koziol
+ * Tuesday, February 20, 2007
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+attr_iterate2_fail_cb(hid_t H5_ATTR_UNUSED group_id, const char H5_ATTR_UNUSED *attr_name,
+ const H5A_info_t H5_ATTR_UNUSED *info, void H5_ATTR_UNUSED *_op_data)
+{
+ return (H5_ITER_ERROR);
+} /* end attr_iterate2_fail_cb() */
+
+/*-------------------------------------------------------------------------
+ * Function: attr_iterate_check
+ *
+ * Purpose: Check iteration over attributes on an object
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Quincey Koziol
+ * Tuesday, February 20, 2007
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+attr_iterate_check(hid_t fid, const char *dsetname, hid_t obj_id, H5_index_t idx_type, H5_iter_order_t order,
+ unsigned max_attrs, attr_iter_info_t *iter_info)
+{
+ unsigned v; /* Local index variable */
+ hsize_t skip; /* # of attributes to skip on object */
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+ unsigned oskip; /* # of attributes to skip on object, with H5Aiterate1 */
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+#endif
+ int old_nerrs; /* Number of errors when entering this check */
+ herr_t ret; /* Generic return value */
+
+ /* Retrieve the current # of reported errors */
+ old_nerrs = nerrors;
+
+ /* Iterate over attributes on object */
+ iter_info->nskipped = (unsigned)(skip = 0);
+ iter_info->order = order;
+ iter_info->stop = -1;
+ iter_info->ncalled = 0;
+ iter_info->curr = order != H5_ITER_DEC ? 0 : (max_attrs - 1);
+ HDmemset(iter_info->visited, 0, sizeof(hbool_t) * iter_info->max_visit);
+ ret = H5Aiterate2(obj_id, idx_type, order, &skip, attr_iterate2_cb, iter_info);
+ CHECK(ret, FAIL, "H5Aiterate2");
+
+ /* Verify that we visited all the attributes */
+ VERIFY(skip, max_attrs, "H5Aiterate2");
+ for (v = 0; v < max_attrs; v++)
+ VERIFY(iter_info->visited[v], TRUE, "H5Aiterate2");
+
+ /* Iterate over attributes on object */
+ iter_info->nskipped = (unsigned)(skip = 0);
+ iter_info->order = order;
+ iter_info->stop = -1;
+ iter_info->ncalled = 0;
+ iter_info->curr = order != H5_ITER_DEC ? 0 : (max_attrs - 1);
+ HDmemset(iter_info->visited, 0, sizeof(hbool_t) * iter_info->max_visit);
+ ret = H5Aiterate_by_name(fid, dsetname, idx_type, order, &skip, attr_iterate2_cb, iter_info, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aiterate_by_name");
+
+ /* Verify that we visited all the attributes */
+ VERIFY(skip, max_attrs, "H5Aiterate_by_name");
+ for (v = 0; v < max_attrs; v++)
+ VERIFY(iter_info->visited[v], TRUE, "H5Aiterate_by_name");
+
+ /* Iterate over attributes on object */
+ iter_info->nskipped = (unsigned)(skip = 0);
+ iter_info->order = order;
+ iter_info->stop = -1;
+ iter_info->ncalled = 0;
+ iter_info->curr = order != H5_ITER_DEC ? 0 : (max_attrs - 1);
+ HDmemset(iter_info->visited, 0, sizeof(hbool_t) * iter_info->max_visit);
+ ret = H5Aiterate_by_name(obj_id, ".", idx_type, order, &skip, attr_iterate2_cb, iter_info, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aiterate_by_name");
+
+ /* Verify that we visited all the attributes */
+ VERIFY(skip, max_attrs, "H5Aiterate_by_name");
+ for (v = 0; v < max_attrs; v++)
+ VERIFY(iter_info->visited[v], TRUE, "H5Aiterate_by_name");
+
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+ /* Iterate over attributes on object, with H5Aiterate1 */
+ iter_info->nskipped = oskip = 0;
+ iter_info->order = order;
+ iter_info->stop = -1;
+ iter_info->ncalled = 0;
+ iter_info->curr = order != H5_ITER_DEC ? 0 : (max_attrs - 1);
+ HDmemset(iter_info->visited, 0, sizeof(hbool_t) * iter_info->max_visit);
+ ret = H5Aiterate1(obj_id, &oskip, attr_iterate1_cb, iter_info);
+ CHECK(ret, FAIL, "H5Aiterate1");
+
+ /* Verify that we visited all the attributes */
+ VERIFY(skip, max_attrs, "H5Aiterate1");
+ for (v = 0; v < max_attrs; v++)
+ VERIFY(iter_info->visited[v], TRUE, "H5Aiterate1");
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+#endif
+
+ /* Skip over some attributes on object */
+ iter_info->nskipped = (unsigned)(skip = max_attrs / 2);
+ iter_info->order = order;
+ iter_info->stop = -1;
+ iter_info->ncalled = 0;
+ iter_info->curr = order != H5_ITER_DEC ? skip : ((max_attrs - 1) - skip);
+ HDmemset(iter_info->visited, 0, sizeof(hbool_t) * iter_info->max_visit);
+ ret = H5Aiterate2(obj_id, idx_type, order, &skip, attr_iterate2_cb, iter_info);
+ CHECK(ret, FAIL, "H5Aiterate2");
+
+ /* Verify that we visited all the attributes */
+ VERIFY(skip, max_attrs, "H5Aiterate2");
+ if (order == H5_ITER_INC) {
+ for (v = 0; v < (max_attrs / 2); v++)
+ VERIFY(iter_info->visited[v + (max_attrs / 2)], TRUE, "H5Aiterate2");
+ } /* end if */
+ else if (order == H5_ITER_DEC) {
+ for (v = 0; v < (max_attrs / 2); v++)
+ VERIFY(iter_info->visited[v], TRUE, "H5Aiterate2");
+ } /* end if */
+ else {
+ unsigned nvisit = 0; /* # of links visited */
+
+ HDassert(order == H5_ITER_NATIVE);
+ for (v = 0; v < max_attrs; v++)
+ if (iter_info->visited[v] == TRUE)
+ nvisit++;
+
+ VERIFY(skip, (max_attrs / 2), "H5Aiterate2");
+ } /* end else */
+
+ /* Skip over some attributes on object */
+ iter_info->nskipped = (unsigned)(skip = max_attrs / 2);
+ iter_info->order = order;
+ iter_info->stop = -1;
+ iter_info->ncalled = 0;
+ iter_info->curr = order != H5_ITER_DEC ? skip : ((max_attrs - 1) - skip);
+ HDmemset(iter_info->visited, 0, sizeof(hbool_t) * iter_info->max_visit);
+ ret = H5Aiterate_by_name(fid, dsetname, idx_type, order, &skip, attr_iterate2_cb, iter_info, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aiterate_by_name");
+
+ /* Verify that we visited all the attributes */
+ VERIFY(skip, max_attrs, "H5Aiterate_by_name");
+ if (order == H5_ITER_INC) {
+ for (v = 0; v < (max_attrs / 2); v++)
+ VERIFY(iter_info->visited[v + (max_attrs / 2)], TRUE, "H5Aiterate_by_name");
+ } /* end if */
+ else if (order == H5_ITER_DEC) {
+ for (v = 0; v < (max_attrs / 2); v++)
+ VERIFY(iter_info->visited[v], TRUE, "H5Aiterate_by_name");
+ } /* end if */
+ else {
+ unsigned nvisit = 0; /* # of links visited */
+
+ HDassert(order == H5_ITER_NATIVE);
+ for (v = 0; v < max_attrs; v++)
+ if (iter_info->visited[v] == TRUE)
+ nvisit++;
+
+ VERIFY(skip, (max_attrs / 2), "H5Aiterate_by_name");
+ } /* end else */
+
+ /* Skip over some attributes on object */
+ iter_info->nskipped = (unsigned)(skip = max_attrs / 2);
+ iter_info->order = order;
+ iter_info->stop = -1;
+ iter_info->ncalled = 0;
+ iter_info->curr = order != H5_ITER_DEC ? skip : ((max_attrs - 1) - skip);
+ HDmemset(iter_info->visited, 0, sizeof(hbool_t) * iter_info->max_visit);
+ ret = H5Aiterate_by_name(obj_id, ".", idx_type, order, &skip, attr_iterate2_cb, iter_info, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aiterate_by_name");
+
+ /* Verify that we visited all the attributes */
+ VERIFY(skip, max_attrs, "H5Aiterate_by_name");
+ if (order == H5_ITER_INC) {
+ for (v = 0; v < (max_attrs / 2); v++)
+ VERIFY(iter_info->visited[v + (max_attrs / 2)], TRUE, "H5Aiterate_by_name");
+ } /* end if */
+ else if (order == H5_ITER_DEC) {
+ for (v = 0; v < (max_attrs / 2); v++)
+ VERIFY(iter_info->visited[v], TRUE, "H5Aiterate_by_name");
+ } /* end if */
+ else {
+ unsigned nvisit = 0; /* # of links visited */
+
+ HDassert(order == H5_ITER_NATIVE);
+ for (v = 0; v < max_attrs; v++)
+ if (iter_info->visited[v] == TRUE)
+ nvisit++;
+
+ VERIFY(skip, (max_attrs / 2), "H5Aiterate_by_name");
+ } /* end else */
+
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+ /* Skip over some attributes on object, with H5Aiterate1 */
+ iter_info->nskipped = oskip = max_attrs / 2;
+ iter_info->order = order;
+ iter_info->stop = -1;
+ iter_info->ncalled = 0;
+ iter_info->curr = order != H5_ITER_DEC ? (unsigned)oskip : ((max_attrs - 1) - oskip);
+ HDmemset(iter_info->visited, 0, sizeof(hbool_t) * iter_info->max_visit);
+ ret = H5Aiterate1(obj_id, &oskip, attr_iterate1_cb, iter_info);
+ CHECK(ret, FAIL, "H5Aiterate1");
+
+ /* Verify that we visited all the links */
+ VERIFY(oskip, max_attrs, "H5Aiterate1");
+ if (order == H5_ITER_INC) {
+ for (v = 0; v < (max_attrs / 2); v++)
+ VERIFY(iter_info->visited[v + (max_attrs / 2)], TRUE, "H5Aiterate1");
+ } /* end if */
+ else if (order == H5_ITER_DEC) {
+ for (v = 0; v < (max_attrs / 2); v++)
+ VERIFY(iter_info->visited[v], TRUE, "H5Aiterate1");
+ } /* end if */
+ else {
+ unsigned nvisit = 0; /* # of links visited */
+
+ HDassert(order == H5_ITER_NATIVE);
+ for (v = 0; v < max_attrs; v++)
+ if (iter_info->visited[v] == TRUE)
+ nvisit++;
+
+ VERIFY(skip, (max_attrs / 2), "H5Aiterate1");
+ } /* end else */
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+#endif
+
+ /* Iterate over attributes on object, stopping in the middle */
+ iter_info->nskipped = (unsigned)(skip = 0);
+ iter_info->order = order;
+ iter_info->stop = 3;
+ iter_info->ncalled = 0;
+ iter_info->curr = order != H5_ITER_DEC ? 0 : (max_attrs - 1);
+ HDmemset(iter_info->visited, 0, sizeof(hbool_t) * iter_info->max_visit);
+ ret = H5Aiterate2(obj_id, idx_type, order, &skip, attr_iterate2_cb, iter_info);
+ CHECK(ret, FAIL, "H5Aiterate2");
+ VERIFY(ret, CORDER_ITER_STOP, "H5Aiterate2");
+ VERIFY(iter_info->ncalled, 3, "H5Aiterate2");
+
+ /* Iterate over attributes on object, stopping in the middle */
+ iter_info->nskipped = (unsigned)(skip = 0);
+ iter_info->order = order;
+ iter_info->stop = 3;
+ iter_info->ncalled = 0;
+ iter_info->curr = order != H5_ITER_DEC ? 0 : (max_attrs - 1);
+ HDmemset(iter_info->visited, 0, sizeof(hbool_t) * iter_info->max_visit);
+ ret = H5Aiterate_by_name(fid, dsetname, idx_type, order, &skip, attr_iterate2_cb, iter_info, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aiterate_by_name");
+ VERIFY(ret, CORDER_ITER_STOP, "H5Aiterate_by_name");
+ VERIFY(iter_info->ncalled, 3, "H5Aiterate_by_name");
+
+ /* Iterate over attributes on object, stopping in the middle */
+ iter_info->nskipped = (unsigned)(skip = 0);
+ iter_info->order = order;
+ iter_info->stop = 3;
+ iter_info->ncalled = 0;
+ iter_info->curr = order != H5_ITER_DEC ? 0 : (max_attrs - 1);
+ HDmemset(iter_info->visited, 0, sizeof(hbool_t) * iter_info->max_visit);
+ ret = H5Aiterate_by_name(obj_id, ".", idx_type, order, &skip, attr_iterate2_cb, iter_info, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aiterate_by_name");
+ VERIFY(ret, CORDER_ITER_STOP, "H5Aiterate_by_name");
+ VERIFY(iter_info->ncalled, 3, "H5Aiterate_by_name");
+
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+ /* Iterate over attributes on object, stopping in the middle, with H5Aiterate1() */
+ iter_info->nskipped = oskip = 0;
+ iter_info->order = order;
+ iter_info->stop = 3;
+ iter_info->ncalled = 0;
+ iter_info->curr = order != H5_ITER_DEC ? 0 : (max_attrs - 1);
+ HDmemset(iter_info->visited, 0, sizeof(hbool_t) * iter_info->max_visit);
+ ret = H5Aiterate1(obj_id, &oskip, attr_iterate1_cb, iter_info);
+ CHECK(ret, FAIL, "H5Aiterate1");
+ VERIFY(ret, CORDER_ITER_STOP, "H5Aiterate1");
+ VERIFY(iter_info->ncalled, 3, "H5Aiterate1");
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+#endif
+
+ /* Check for iteration routine indicating failure */
+ skip = 0;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Aiterate2(obj_id, idx_type, order, &skip, attr_iterate2_fail_cb, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Aiterate2");
+
+ skip = 0;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Aiterate_by_name(fid, dsetname, idx_type, order, &skip, attr_iterate2_fail_cb, NULL,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Aiterate_by_name");
+
+ skip = 0;
+ H5E_BEGIN_TRY
+ {
+ ret =
+ H5Aiterate_by_name(obj_id, ".", idx_type, order, &skip, attr_iterate2_fail_cb, NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Aiterate_by_name");
+
+ /* Retrieve current # of errors */
+ if (old_nerrs == nerrors)
+ return (0);
+ else
+ return (-1);
+} /* end attr_iterate_check() */
+#endif
+
+/****************************************************************
+**
+** test_attr_iterate2(): Test basic H5A (attribute) code.
+** Tests iterating over attributes by index
+**
+****************************************************************/
+static void
+test_attr_iterate2(hbool_t new_format, hid_t fcpl, hid_t fapl)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t dset1, dset2, dset3; /* Dataset IDs */
+ hid_t my_dataset; /* Current dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ unsigned max_compact; /* Maximum # of links to store in group compactly */
+ unsigned min_dense; /* Minimum # of links to store in group "densely" */
+#if 0
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
+ hsize_t nattrs; /* Number of attributes on object */
+ hsize_t name_count; /* # of records in name index */
+ hsize_t corder_count; /* # of records in creation order index */
+#endif
+ H5_index_t idx_type; /* Type of index to operate on */
+ H5_iter_order_t order; /* Order within in the index */
+ attr_iter_info_t iter_info; /* Iterator info */
+ hbool_t *visited = NULL; /* Array of flags for visiting links */
+#ifndef NO_ITERATION_RESTART
+ hsize_t idx; /* Start index for iteration */
+#endif
+ unsigned use_index; /* Use index on creation order values */
+ const char *dsetname; /* Name of dataset for attributes */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ unsigned curr_dset; /* Current dataset to work on */
+ unsigned u; /* Local index variable */
+ herr_t ret; /* Generic return value */
+
+ /* Create dataspace for dataset & attributes */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Create dataset creation property list */
+ if (dcpl_g == H5P_DEFAULT) {
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, FAIL, "H5Pcreate");
+ }
+ else {
+ dcpl = H5Pcopy(dcpl_g);
+ CHECK(dcpl, FAIL, "H5Pcopy");
+ }
+
+ /* Query the attribute creation properties */
+ ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+ /* Allocate the "visited link" array */
+ iter_info.max_visit = max_compact * 2;
+ visited = (hbool_t *)HDmalloc(sizeof(hbool_t) * iter_info.max_visit);
+ CHECK_PTR(visited, "HDmalloc");
+ iter_info.visited = visited;
+
+ /* Loop over operating on different indices on link fields */
+ for (idx_type = H5_INDEX_NAME; idx_type <= H5_INDEX_CRT_ORDER; idx_type++) {
+ /* Loop over operating in different orders */
+ for (order = H5_ITER_INC; order <= H5_ITER_DEC; order++) {
+ /* Loop over using index for creation order value */
+ for (use_index = FALSE; use_index <= TRUE; use_index++) {
+ /* Print appropriate test message */
+ if (idx_type == H5_INDEX_CRT_ORDER) {
+ if (order == H5_ITER_INC) {
+ if (use_index)
+ MESSAGE(5, ("Testing Iterating over Attributes By Creation Order Index in "
+ "Increasing Order w/Creation Order Index\n"))
+ else
+ MESSAGE(5, ("Testing Iterating over Attributes By Creation Order Index in "
+ "Increasing Order w/o Creation Order Index\n"))
+ } /* end if */
+ else {
+ if (use_index)
+ MESSAGE(5, ("Testing Iterating over Attributes By Creation Order Index in "
+ "Decreasing Order w/Creation Order Index\n"))
+ else
+ MESSAGE(5, ("Testing Iterating over Attributes By Creation Order Index in "
+ "Decreasing Order w/o Creation Order Index\n"))
+ } /* end else */
+ } /* end if */
+ else {
+ if (order == H5_ITER_INC) {
+ if (use_index)
+ MESSAGE(5, ("Testing Iterating over Attributes By Name Index in Increasing Order "
+ "w/Creation Order Index\n"))
+ else
+ MESSAGE(5, ("Testing Iterating over Attributes By Name Index in Increasing Order "
+ "w/o Creation Order Index\n"))
+ } /* end if */
+ else {
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+ if (use_index)
+ MESSAGE(5, ("Testing Iterating over Attributes By Name Index in Decreasing Order "
+ "w/Creation Order Index\n"))
+ else
+ MESSAGE(5, ("Testing Iterating over Attributes By Name Index in Decreasing Order "
+ "w/o Creation Order Index\n"))
+#else
+ continue;
+#endif
+ } /* end else */
+ } /* end else */
+
+ /* Create file */
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Set attribute creation order tracking & indexing for object */
+ if (new_format == TRUE) {
+ ret = H5Pset_attr_creation_order(
+ dcpl, (H5P_CRT_ORDER_TRACKED | (use_index ? H5P_CRT_ORDER_INDEXED : (unsigned)0)));
+ CHECK(ret, FAIL, "H5Pset_attr_creation_order");
+ } /* end if */
+
+ /* Create datasets */
+ dset1 = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dset1, FAIL, "H5Dcreate2");
+ dset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dset2, FAIL, "H5Dcreate2");
+ dset3 = H5Dcreate2(fid, DSET3_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dset3, FAIL, "H5Dcreate2");
+
+ /* Work on all the datasets */
+ for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+ switch (curr_dset) {
+ case 0:
+ my_dataset = dset1;
+ dsetname = DSET1_NAME;
+ break;
+
+ case 1:
+ my_dataset = dset2;
+ dsetname = DSET2_NAME;
+ break;
+
+ case 2:
+ my_dataset = dset3;
+ dsetname = DSET3_NAME;
+ break;
+
+ default:
+ HDassert(0 && "Too many datasets!");
+ } /* end switch */
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_empty = H5O__is_attr_empty_test(my_dataset);
+ VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+ is_dense = H5O__is_attr_dense_test(my_dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+ /* Check for iterating over object with no attributes (should be OK) */
+ ret = H5Aiterate2(my_dataset, idx_type, order, NULL, attr_iterate2_cb, NULL);
+ CHECK(ret, FAIL, "H5Aiterate2");
+
+ ret = H5Aiterate_by_name(fid, dsetname, idx_type, order, NULL, attr_iterate2_cb, NULL,
+ H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aiterate_by_name");
+
+ ret = H5Aiterate_by_name(my_dataset, ".", idx_type, order, NULL, attr_iterate2_cb, NULL,
+ H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aiterate_by_name");
+
+ /* Create attributes, up to limit of compact form */
+ for (u = 0; u < max_compact; u++) {
+ /* Create attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr =
+ H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Verify information for new attribute */
+ ret = attr_info_by_idx_check(my_dataset, attrname, (hsize_t)u, use_index);
+ CHECK(ret, FAIL, "attr_info_by_idx_check");
+ } /* end for */
+#if 0
+ /* Verify state of object */
+ ret = H5O__num_attrs_test(my_dataset, &nattrs);
+ CHECK(ret, FAIL, "H5O__num_attrs_test");
+ VERIFY(nattrs, max_compact, "H5O__num_attrs_test");
+ is_empty = H5O__is_attr_empty_test(my_dataset);
+ VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+ is_dense = H5O__is_attr_dense_test(my_dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+#ifndef NO_ITERATION_RESTART
+ /* Check for out of bound iteration */
+ idx = u;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Aiterate2(my_dataset, idx_type, order, &idx, attr_iterate2_cb, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Aiterate2");
+
+ idx = u;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Aiterate_by_name(fid, dsetname, idx_type, order, &idx, attr_iterate2_cb, NULL,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Aiterate_by_name");
+
+ idx = u;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Aiterate_by_name(my_dataset, ".", idx_type, order, &idx, attr_iterate2_cb,
+ NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Aiterate_by_name");
+
+ /* Test iteration over attributes stored compactly */
+ ret = attr_iterate_check(fid, dsetname, my_dataset, idx_type, order, u, &iter_info);
+ CHECK(ret, FAIL, "attr_iterate_check");
+#endif
+ } /* end for */
+
+ /* Work on all the datasets */
+ for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+ switch (curr_dset) {
+ case 0:
+ my_dataset = dset1;
+ dsetname = DSET1_NAME;
+ break;
+
+ case 1:
+ my_dataset = dset2;
+ dsetname = DSET2_NAME;
+ break;
+
+ case 2:
+ my_dataset = dset3;
+ dsetname = DSET3_NAME;
+ break;
+
+ default:
+ HDassert(0 && "Too many datasets!");
+ } /* end switch */
+
+ /* Create more attributes, to push into dense form */
+ for (u = max_compact; u < (max_compact * 2); u++) {
+ /* Create attribute */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+ attr =
+ H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Write data into the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+#if 0
+ /* Verify state of object */
+ if (u >= max_compact) {
+ is_dense = H5O__is_attr_dense_test(my_dataset);
+ VERIFY(is_dense, (new_format ? TRUE : FALSE), "H5O__is_attr_dense_test");
+ } /* end if */
+#endif
+ /* Verify information for new attribute */
+ ret = attr_info_by_idx_check(my_dataset, attrname, (hsize_t)u, use_index);
+ CHECK(ret, FAIL, "attr_info_by_idx_check");
+ } /* end for */
+#if 0
+ /* Verify state of object */
+ ret = H5O__num_attrs_test(my_dataset, &nattrs);
+ CHECK(ret, FAIL, "H5O__num_attrs_test");
+ VERIFY(nattrs, (max_compact * 2), "H5O__num_attrs_test");
+ is_empty = H5O__is_attr_empty_test(my_dataset);
+ VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+ is_dense = H5O__is_attr_dense_test(my_dataset);
+ VERIFY(is_dense, (new_format ? TRUE : FALSE), "H5O__is_attr_dense_test");
+
+ if (new_format) {
+ /* Retrieve & verify # of records in the name & creation order indices */
+ ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
+ CHECK(ret, FAIL, "H5O__attr_dense_info_test");
+ if (use_index)
+ VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
+ VERIFY(name_count, (max_compact * 2), "H5O__attr_dense_info_test");
+ } /* end if */
+#endif
+#ifndef NO_ITERATION_RESTART
+ /* Check for out of bound iteration */
+ idx = u;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Aiterate2(my_dataset, idx_type, order, &idx, attr_iterate2_cb, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Aiterate2");
+
+ idx = u;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Aiterate_by_name(fid, dsetname, idx_type, order, &idx, attr_iterate2_cb, NULL,
+ H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Aiterate_by_name");
+
+ idx = u;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Aiterate_by_name(my_dataset, ".", idx_type, order, &idx, attr_iterate2_cb,
+ NULL, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Aiterate_by_name");
+
+ /* Test iteration over attributes stored densely */
+ ret = attr_iterate_check(fid, dsetname, my_dataset, idx_type, order, u, &iter_info);
+ CHECK(ret, FAIL, "attr_iterate_check");
+#endif
+ } /* end for */
+
+ /* Close Datasets */
+ ret = H5Dclose(dset1);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Dclose(dset2);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Dclose(dset3);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+ } /* end for */
+ } /* end for */
+ } /* end for */
+
+ /* Close property list */
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Free the "visited link" array */
+ HDfree(visited);
+} /* test_attr_iterate2() */
+
+/*-------------------------------------------------------------------------
+ * Function: attr_open_by_idx_check
+ *
+ * Purpose: Check opening attribute by index on an object
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Quincey Koziol
+ * Wednesday, February 21, 2007
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+attr_open_by_idx_check(hid_t obj_id, H5_index_t idx_type, H5_iter_order_t order, unsigned max_attrs)
+{
+    hid_t      aid;         /* Attribute opened by index */
+    H5A_info_t info;        /* Info queried for the attribute */
+    int        nerrs_start; /* Error count when entering this check */
+    unsigned   i;           /* Attribute index */
+    herr_t     status;      /* API call status */
+
+    /* Remember the error count so new failures can be detected on exit */
+    nerrs_start = nerrors;
+
+    /* Walk the attributes by index and confirm each one's creation order
+     * matches its position in the requested iteration order.
+     */
+    for (i = 0; i < max_attrs; i++) {
+        /* Open the i'th attribute through the requested index/order */
+        aid = H5Aopen_by_idx(obj_id, ".", idx_type, order, (hsize_t)i, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(aid, FAIL, "H5Aopen_by_idx");
+
+        /* Query its info, including the creation order value */
+        status = H5Aget_info(aid, &info);
+        CHECK(status, FAIL, "H5Aget_info");
+
+        /* Expected creation order depends on the iteration direction */
+        switch (order) {
+            case H5_ITER_INC:
+                VERIFY(info.corder, i, "H5Aget_info");
+                break;
+
+            case H5_ITER_DEC:
+                VERIFY(info.corder, (max_attrs - (i + 1)), "H5Aget_info");
+                break;
+
+            default:
+                /* XXX: What to do about native order? */
+                break;
+        } /* end switch */
+
+        /* Done with this attribute */
+        status = H5Aclose(aid);
+        CHECK(status, FAIL, "H5Aclose");
+    } /* end for */
+
+    /* Succeed only if no new errors were recorded during the check */
+    return (nerrs_start == nerrors) ? 0 : (-1);
+} /* end attr_open_by_idx_check() */
+
+/****************************************************************
+**
+** test_attr_open_by_idx(): Test basic H5A (attribute) code.
+** Tests opening attributes by index
+**
+****************************************************************/
+static void
+test_attr_open_by_idx(hbool_t new_format, hid_t fcpl, hid_t fapl)
+{
+    hid_t fid;                 /* HDF5 File ID */
+    hid_t dset1, dset2, dset3; /* Dataset IDs */
+    hid_t my_dataset;          /* Current dataset ID */
+    hid_t sid;                 /* Dataspace ID */
+    hid_t attr;                /* Attribute ID */
+    hid_t dcpl;                /* Dataset creation property list ID */
+    unsigned max_compact;      /* Maximum # of links to store in group compactly */
+    unsigned min_dense;        /* Minimum # of links to store in group "densely" */
+#if 0
+    htri_t is_empty; /* Are there any attributes? */
+    htri_t is_dense; /* Are attributes stored densely? */
+    hsize_t nattrs; /* Number of attributes on object */
+    hsize_t name_count; /* # of records in name index */
+    hsize_t corder_count; /* # of records in creation order index */
+#endif
+    H5_index_t idx_type;          /* Type of index to operate on */
+    H5_iter_order_t order;        /* Order within in the index */
+    unsigned use_index;           /* Use index on creation order values */
+    char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+    unsigned curr_dset;           /* Current dataset to work on */
+    unsigned u;                   /* Local index variable */
+    hid_t ret_id;                 /* Generic hid_t return value */
+    herr_t ret;                   /* Generic return value */
+
+    /* Create dataspace for dataset & attributes */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create dataset creation property list
+     * (NOTE(review): dcpl_g is a test-global DCPL set elsewhere in this file;
+     * it is copied so per-iteration creation-order changes don't leak back)
+     */
+    if (dcpl_g == H5P_DEFAULT) {
+        dcpl = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(dcpl, FAIL, "H5Pcreate");
+    }
+    else {
+        dcpl = H5Pcopy(dcpl_g);
+        CHECK(dcpl, FAIL, "H5Pcopy");
+    }
+
+    /* Query the attribute creation properties (compact/dense thresholds) */
+    ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+    CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+    /* Loop over operating on different indices on link fields */
+    for (idx_type = H5_INDEX_NAME; idx_type <= H5_INDEX_CRT_ORDER; idx_type++) {
+        /* Loop over operating in different orders */
+        for (order = H5_ITER_INC; order <= H5_ITER_DEC; order++) {
+            /* Loop over using index for creation order value */
+            for (use_index = FALSE; use_index <= TRUE; use_index++) {
+                /* Print appropriate test message */
+                if (idx_type == H5_INDEX_CRT_ORDER) {
+                    if (order == H5_ITER_INC) {
+                        if (use_index)
+                            MESSAGE(5, ("Testing Opening Attributes By Creation Order Index in Increasing "
+                                        "Order w/Creation Order Index\n"))
+                        else
+                            MESSAGE(5, ("Testing Opening Attributes By Creation Order Index in Increasing "
+                                        "Order w/o Creation Order Index\n"))
+                    } /* end if */
+                    else {
+                        if (use_index)
+                            MESSAGE(5, ("Testing Opening Attributes By Creation Order Index in Decreasing "
+                                        "Order w/Creation Order Index\n"))
+                        else
+                            MESSAGE(5, ("Testing Opening Attributes By Creation Order Index in Decreasing "
+                                        "Order w/o Creation Order Index\n"))
+                    } /* end else */
+                } /* end if */
+                else {
+                    if (order == H5_ITER_INC) {
+                        if (use_index)
+                            MESSAGE(5, ("Testing Opening Attributes By Name Index in Increasing Order "
+                                        "w/Creation Order Index\n"))
+                        else
+                            MESSAGE(5, ("Testing Opening Attributes By Name Index in Increasing Order w/o "
+                                        "Creation Order Index\n"))
+                    } /* end if */
+                    else {
+                        /* Decreasing-name-order iteration is skipped when the
+                         * connector under test can't support it */
+#ifndef NO_DECREASING_ALPHA_ITER_ORDER
+                        if (use_index)
+                            MESSAGE(5, ("Testing Opening Attributes By Name Index in Decreasing Order "
+                                        "w/Creation Order Index\n"))
+                        else
+                            MESSAGE(5, ("Testing Opening Attributes By Name Index in Decreasing Order w/o "
+                                        "Creation Order Index\n"))
+#else
+                        continue;
+#endif
+                    } /* end else */
+                } /* end else */
+
+                /* Create file */
+                fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+                CHECK(fid, FAIL, "H5Fcreate");
+
+                /* Set attribute creation order tracking & indexing for object */
+                if (new_format == TRUE) {
+                    ret = H5Pset_attr_creation_order(
+                        dcpl, (H5P_CRT_ORDER_TRACKED | (use_index ? H5P_CRT_ORDER_INDEXED : (unsigned)0)));
+                    CHECK(ret, FAIL, "H5Pset_attr_creation_order");
+                } /* end if */
+
+                /* Create datasets */
+                dset1 = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+                CHECK(dset1, FAIL, "H5Dcreate2");
+                dset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+                CHECK(dset2, FAIL, "H5Dcreate2");
+                dset3 = H5Dcreate2(fid, DSET3_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+                CHECK(dset3, FAIL, "H5Dcreate2");
+
+                /* Work on all the datasets */
+                for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+                    switch (curr_dset) {
+                        case 0:
+                            my_dataset = dset1;
+                            break;
+
+                        case 1:
+                            my_dataset = dset2;
+                            break;
+
+                        case 2:
+                            my_dataset = dset3;
+                            break;
+
+                        default:
+                            HDassert(0 && "Too many datasets!");
+                    } /* end switch */
+#if 0
+                    /* Check on dataset's attribute storage status */
+                    is_empty = H5O__is_attr_empty_test(my_dataset);
+                    VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+                    is_dense = H5O__is_attr_dense_test(my_dataset);
+                    VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+                    /* Check for opening an attribute on an object with no attributes (should fail) */
+                    H5E_BEGIN_TRY
+                    {
+                        ret_id = H5Aopen_by_idx(my_dataset, ".", idx_type, order, (hsize_t)0, H5P_DEFAULT,
+                                                H5P_DEFAULT);
+                    }
+                    H5E_END_TRY;
+                    VERIFY(ret_id, FAIL, "H5Aopen_by_idx");
+
+                    /* Create attributes, up to limit of compact form */
+                    for (u = 0; u < max_compact; u++) {
+                        /* Create attribute */
+                        HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+                        attr =
+                            H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+                        CHECK(attr, FAIL, "H5Acreate2");
+
+                        /* Write data into the attribute */
+                        ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+                        CHECK(ret, FAIL, "H5Awrite");
+
+                        /* Close attribute */
+                        ret = H5Aclose(attr);
+                        CHECK(ret, FAIL, "H5Aclose");
+
+                        /* Verify information for new attribute */
+                        ret = attr_info_by_idx_check(my_dataset, attrname, (hsize_t)u, use_index);
+                        CHECK(ret, FAIL, "attr_info_by_idx_check");
+                    } /* end for */
+#if 0
+                    /* Verify state of object */
+                    ret = H5O__num_attrs_test(my_dataset, &nattrs);
+                    CHECK(ret, FAIL, "H5O__num_attrs_test");
+                    VERIFY(nattrs, max_compact, "H5O__num_attrs_test");
+                    is_empty = H5O__is_attr_empty_test(my_dataset);
+                    VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+                    is_dense = H5O__is_attr_dense_test(my_dataset);
+                    VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+                    /* Check for out of bound opening an attribute on an object
+                     * (u == max_compact here, one past the last valid index) */
+                    H5E_BEGIN_TRY
+                    {
+                        ret_id = H5Aopen_by_idx(my_dataset, ".", idx_type, order, (hsize_t)u, H5P_DEFAULT,
+                                                H5P_DEFAULT);
+                    }
+                    H5E_END_TRY;
+                    VERIFY(ret_id, FAIL, "H5Aopen_by_idx");
+
+                    /* Test opening attributes by index stored compactly */
+                    ret = attr_open_by_idx_check(my_dataset, idx_type, order, u);
+                    CHECK(ret, FAIL, "attr_open_by_idx_check");
+                } /* end for */
+
+                /* Work on all the datasets */
+                for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+                    switch (curr_dset) {
+                        case 0:
+                            my_dataset = dset1;
+                            break;
+
+                        case 1:
+                            my_dataset = dset2;
+                            break;
+
+                        case 2:
+                            my_dataset = dset3;
+                            break;
+
+                        default:
+                            HDassert(0 && "Too many datasets!");
+                    } /* end switch */
+
+                    /* Create more attributes, to push into dense form */
+                    for (u = max_compact; u < (max_compact * 2); u++) {
+                        /* Create attribute */
+                        HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+                        attr =
+                            H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+                        CHECK(attr, FAIL, "H5Acreate2");
+
+                        /* Write data into the attribute */
+                        ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+                        CHECK(ret, FAIL, "H5Awrite");
+
+                        /* Close attribute */
+                        ret = H5Aclose(attr);
+                        CHECK(ret, FAIL, "H5Aclose");
+#if 0
+                        /* Verify state of object */
+                        if (u >= max_compact) {
+                            is_dense = H5O__is_attr_dense_test(my_dataset);
+                            VERIFY(is_dense, (new_format ? TRUE : FALSE), "H5O__is_attr_dense_test");
+                        } /* end if */
+#endif
+                        /* Verify information for new attribute */
+                        ret = attr_info_by_idx_check(my_dataset, attrname, (hsize_t)u, use_index);
+                        CHECK(ret, FAIL, "attr_info_by_idx_check");
+                    } /* end for */
+#if 0
+                    /* Verify state of object */
+                    ret = H5O__num_attrs_test(my_dataset, &nattrs);
+                    CHECK(ret, FAIL, "H5O__num_attrs_test");
+                    VERIFY(nattrs, (max_compact * 2), "H5O__num_attrs_test");
+                    is_empty = H5O__is_attr_empty_test(my_dataset);
+                    VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+                    is_dense = H5O__is_attr_dense_test(my_dataset);
+                    VERIFY(is_dense, (new_format ? TRUE : FALSE), "H5O__is_attr_dense_test");
+
+                    if (new_format) {
+                        /* Retrieve & verify # of records in the name & creation order indices */
+                        ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
+                        CHECK(ret, FAIL, "H5O__attr_dense_info_test");
+                        if (use_index)
+                            VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
+                        VERIFY(name_count, (max_compact * 2), "H5O__attr_dense_info_test");
+                    } /* end if */
+#endif
+                    /* Check for out of bound opening an attribute on an object */
+                    H5E_BEGIN_TRY
+                    {
+                        ret_id = H5Aopen_by_idx(my_dataset, ".", idx_type, order, (hsize_t)u, H5P_DEFAULT,
+                                                H5P_DEFAULT);
+                    }
+                    H5E_END_TRY;
+                    VERIFY(ret_id, FAIL, "H5Aopen_by_idx");
+
+                    /* Test opening attributes by index stored densely */
+                    ret = attr_open_by_idx_check(my_dataset, idx_type, order, u);
+                    CHECK(ret, FAIL, "attr_open_by_idx_check");
+                } /* end for */
+
+                /* Close Datasets */
+                ret = H5Dclose(dset1);
+                CHECK(ret, FAIL, "H5Dclose");
+                ret = H5Dclose(dset2);
+                CHECK(ret, FAIL, "H5Dclose");
+                ret = H5Dclose(dset3);
+                CHECK(ret, FAIL, "H5Dclose");
+
+                /* Close file */
+                ret = H5Fclose(fid);
+                CHECK(ret, FAIL, "H5Fclose");
+            } /* end for */
+        } /* end for */
+    } /* end for */
+
+    /* Close property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_attr_open_by_idx() */
+
+/*-------------------------------------------------------------------------
+ * Function: attr_open_check
+ *
+ * Purpose: Check opening attribute on an object
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Quincey Koziol
+ * Wednesday, February 21, 2007
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+attr_open_check(hid_t fid, const char *dsetname, hid_t obj_id, unsigned max_attrs)
+{
+    hid_t      attr_id;                /* ID of attribute to test */
+    H5A_info_t ainfo;                  /* Attribute info */
+    char       attrname[NAME_BUF_SIZE]; /* Name of attribute */
+    int        old_nerrs;              /* Number of errors when entering this check */
+    unsigned   u;                      /* Local index variable */
+    unsigned   open_how;               /* Which open API variant to exercise */
+    herr_t     ret;                    /* Generic return value */
+
+    /* Retrieve the current # of reported errors */
+    old_nerrs = nerrors;
+
+    /* Open each attribute on object by name and check that it's the correct one */
+    for (u = 0; u < max_attrs; u++) {
+        HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+
+        /* Exercise all three open paths for the same attribute:
+         *   0: H5Aopen on the object itself
+         *   1: H5Aopen_by_name on the object via "."
+         *   2: H5Aopen_by_name on the file via the dataset's pathname
+         * (Folded into a loop: the get-info/verify/close stanza was
+         * previously triplicated verbatim.)
+         */
+        for (open_how = 0; open_how < 3; open_how++) {
+            /* Open the attribute */
+            switch (open_how) {
+                case 0:
+                    attr_id = H5Aopen(obj_id, attrname, H5P_DEFAULT);
+                    CHECK(attr_id, FAIL, "H5Aopen");
+                    break;
+
+                case 1:
+                    attr_id = H5Aopen_by_name(obj_id, ".", attrname, H5P_DEFAULT, H5P_DEFAULT);
+                    CHECK(attr_id, FAIL, "H5Aopen_by_name");
+                    break;
+
+                case 2:
+                default:
+                    attr_id = H5Aopen_by_name(fid, dsetname, attrname, H5P_DEFAULT, H5P_DEFAULT);
+                    CHECK(attr_id, FAIL, "H5Aopen_by_name");
+                    break;
+            } /* end switch */
+
+            /* Get the attribute's information */
+            ret = H5Aget_info(attr_id, &ainfo);
+            CHECK(ret, FAIL, "H5Aget_info");
+
+            /* Check that the object is the correct one (creation order matches index) */
+            VERIFY(ainfo.corder, u, "H5Aget_info");
+
+            /* Close attribute */
+            ret = H5Aclose(attr_id);
+            CHECK(ret, FAIL, "H5Aclose");
+        } /* end for */
+    } /* end for */
+
+    /* Retrieve current # of errors; fail if any new ones were recorded */
+    if (old_nerrs == nerrors)
+        return (0);
+    else
+        return (-1);
+} /* end attr_open_check() */
+
+/****************************************************************
+**
+** test_attr_open_by_name(): Test basic H5A (attribute) code.
+** Tests opening attributes by name
+**
+****************************************************************/
+static void
+test_attr_open_by_name(hbool_t new_format, hid_t fcpl, hid_t fapl)
+{
+    hid_t fid;                 /* HDF5 File ID */
+    hid_t dset1, dset2, dset3; /* Dataset IDs */
+    hid_t my_dataset;          /* Current dataset ID */
+    hid_t sid;                 /* Dataspace ID */
+    hid_t attr;                /* Attribute ID */
+    hid_t dcpl;                /* Dataset creation property list ID */
+    unsigned max_compact;      /* Maximum # of links to store in group compactly */
+    unsigned min_dense;        /* Minimum # of links to store in group "densely" */
+#if 0
+    htri_t is_empty; /* Are there any attributes? */
+    htri_t is_dense; /* Are attributes stored densely? */
+    hsize_t nattrs; /* Number of attributes on object */
+    hsize_t name_count; /* # of records in name index */
+    hsize_t corder_count; /* # of records in creation order index */
+#endif
+    unsigned use_index;           /* Use index on creation order values */
+    const char *dsetname;         /* Name of dataset for attributes */
+    char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+    unsigned curr_dset;           /* Current dataset to work on */
+    unsigned u;                   /* Local index variable */
+    hid_t ret_id;                 /* Generic hid_t return value */
+    herr_t ret;                   /* Generic return value */
+
+    /* Create dataspace for dataset & attributes */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create dataset creation property list
+     * (NOTE(review): dcpl_g is a test-global DCPL set elsewhere in this file;
+     * it is copied so per-iteration creation-order changes don't leak back)
+     */
+    if (dcpl_g == H5P_DEFAULT) {
+        dcpl = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(dcpl, FAIL, "H5Pcreate");
+    }
+    else {
+        dcpl = H5Pcopy(dcpl_g);
+        CHECK(dcpl, FAIL, "H5Pcopy");
+    }
+
+    /* Query the attribute creation properties (compact/dense thresholds) */
+    ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+    CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+    /* Loop over using index for creation order value */
+    for (use_index = FALSE; use_index <= TRUE; use_index++) {
+        /* Print appropriate test message */
+        if (use_index)
+            MESSAGE(5, ("Testing Opening Attributes By Name w/Creation Order Index\n"))
+        else
+            MESSAGE(5, ("Testing Opening Attributes By Name w/o Creation Order Index\n"))
+
+        /* Create file */
+        fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+        CHECK(fid, FAIL, "H5Fcreate");
+
+        /* Set attribute creation order tracking & indexing for object */
+        if (new_format == TRUE) {
+            ret = H5Pset_attr_creation_order(
+                dcpl, (H5P_CRT_ORDER_TRACKED | (use_index ? H5P_CRT_ORDER_INDEXED : (unsigned)0)));
+            CHECK(ret, FAIL, "H5Pset_attr_creation_order");
+        } /* end if */
+
+        /* Create datasets */
+        dset1 = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dset1, FAIL, "H5Dcreate2");
+        dset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dset2, FAIL, "H5Dcreate2");
+        dset3 = H5Dcreate2(fid, DSET3_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dset3, FAIL, "H5Dcreate2");
+
+        /* Work on all the datasets */
+        for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+            switch (curr_dset) {
+                case 0:
+                    my_dataset = dset1;
+                    dsetname   = DSET1_NAME;
+                    break;
+
+                case 1:
+                    my_dataset = dset2;
+                    dsetname   = DSET2_NAME;
+                    break;
+
+                case 2:
+                    my_dataset = dset3;
+                    dsetname   = DSET3_NAME;
+                    break;
+
+                default:
+                    HDassert(0 && "Too many datasets!");
+            } /* end switch */
+#if 0
+            /* Check on dataset's attribute storage status */
+            is_empty = H5O__is_attr_empty_test(my_dataset);
+            VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
+            is_dense = H5O__is_attr_dense_test(my_dataset);
+            VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+            /* Check for opening a non-existent attribute on an object with no attributes
+             * (all three open variants should fail) */
+            H5E_BEGIN_TRY
+            {
+                ret_id = H5Aopen(my_dataset, "foo", H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret_id, FAIL, "H5Aopen");
+
+            H5E_BEGIN_TRY
+            {
+                ret_id = H5Aopen_by_name(my_dataset, ".", "foo", H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret_id, FAIL, "H5Aopen_by_name");
+
+            H5E_BEGIN_TRY
+            {
+                ret_id = H5Aopen_by_name(fid, dsetname, "foo", H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret_id, FAIL, "H5Aopen_by_name");
+
+            /* Create attributes, up to limit of compact form */
+            for (u = 0; u < max_compact; u++) {
+                /* Create attribute */
+                HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+                attr = H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(attr, FAIL, "H5Acreate2");
+
+                /* Write data into the attribute */
+                ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+                CHECK(ret, FAIL, "H5Awrite");
+
+                /* Close attribute */
+                ret = H5Aclose(attr);
+                CHECK(ret, FAIL, "H5Aclose");
+
+                /* Verify information for new attribute */
+                ret = attr_info_by_idx_check(my_dataset, attrname, (hsize_t)u, use_index);
+                CHECK(ret, FAIL, "attr_info_by_idx_check");
+            } /* end for */
+#if 0
+            /* Verify state of object */
+            ret = H5O__num_attrs_test(my_dataset, &nattrs);
+            CHECK(ret, FAIL, "H5O__num_attrs_test");
+            VERIFY(nattrs, max_compact, "H5O__num_attrs_test");
+            is_empty = H5O__is_attr_empty_test(my_dataset);
+            VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+            is_dense = H5O__is_attr_dense_test(my_dataset);
+            VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+            /* Check for opening a non-existent attribute on an object with compact attribute storage */
+            H5E_BEGIN_TRY
+            {
+                ret_id = H5Aopen(my_dataset, "foo", H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret_id, FAIL, "H5Aopen");
+
+            H5E_BEGIN_TRY
+            {
+                ret_id = H5Aopen_by_name(my_dataset, ".", "foo", H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret_id, FAIL, "H5Aopen_by_name");
+
+            H5E_BEGIN_TRY
+            {
+                ret_id = H5Aopen_by_name(fid, dsetname, "foo", H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret_id, FAIL, "H5Aopen_by_name");
+
+            /* Test opening attributes stored compactly */
+            ret = attr_open_check(fid, dsetname, my_dataset, u);
+            CHECK(ret, FAIL, "attr_open_check");
+        } /* end for */
+
+        /* Work on all the datasets */
+        for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
+            switch (curr_dset) {
+                case 0:
+                    my_dataset = dset1;
+                    dsetname   = DSET1_NAME;
+                    break;
+
+                case 1:
+                    my_dataset = dset2;
+                    dsetname   = DSET2_NAME;
+                    break;
+
+                case 2:
+                    my_dataset = dset3;
+                    dsetname   = DSET3_NAME;
+                    break;
+
+                default:
+                    HDassert(0 && "Too many datasets!");
+            } /* end switch */
+
+            /* Create more attributes, to push into dense form */
+            for (u = max_compact; u < (max_compact * 2); u++) {
+                /* Create attribute */
+                HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+                attr = H5Acreate2(my_dataset, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(attr, FAIL, "H5Acreate2");
+
+                /* Write data into the attribute */
+                ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
+                CHECK(ret, FAIL, "H5Awrite");
+
+                /* Close attribute */
+                ret = H5Aclose(attr);
+                CHECK(ret, FAIL, "H5Aclose");
+#if 0
+                /* Verify state of object */
+                if (u >= max_compact) {
+                    is_dense = H5O__is_attr_dense_test(my_dataset);
+                    VERIFY(is_dense, (new_format ? TRUE : FALSE), "H5O__is_attr_dense_test");
+                } /* end if */
+#endif
+                /* Verify information for new attribute */
+                ret = attr_info_by_idx_check(my_dataset, attrname, (hsize_t)u, use_index);
+                CHECK(ret, FAIL, "attr_info_by_idx_check");
+            } /* end for */
+#if 0
+            /* Verify state of object */
+            ret = H5O__num_attrs_test(my_dataset, &nattrs);
+            CHECK(ret, FAIL, "H5O__num_attrs_test");
+            VERIFY(nattrs, (max_compact * 2), "H5O__num_attrs_test");
+            is_empty = H5O__is_attr_empty_test(my_dataset);
+            VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
+            is_dense = H5O__is_attr_dense_test(my_dataset);
+            VERIFY(is_dense, (new_format ? TRUE : FALSE), "H5O__is_attr_dense_test");
+
+            if (new_format) {
+                /* Retrieve & verify # of records in the name & creation order indices */
+                ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
+                CHECK(ret, FAIL, "H5O__attr_dense_info_test");
+                if (use_index)
+                    VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
+                VERIFY(name_count, (max_compact * 2), "H5O__attr_dense_info_test");
+            } /* end if */
+#endif
+            /* Check for opening a non-existent attribute on an object with dense attribute storage */
+            H5E_BEGIN_TRY
+            {
+                ret_id = H5Aopen(my_dataset, "foo", H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret_id, FAIL, "H5Aopen");
+
+            H5E_BEGIN_TRY
+            {
+                ret_id = H5Aopen_by_name(my_dataset, ".", "foo", H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret_id, FAIL, "H5Aopen_by_name");
+
+            H5E_BEGIN_TRY
+            {
+                ret_id = H5Aopen_by_name(fid, dsetname, "foo", H5P_DEFAULT, H5P_DEFAULT);
+            }
+            H5E_END_TRY;
+            VERIFY(ret_id, FAIL, "H5Aopen_by_name");
+
+            /* Test opening attributes stored densely */
+            ret = attr_open_check(fid, dsetname, my_dataset, u);
+            CHECK(ret, FAIL, "attr_open_check");
+        } /* end for */
+
+        /* Close Datasets */
+        ret = H5Dclose(dset1);
+        CHECK(ret, FAIL, "H5Dclose");
+        ret = H5Dclose(dset2);
+        CHECK(ret, FAIL, "H5Dclose");
+        ret = H5Dclose(dset3);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* Close file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+    } /* end for */
+
+    /* Close property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_attr_open_by_name() */
+
/****************************************************************
**
**  test_attr_create_by_name(): Test basic H5A (attribute) code.
**      Tests creating attributes by name with H5Acreate_by_name(),
**      exercising both compact and (for the new file format) dense
**      attribute storage, with and without a creation-order index.
**
****************************************************************/
static void
test_attr_create_by_name(hbool_t new_format, hid_t fcpl, hid_t fapl)
{
    hid_t    fid;                 /* HDF5 File ID */
    hid_t    dset1, dset2, dset3; /* Dataset IDs */
    hid_t    my_dataset;          /* Current dataset ID */
    hid_t    sid;                 /* Dataspace ID */
    hid_t    attr;                /* Attribute ID */
    hid_t    dcpl;                /* Dataset creation property list ID */
    unsigned max_compact;         /* Maximum # of links to store in group compactly */
    unsigned min_dense;           /* Minimum # of links to store in group "densely" */
#if 0
    htri_t  is_empty;     /* Are there any attributes? */
    htri_t  is_dense;     /* Are attributes stored densely? */
    hsize_t nattrs;       /* Number of attributes on object */
    hsize_t name_count;   /* # of records in name index */
    hsize_t corder_count; /* # of records in creation order index */
#endif
    unsigned    use_index;               /* Use index on creation order values */
    const char *dsetname;                /* Name of dataset for attributes */
    char        attrname[NAME_BUF_SIZE]; /* Name of attribute */
    unsigned    curr_dset;               /* Current dataset to work on */
    unsigned    u;                       /* Local index variable */
    herr_t      ret;                     /* Generic return value */

    /* Create dataspace for dataset & attributes */
    sid = H5Screate(H5S_SCALAR);
    CHECK(sid, FAIL, "H5Screate");

    /* Create dataset creation property list (copy the global one if set) */
    if (dcpl_g == H5P_DEFAULT) {
        dcpl = H5Pcreate(H5P_DATASET_CREATE);
        CHECK(dcpl, FAIL, "H5Pcreate");
    }
    else {
        dcpl = H5Pcopy(dcpl_g);
        CHECK(dcpl, FAIL, "H5Pcopy");
    }

    /* Query the attribute creation properties (compact/dense thresholds) */
    ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
    CHECK(ret, FAIL, "H5Pget_attr_phase_change");

    /* Loop over using index for creation order value */
    for (use_index = FALSE; use_index <= TRUE; use_index++) {
        /* Print appropriate test message */
        if (use_index)
            MESSAGE(5, ("Testing Creating Attributes By Name w/Creation Order Index\n"))
        else
            MESSAGE(5, ("Testing Creating Attributes By Name w/o Creation Order Index\n"))

        /* Create file */
        fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
        CHECK(fid, FAIL, "H5Fcreate");

        /* Set attribute creation order tracking & indexing for object
         * (only meaningful with the new file format) */
        if (new_format == TRUE) {
            ret = H5Pset_attr_creation_order(
                dcpl, (H5P_CRT_ORDER_TRACKED | (use_index ? H5P_CRT_ORDER_INDEXED : (unsigned)0)));
            CHECK(ret, FAIL, "H5Pset_attr_creation_order");
        } /* end if */

        /* Create datasets */
        dset1 = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
        CHECK(dset1, FAIL, "H5Dcreate2");
        dset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
        CHECK(dset2, FAIL, "H5Dcreate2");
        dset3 = H5Dcreate2(fid, DSET3_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
        CHECK(dset3, FAIL, "H5Dcreate2");

        /* Work on all the datasets */
        for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
            switch (curr_dset) {
                case 0:
                    my_dataset = dset1;
                    dsetname   = DSET1_NAME;
                    break;

                case 1:
                    my_dataset = dset2;
                    dsetname   = DSET2_NAME;
                    break;

                case 2:
                    my_dataset = dset3;
                    dsetname   = DSET3_NAME;
                    break;

                default:
                    HDassert(0 && "Too many datasets!");
            } /* end switch */
#if 0
            /* Check on dataset's attribute storage status */
            is_empty = H5O__is_attr_empty_test(my_dataset);
            VERIFY(is_empty, TRUE, "H5O__is_attr_empty_test");
            is_dense = H5O__is_attr_dense_test(my_dataset);
            VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
#endif
            /* Create attributes, up to limit of compact form */
            for (u = 0; u < max_compact; u++) {
                /* Create attribute */
                HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
                attr = H5Acreate_by_name(fid, dsetname, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT,
                                         H5P_DEFAULT, H5P_DEFAULT);
                CHECK(attr, FAIL, "H5Acreate_by_name");

                /* Write data into the attribute */
                ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
                CHECK(ret, FAIL, "H5Awrite");

                /* Close attribute */
                ret = H5Aclose(attr);
                CHECK(ret, FAIL, "H5Aclose");

                /* Verify information for new attribute */
                ret = attr_info_by_idx_check(my_dataset, attrname, (hsize_t)u, use_index);
                CHECK(ret, FAIL, "attr_info_by_idx_check");
            } /* end for */
#if 0
            /* Verify state of object */
            ret = H5O__num_attrs_test(my_dataset, &nattrs);
            CHECK(ret, FAIL, "H5O__num_attrs_test");
            VERIFY(nattrs, max_compact, "H5O__num_attrs_test");
            is_empty = H5O__is_attr_empty_test(my_dataset);
            VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
            is_dense = H5O__is_attr_dense_test(my_dataset);
            VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
#endif
            /* Test opening attributes stored compactly */
            ret = attr_open_check(fid, dsetname, my_dataset, u);
            CHECK(ret, FAIL, "attr_open_check");
        } /* end for */

        /* Work on all the datasets */
        for (curr_dset = 0; curr_dset < NUM_DSETS; curr_dset++) {
            switch (curr_dset) {
                case 0:
                    my_dataset = dset1;
                    dsetname   = DSET1_NAME;
                    break;

                case 1:
                    my_dataset = dset2;
                    dsetname   = DSET2_NAME;
                    break;

                case 2:
                    my_dataset = dset3;
                    dsetname   = DSET3_NAME;
                    break;

                default:
                    HDassert(0 && "Too many datasets!");
            } /* end switch */

            /* Create more attributes, to push into dense form */
            for (u = max_compact; u < (max_compact * 2); u++) {
                /* Create attribute */
                HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
                attr = H5Acreate_by_name(fid, dsetname, attrname, H5T_NATIVE_UINT, sid, H5P_DEFAULT,
                                         H5P_DEFAULT, H5P_DEFAULT);
                CHECK(attr, FAIL, "H5Acreate_by_name");

                /* Write data into the attribute */
                ret = H5Awrite(attr, H5T_NATIVE_UINT, &u);
                CHECK(ret, FAIL, "H5Awrite");

                /* Close attribute */
                ret = H5Aclose(attr);
                CHECK(ret, FAIL, "H5Aclose");
#if 0
                /* Verify state of object */
                if (u >= max_compact) {
                    is_dense = H5O__is_attr_dense_test(my_dataset);
                    VERIFY(is_dense, (new_format ? TRUE : FALSE), "H5O__is_attr_dense_test");
                } /* end if */
#endif
                /* Verify information for new attribute */
                ret = attr_info_by_idx_check(my_dataset, attrname, (hsize_t)u, use_index);
                CHECK(ret, FAIL, "attr_info_by_idx_check");
            } /* end for */
#if 0
            /* Verify state of object */
            ret = H5O__num_attrs_test(my_dataset, &nattrs);
            CHECK(ret, FAIL, "H5O__num_attrs_test");
            VERIFY(nattrs, (max_compact * 2), "H5O__num_attrs_test");
            is_empty = H5O__is_attr_empty_test(my_dataset);
            VERIFY(is_empty, FALSE, "H5O__is_attr_empty_test");
            is_dense = H5O__is_attr_dense_test(my_dataset);
            VERIFY(is_dense, (new_format ? TRUE : FALSE), "H5O__is_attr_dense_test");

            if (new_format) {
                /* Retrieve & verify # of records in the name & creation order indices */
                ret = H5O__attr_dense_info_test(my_dataset, &name_count, &corder_count);
                CHECK(ret, FAIL, "H5O__attr_dense_info_test");
                if (use_index)
                    VERIFY(name_count, corder_count, "H5O__attr_dense_info_test");
                VERIFY(name_count, (max_compact * 2), "H5O__attr_dense_info_test");
            } /* end if */
#endif
            /* Test opening all attributes created so far (past the compact
             * limit now, so stored densely with the new format; the old
             * format keeps them compact) */
            ret = attr_open_check(fid, dsetname, my_dataset, u);
            CHECK(ret, FAIL, "attr_open_check");
        } /* end for */

        /* Close Datasets */
        ret = H5Dclose(dset1);
        CHECK(ret, FAIL, "H5Dclose");
        ret = H5Dclose(dset2);
        CHECK(ret, FAIL, "H5Dclose");
        ret = H5Dclose(dset3);
        CHECK(ret, FAIL, "H5Dclose");

        /* Close file */
        ret = H5Fclose(fid);
        CHECK(ret, FAIL, "H5Fclose");
    } /* end for */

    /* Close property list */
    ret = H5Pclose(dcpl);
    CHECK(ret, FAIL, "H5Pclose");

    /* Close dataspace */
    ret = H5Sclose(sid);
    CHECK(ret, FAIL, "H5Sclose");
} /* test_attr_create_by_name() */
+
/****************************************************************
**
**  test_attr_shared_write(): Test basic H5A (attribute) code.
**      Tests writing a mix of shared & un-shared attributes in
**      "compact" & "dense" storage.  "Big" attributes (those whose
**      message exceeds the 500-byte sharing threshold set below)
**      become shared messages; "small" scalar attributes do not.
**
****************************************************************/
static void
test_attr_shared_write(hid_t fcpl, hid_t fapl)
{
    hid_t   fid;               /* File ID */
    hid_t   my_fcpl;           /* File creation property list ID */
    hid_t   dataset, dataset2; /* Dataset IDs */
    hid_t   attr_tid;          /* Attribute's datatype ID */
    hid_t   sid, big_sid;      /* Dataspace IDs */
    hsize_t big_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3}; /* Dimensions for "big" attribute */
    hid_t   attr;                                                 /* Attribute ID */
    hid_t   dcpl;                                                 /* Dataset creation property list ID */
    char    attrname[NAME_BUF_SIZE];                              /* Name of attribute */
    unsigned max_compact; /* Maximum # of attributes to store compactly */
    unsigned min_dense;   /* Minimum # of attributes to store "densely" */
#if 0
    htri_t  is_dense;        /* Are attributes stored densely? */
    htri_t  is_shared;       /* Is attributes shared? */
    hsize_t shared_refcount; /* Reference count of shared attribute */
#endif
    unsigned  attr_value; /* Attribute value */
    unsigned *big_value;  /* Data for "big" attribute */
#if 0
    size_t mesg_count; /* # of shared messages */
#endif
    unsigned test_shared; /* Index over shared component type */
    unsigned u;           /* Local index variable */
#if 0
    h5_stat_size_t empty_filesize; /* Size of empty file */
    h5_stat_size_t filesize;       /* Size of file after modifications */
#endif
    herr_t ret; /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Writing Shared & Unshared Attributes in Compact & Dense Storage\n"));

    /* Allocate & initialize "big" attribute data (every byte set to 1) */
    big_value = (unsigned *)HDmalloc((size_t)(SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3) * sizeof(unsigned));
    CHECK_PTR(big_value, "HDmalloc");
    HDmemset(big_value, 1, sizeof(unsigned) * (size_t)(SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3));

    /* Create dataspace for dataset */
    sid = H5Screate(H5S_SCALAR);
    CHECK(sid, FAIL, "H5Screate");

    /* Create "big" dataspace for "large" attributes */
    big_sid = H5Screate_simple(SPACE1_RANK, big_dims, NULL);
    CHECK(big_sid, FAIL, "H5Screate_simple");

    /* Loop over type of shared components:
     *   0 - only attribute messages are shareable
     *   1 - attributes, datatypes & dataspaces are shareable
     *   2 - same as 1, but the attribute datatype is also committed to the file
     */
    for (test_shared = 0; test_shared < 3; test_shared++) {
        /* Make copy of file creation property list */
        my_fcpl = H5Pcopy(fcpl);
        CHECK(my_fcpl, FAIL, "H5Pcopy");

        /* Set up datatype for attributes */
        attr_tid = H5Tcopy(H5T_NATIVE_UINT);
        CHECK(attr_tid, FAIL, "H5Tcopy");

        /* Special setup for each type of shared components */
        if (test_shared == 0) {
            /* Make attributes > 500 bytes shared */
            ret = H5Pset_shared_mesg_nindexes(my_fcpl, (unsigned)1);
            CHECK_I(ret, "H5Pset_shared_mesg_nindexes");
            ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)0, H5O_SHMESG_ATTR_FLAG, (unsigned)500);
            CHECK_I(ret, "H5Pset_shared_mesg_index");
        } /* end if */
        else {
            /* Set up copy of file creation property list */

            ret = H5Pset_shared_mesg_nindexes(my_fcpl, (unsigned)3);
            CHECK_I(ret, "H5Pset_shared_mesg_nindexes");

            /* Make attributes > 500 bytes shared */
            ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)0, H5O_SHMESG_ATTR_FLAG, (unsigned)500);
            CHECK_I(ret, "H5Pset_shared_mesg_index");

            /* Make datatypes & dataspaces > 1 byte shared (i.e. all of them :-) */
            ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)1, H5O_SHMESG_DTYPE_FLAG, (unsigned)1);
            CHECK_I(ret, "H5Pset_shared_mesg_index");
            ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)2, H5O_SHMESG_SDSPACE_FLAG, (unsigned)1);
            CHECK_I(ret, "H5Pset_shared_mesg_index");
        } /* end else */

        /* Create file */
        fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, my_fcpl, fapl);
        CHECK(fid, FAIL, "H5Fcreate");

        /* Close FCPL copy */
        ret = H5Pclose(my_fcpl);
        CHECK(ret, FAIL, "H5Pclose");

        /* Close file (re-opened below; the close/re-open brackets the
         * empty-file size measurement that is compiled out here) */
        ret = H5Fclose(fid);
        CHECK(ret, FAIL, "H5Fclose");
#if 0
        /* Get size of file */
        empty_filesize = h5_get_file_size(FILENAME, fapl);
        if (empty_filesize < 0)
            TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
#endif

        /* Re-open file */
        fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
        CHECK(fid, FAIL, "H5Fopen");

        /* Commit datatype to file */
        if (test_shared == 2) {
            ret = H5Tcommit2(fid, TYPE1_NAME, attr_tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
            CHECK(ret, FAIL, "H5Tcommit2");
        } /* end if */

        /* Set up to query the object creation properties */
        if (dcpl_g == H5P_DEFAULT) {
            dcpl = H5Pcreate(H5P_DATASET_CREATE);
            CHECK(dcpl, FAIL, "H5Pcreate");
        }
        else {
            dcpl = H5Pcopy(dcpl_g);
            CHECK(dcpl, FAIL, "H5Pcopy");
        }

        /* Create datasets */
        dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
        CHECK(dataset, FAIL, "H5Dcreate2");
        dataset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
        CHECK(dataset2, FAIL, "H5Dcreate2");

        /* Check on dataset's message storage status
         * (internal-test checks compiled out; the branch is kept so the
         * structure matches the library's test suite) */
        if (test_shared != 0) {
#if 0
            /* Datasets' datatypes can be shared */
            ret = H5F__get_sohm_mesg_count_test(fid, H5O_DTYPE_ID, &mesg_count);
            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
            VERIFY(mesg_count, 1, "H5F__get_sohm_mesg_count_test");

            /* Datasets' dataspace can be shared */
            ret = H5F__get_sohm_mesg_count_test(fid, H5O_SDSPACE_ID, &mesg_count);
            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
            VERIFY(mesg_count, 1, "H5F__get_sohm_mesg_count_test");
#endif
        } /* end if */

        /* Retrieve limits for compact/dense attribute storage */
        ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
        CHECK(ret, FAIL, "H5Pget_attr_phase_change");

        /* Close property list */
        ret = H5Pclose(dcpl);
        CHECK(ret, FAIL, "H5Pclose");
#if 0
        /* Check on datasets' attribute storage status */
        is_dense = H5O__is_attr_dense_test(dataset);
        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
        is_dense = H5O__is_attr_dense_test(dataset2);
        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
#endif
        /* Add attributes to each dataset, until after converting to dense storage */
        for (u = 0; u < max_compact * 2; u++) {
            /* Create attribute name */
            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);

            /* Alternate between creating "small" & "big" attributes */
            if (u % 2) {
                /* Create "small" attribute on first dataset */
                attr = H5Acreate2(dataset, attrname, attr_tid, sid, H5P_DEFAULT, H5P_DEFAULT);
                CHECK(attr, FAIL, "H5Acreate2");
#if 0
                /* Check that attribute is not shared */
                is_shared = H5A__is_shared_test(attr);
                VERIFY(is_shared, FALSE, "H5A__is_shared_test");
#endif
                /* Write data into the attribute */
                attr_value = u + 1;
                ret        = H5Awrite(attr, attr_tid, &attr_value);
                CHECK(ret, FAIL, "H5Awrite");
            } /* end if */
            else {
                /* Create "big" attribute on first dataset */
                attr = H5Acreate2(dataset, attrname, attr_tid, big_sid, H5P_DEFAULT, H5P_DEFAULT);
                CHECK(attr, FAIL, "H5Acreate2");
#if 0
                /* Check that attribute is shared */
                is_shared = H5A__is_shared_test(attr);
                VERIFY(is_shared, TRUE, "H5A__is_shared_test");

                /* Check refcount for attribute */
                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
                VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
#endif
                /* Write data into the attribute (mutating the first element
                 * forces a new copy of the shared message) */
                big_value[0] = u + 1;
                ret          = H5Awrite(attr, attr_tid, big_value);
                CHECK(ret, FAIL, "H5Awrite");
#if 0
                /* Check refcount for attribute */
                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
                VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
#endif
            } /* end else */

            /* Close attribute */
            ret = H5Aclose(attr);
            CHECK(ret, FAIL, "H5Aclose");
#if 0
            /* Check on dataset's attribute storage status */
            is_dense = H5O__is_attr_dense_test(dataset);
            if (u < max_compact)
                VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
            else
                VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
#endif

            /* Alternate between creating "small" & "big" attributes */
            if (u % 2) {
                /* Create "small" attribute on second dataset */
                attr = H5Acreate2(dataset2, attrname, attr_tid, sid, H5P_DEFAULT, H5P_DEFAULT);
                CHECK(attr, FAIL, "H5Acreate2");
#if 0
                /* Check that attribute is not shared */
                is_shared = H5A__is_shared_test(attr);
                VERIFY(is_shared, FALSE, "H5A__is_shared_test");
#endif
                /* Write data into the attribute */
                attr_value = u + 1;
                ret        = H5Awrite(attr, attr_tid, &attr_value);
                CHECK(ret, FAIL, "H5Awrite");
            } /* end if */
            else {
                /* Create "big" attribute on second dataset (identical data to
                 * the first dataset's, so the message can be shared) */
                attr = H5Acreate2(dataset2, attrname, attr_tid, big_sid, H5P_DEFAULT, H5P_DEFAULT);
                CHECK(attr, FAIL, "H5Acreate2");
#if 0
                /* Check that attribute is shared */
                is_shared = H5A__is_shared_test(attr);
                VERIFY(is_shared, TRUE, "H5A__is_shared_test");

                /* Check refcount for attribute */
                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
                VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
#endif
                /* Write data into the attribute */
                big_value[0] = u + 1;
                ret          = H5Awrite(attr, attr_tid, big_value);
                CHECK(ret, FAIL, "H5Awrite");
#if 0
                /* Check refcount for attribute */
                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
                VERIFY(shared_refcount, 2, "H5A__get_shared_rc_test");
#endif
            } /* end else */

            /* Close attribute */
            ret = H5Aclose(attr);
            CHECK(ret, FAIL, "H5Aclose");
#if 0
            /* Check on dataset's attribute storage status */
            is_dense = H5O__is_attr_dense_test(dataset2);
            if (u < max_compact)
                VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
            else
                VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
#endif
        } /* end for */

        /* Close attribute's datatype */
        ret = H5Tclose(attr_tid);
        CHECK(ret, FAIL, "H5Tclose");

        /* Close Datasets */
        ret = H5Dclose(dataset);
        CHECK(ret, FAIL, "H5Dclose");
        ret = H5Dclose(dataset2);
        CHECK(ret, FAIL, "H5Dclose");
#if 0
        /* Check on shared message status now */
        if (test_shared != 0) {
            if (test_shared == 1) {
                /* Check on datatype storage status */
                ret = H5F__get_sohm_mesg_count_test(fid, H5O_DTYPE_ID, &mesg_count);
                CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
                VERIFY(mesg_count, 2, "H5F__get_sohm_mesg_count_test");
            } /* end if */

            /* Check on dataspace storage status */
            ret = H5F__get_sohm_mesg_count_test(fid, H5O_SDSPACE_ID, &mesg_count);
            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
            VERIFY(mesg_count, 2, "H5F__get_sohm_mesg_count_test");
        } /* end if */
#endif
        /* Unlink datasets with attributes */
        ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Ldelete");
        ret = H5Ldelete(fid, DSET2_NAME, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Ldelete");

        /* Unlink committed datatype */
        if (test_shared == 2) {
            ret = H5Ldelete(fid, TYPE1_NAME, H5P_DEFAULT);
            CHECK(ret, FAIL, "H5Ldelete");
        } /* end if */
#if 0
        /* Check on attribute storage status */
        ret = H5F__get_sohm_mesg_count_test(fid, H5O_ATTR_ID, &mesg_count);
        CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
        VERIFY(mesg_count, 0, "H5F__get_sohm_mesg_count_test");

        if (test_shared != 0) {
            /* Check on datatype storage status */
            ret = H5F__get_sohm_mesg_count_test(fid, H5O_DTYPE_ID, &mesg_count);
            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
            VERIFY(mesg_count, 0, "H5F__get_sohm_mesg_count_test");

            /* Check on dataspace storage status */
            ret = H5F__get_sohm_mesg_count_test(fid, H5O_SDSPACE_ID, &mesg_count);
            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
            VERIFY(mesg_count, 0, "H5F__get_sohm_mesg_count_test");
        } /* end if */
#endif
        /* Close file */
        ret = H5Fclose(fid);
        CHECK(ret, FAIL, "H5Fclose");
#if 0
        if (h5_using_default_driver(NULL)) {
            /* Check size of file */
            filesize = h5_get_file_size(FILENAME, fapl);
            VERIFY(filesize, empty_filesize, "h5_get_file_size");
        }
#endif
    } /* end for */

    /* Close dataspaces */
    ret = H5Sclose(sid);
    CHECK(ret, FAIL, "H5Sclose");
    ret = H5Sclose(big_sid);
    CHECK(ret, FAIL, "H5Sclose");

    /* Release memory */
    HDfree(big_value);
} /* test_attr_shared_write() */
+
+/****************************************************************
+**
+** test_attr_shared_rename(): Test basic H5A (attribute) code.
+** Tests renaming shared attributes in "compact" & "dense" storage
+**
+****************************************************************/
+static void
+test_attr_shared_rename(hid_t fcpl, hid_t fapl)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t my_fcpl; /* File creation property list ID */
+ hid_t dataset, dataset2; /* Dataset ID2 */
+ hid_t attr_tid; /* Attribute's datatype ID */
+ hid_t sid, big_sid; /* Dataspace IDs */
+ hsize_t big_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3}; /* Dimensions for "big" attribute */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute on first dataset */
+ char attrname2[NAME_BUF_SIZE]; /* Name of attribute on second dataset */
+ unsigned max_compact; /* Maximum # of attributes to store compactly */
+ unsigned min_dense; /* Minimum # of attributes to store "densely" */
+#if 0
+ htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_shared; /* Is attributes shared? */
+ hsize_t shared_refcount; /* Reference count of shared attribute */
+#endif
+ unsigned attr_value; /* Attribute value */
+ unsigned *big_value; /* Data for "big" attribute */
+#if 0
+ size_t mesg_count; /* # of shared messages */
+#endif
+ unsigned test_shared; /* Index over shared component type */
+ unsigned u; /* Local index variable */
+#if 0
+ h5_stat_size_t empty_filesize; /* Size of empty file */
+ h5_stat_size_t filesize; /* Size of file after modifications */
+#endif
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Renaming Shared & Unshared Attributes in Compact & Dense Storage\n"));
+
+ /* Allocate & initialize "big" attribute data */
+ big_value = (unsigned *)HDmalloc((size_t)(SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3) * sizeof(unsigned));
+ CHECK_PTR(big_value, "HDmalloc");
+ HDmemset(big_value, 1, sizeof(unsigned) * (size_t)(SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3));
+
+ /* Create dataspace for dataset */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Create "big" dataspace for "large" attributes */
+ big_sid = H5Screate_simple(SPACE1_RANK, big_dims, NULL);
+ CHECK(big_sid, FAIL, "H5Screate_simple");
+
+ /* Loop over type of shared components */
+ for (test_shared = 0; test_shared < 3; test_shared++) {
+ /* Make copy of file creation property list */
+ my_fcpl = H5Pcopy(fcpl);
+ CHECK(my_fcpl, FAIL, "H5Pcopy");
+
+ /* Set up datatype for attributes */
+ attr_tid = H5Tcopy(H5T_NATIVE_UINT);
+ CHECK(attr_tid, FAIL, "H5Tcopy");
+
+ /* Special setup for each type of shared components */
+ if (test_shared == 0) {
+ /* Make attributes > 500 bytes shared */
+ ret = H5Pset_shared_mesg_nindexes(my_fcpl, (unsigned)1);
+ CHECK_I(ret, "H5Pset_shared_mesg_nindexes");
+ ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)0, H5O_SHMESG_ATTR_FLAG, (unsigned)500);
+ CHECK_I(ret, "H5Pset_shared_mesg_index");
+ } /* end if */
+ else {
+ /* Set up copy of file creation property list */
+
+ ret = H5Pset_shared_mesg_nindexes(my_fcpl, (unsigned)3);
+ CHECK_I(ret, "H5Pset_shared_mesg_nindexes");
+
+ /* Make attributes > 500 bytes shared */
+ ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)0, H5O_SHMESG_ATTR_FLAG, (unsigned)500);
+ CHECK_I(ret, "H5Pset_shared_mesg_index");
+
+ /* Make datatypes & dataspaces > 1 byte shared (i.e. all of them :-) */
+ ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)1, H5O_SHMESG_DTYPE_FLAG, (unsigned)1);
+ CHECK_I(ret, "H5Pset_shared_mesg_index");
+ ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)2, H5O_SHMESG_SDSPACE_FLAG, (unsigned)1);
+ CHECK_I(ret, "H5Pset_shared_mesg_index");
+ } /* end else */
+
+ /* Create file */
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, my_fcpl, fapl);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Close FCPL copy */
+ ret = H5Pclose(my_fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+#if 0
+ /* Get size of file */
+ empty_filesize = h5_get_file_size(FILENAME, fapl);
+ if (empty_filesize < 0)
+ TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
+#endif
+
+ /* Re-open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Commit datatype to file */
+ if (test_shared == 2) {
+ ret = H5Tcommit2(fid, TYPE1_NAME, attr_tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+ } /* end if */
+
+ /* Set up to query the object creation properties */
+ if (dcpl_g == H5P_DEFAULT) {
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, FAIL, "H5Pcreate");
+ }
+ else {
+ dcpl = H5Pcopy(dcpl_g);
+ CHECK(dcpl, FAIL, "H5Pcopy");
+ }
+
+ /* Create datasets */
+ dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+ dataset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dataset2, FAIL, "H5Dcreate2");
+#if 0
+ /* Check on dataset's message storage status */
+ if (test_shared != 0) {
+ /* Datasets' datatypes can be shared */
+ ret = H5F__get_sohm_mesg_count_test(fid, H5O_DTYPE_ID, &mesg_count);
+ CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+ VERIFY(mesg_count, 1, "H5F__get_sohm_mesg_count_test");
+
+ /* Datasets' dataspace can be shared */
+ ret = H5F__get_sohm_mesg_count_test(fid, H5O_SDSPACE_ID, &mesg_count);
+ CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+ VERIFY(mesg_count, 1, "H5F__get_sohm_mesg_count_test");
+ } /* end if */
+#endif
+ /* Retrieve limits for compact/dense attribute storage */
+ ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+ /* Close property list */
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+#if 0
+ /* Check on datasets' attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+ is_dense = H5O__is_attr_dense_test(dataset2);
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+ /* Add attributes to each dataset, until after converting to dense storage */
+ for (u = 0; u < max_compact * 2; u++) {
+ /* Create attribute name */
+ HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+
+ /* Alternate between creating "small" & "big" attributes */
+ if (u % 2) {
+ /* Create "small" attribute on first dataset */
+ attr = H5Acreate2(dataset, attrname, attr_tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+ /* Check that attribute is not shared */
+ is_shared = H5A__is_shared_test(attr);
+ VERIFY(is_shared, FALSE, "H5A__is_shared_test");
+#endif
+ /* Write data into the attribute */
+ attr_value = u + 1;
+ ret = H5Awrite(attr, attr_tid, &attr_value);
+ CHECK(ret, FAIL, "H5Awrite");
+ } /* end if */
+ else {
+ /* Create "big" attribute on first dataset */
+ attr = H5Acreate2(dataset, attrname, attr_tid, big_sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+ /* Check that attribute is shared */
+ is_shared = H5A__is_shared_test(attr);
+ VERIFY(is_shared, TRUE, "H5A__is_shared_test");
+
+ /* Check refcount for attribute */
+ ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+ CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+ VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
+#endif
+ /* Write data into the attribute */
+ big_value[0] = u + 1;
+ ret = H5Awrite(attr, attr_tid, big_value);
+ CHECK(ret, FAIL, "H5Awrite");
+#if 0
+ /* Check refcount for attribute */
+ ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+ CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+ VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
+#endif
+ } /* end else */
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset);
+ if (u < max_compact)
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+ else
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+
+ /* Alternate between creating "small" & "big" attributes */
+ if (u % 2) {
+ /* Create "small" attribute on second dataset */
+ attr = H5Acreate2(dataset2, attrname, attr_tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+ /* Check that attribute is not shared */
+ is_shared = H5A__is_shared_test(attr);
+ VERIFY(is_shared, FALSE, "H5A__is_shared_test");
+#endif
+ /* Write data into the attribute */
+ attr_value = u + 1;
+ ret = H5Awrite(attr, attr_tid, &attr_value);
+ CHECK(ret, FAIL, "H5Awrite");
+ } /* end if */
+ else {
+ /* Create "big" attribute on second dataset */
+ attr = H5Acreate2(dataset2, attrname, attr_tid, big_sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+ /* Check that attribute is shared */
+ is_shared = H5A__is_shared_test(attr);
+ VERIFY(is_shared, TRUE, "H5A__is_shared_test");
+
+ /* Check refcount for attribute */
+ ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+ CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+ VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
+#endif
+ /* Write data into the attribute */
+ big_value[0] = u + 1;
+ ret = H5Awrite(attr, attr_tid, big_value);
+ CHECK(ret, FAIL, "H5Awrite");
+#if 0
+ /* Check refcount for attribute */
+ ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+ CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+ VERIFY(shared_refcount, 2, "H5A__get_shared_rc_test");
+#endif
+ } /* end else */
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+#if 0
+ /* Check on dataset's attribute storage status */
+ is_dense = H5O__is_attr_dense_test(dataset2);
+ if (u < max_compact)
+ VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+ else
+ VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+
+ /* Create new attribute name */
+ HDsnprintf(attrname2, sizeof(attrname2), "new attr %02u", u);
+
+ /* Change second dataset's attribute's name */
+ ret = H5Arename_by_name(fid, DSET2_NAME, attrname, attrname2, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Arename_by_name");
+
+ /* Check refcount on attributes now */
+
+ /* Check refcount on renamed attribute */
+ attr = H5Aopen(dataset2, attrname2, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+#if 0
+ if (u % 2) {
+ /* Check that attribute is not shared */
+ is_shared = H5A__is_shared_test(attr);
+ VERIFY(is_shared, FALSE, "H5A__is_shared_test");
+ } /* end if */
+ else {
+ /* Check that attribute is shared */
+ is_shared = H5A__is_shared_test(attr);
+ VERIFY(is_shared, TRUE, "H5A__is_shared_test");
+
+ /* Check refcount for attribute */
+ ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+ CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+ VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
+ } /* end else */
+#endif
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Check refcount on original attribute */
+ attr = H5Aopen(dataset, attrname, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+#if 0
+ if (u % 2) {
+ /* Check that attribute is not shared */
+ is_shared = H5A__is_shared_test(attr);
+ VERIFY(is_shared, FALSE, "H5A__is_shared_test");
+ } /* end if */
+ else {
+ /* Check that attribute is shared */
+ is_shared = H5A__is_shared_test(attr);
+ VERIFY(is_shared, TRUE, "H5A__is_shared_test");
+
+ /* Check refcount for attribute */
+ ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+ CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+ VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
+ } /* end else */
+#endif
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Change second dataset's attribute's name back to original */
+ ret = H5Arename_by_name(fid, DSET2_NAME, attrname2, attrname, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Arename_by_name");
+
+ /* Check refcount on attributes now */
+
+ /* Check refcount on renamed attribute */
+ attr = H5Aopen(dataset2, attrname, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+#if 0
+ if (u % 2) {
+ /* Check that attribute is not shared */
+ is_shared = H5A__is_shared_test(attr);
+ VERIFY(is_shared, FALSE, "H5A__is_shared_test");
+ } /* end if */
+ else {
+ /* Check that attribute is shared */
+ is_shared = H5A__is_shared_test(attr);
+ VERIFY(is_shared, TRUE, "H5A__is_shared_test");
+
+ /* Check refcount for attribute */
+ ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+ CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+ VERIFY(shared_refcount, 2, "H5A__get_shared_rc_test");
+ } /* end else */
+#endif
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Check refcount on original attribute */
+ attr = H5Aopen(dataset, attrname, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+#if 0
+ if (u % 2) {
+ /* Check that attribute is not shared */
+ is_shared = H5A__is_shared_test(attr);
+ VERIFY(is_shared, FALSE, "H5A__is_shared_test");
+ } /* end if */
+ else {
+ /* Check that attribute is shared */
+ is_shared = H5A__is_shared_test(attr);
+ VERIFY(is_shared, TRUE, "H5A__is_shared_test");
+
+ /* Check refcount for attribute */
+ ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+ CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+ VERIFY(shared_refcount, 2, "H5A__get_shared_rc_test");
+ } /* end else */
+#endif
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+ } /* end for */
+
+ /* Close attribute's datatype */
+ ret = H5Tclose(attr_tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close Datasets */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Dclose(dataset2);
+ CHECK(ret, FAIL, "H5Dclose");
+#if 0
+ /* Check on shared message status now */
+ if (test_shared != 0) {
+ if (test_shared == 1) {
+ /* Check on datatype storage status */
+ ret = H5F__get_sohm_mesg_count_test(fid, H5O_DTYPE_ID, &mesg_count);
+ CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+ VERIFY(mesg_count, 2, "H5F__get_sohm_mesg_count_test");
+ } /* end if */
+
+ /* Check on dataspace storage status */
+ ret = H5F__get_sohm_mesg_count_test(fid, H5O_SDSPACE_ID, &mesg_count);
+ CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+ VERIFY(mesg_count, 2, "H5F__get_sohm_mesg_count_test");
+ } /* end if */
+#endif
+ /* Unlink datasets with attributes */
+ ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
+ CHECK(ret, FAIL, "HLdelete");
+ ret = H5Ldelete(fid, DSET2_NAME, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Ldelete");
+
+ /* Unlink committed datatype */
+ if (test_shared == 2) {
+ ret = H5Ldelete(fid, TYPE1_NAME, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Ldelete");
+ } /* end if */
+#if 0
+ /* Check on attribute storage status */
+ ret = H5F__get_sohm_mesg_count_test(fid, H5O_ATTR_ID, &mesg_count);
+ CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+ VERIFY(mesg_count, 0, "H5F__get_sohm_mesg_count_test");
+
+ if (test_shared != 0) {
+ /* Check on datatype storage status */
+ ret = H5F__get_sohm_mesg_count_test(fid, H5O_DTYPE_ID, &mesg_count);
+ CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+ VERIFY(mesg_count, 0, "H5F__get_sohm_mesg_count_test");
+
+ /* Check on dataspace storage status */
+ ret = H5F__get_sohm_mesg_count_test(fid, H5O_SDSPACE_ID, &mesg_count);
+ CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+ VERIFY(mesg_count, 0, "H5F__get_sohm_mesg_count_test");
+ } /* end if */
+#endif
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+#if 0
+ if (h5_using_default_driver(NULL)) {
+ /* Check size of file */
+ filesize = h5_get_file_size(FILENAME, fapl);
+ VERIFY(filesize, empty_filesize, "h5_get_file_size");
+ }
+#endif
+ } /* end for */
+
+ /* Close dataspaces */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(big_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Release memory */
+ HDfree(big_value);
+} /* test_attr_shared_rename() */
+
+/****************************************************************
+**
+** test_attr_shared_delete(): Test basic H5A (attribute) code.
+** Tests deleting shared attributes in "compact" & "dense" storage
+**
+****************************************************************/
+static void
+test_attr_shared_delete(hid_t fcpl, hid_t fapl)
+{
+    hid_t fid;                    /* File ID */
+    hid_t my_fcpl;                /* File creation property list ID */
+    hid_t dataset, dataset2;      /* Dataset IDs */
+    hid_t attr_tid;               /* Attribute's datatype ID */
+    hid_t sid, big_sid;           /* Dataspace IDs */
+    hsize_t big_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3}; /* Dimensions for "big" attribute */
+    hid_t attr;                   /* Attribute ID */
+    hid_t dcpl;                   /* Dataset creation property list ID */
+    char attrname[NAME_BUF_SIZE]; /* Name of attribute on first dataset */
+    unsigned max_compact;         /* Maximum # of attributes to store compactly */
+    unsigned min_dense;           /* Minimum # of attributes to store "densely" */
+#if 0
+    htri_t is_dense;              /* Are attributes stored densely? */
+    htri_t is_shared;             /* Are attributes shared? */
+    hsize_t shared_refcount;      /* Reference count of shared attribute */
+#endif
+    unsigned attr_value;          /* Attribute value */
+    unsigned *big_value;          /* Data for "big" attribute */
+#if 0
+    size_t mesg_count;            /* # of shared messages */
+#endif
+    unsigned test_shared;         /* Index over shared component type */
+    unsigned u;                   /* Local index variable */
+#if 0
+    h5_stat_size_t empty_filesize; /* Size of empty file */
+    h5_stat_size_t filesize;       /* Size of file after modifications */
+#endif
+    herr_t ret;                   /* Generic return value */
+
+    /* NOTE(review): the #if 0 sections below call internal test routines
+     * (H5A__*, H5O__*, H5F__*, h5_*) that are not reachable through the
+     * public API these API tests are restricted to, so they are disabled. */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Deleting Shared & Unshared Attributes in Compact & Dense Storage\n"));
+
+    /* Allocate & initialize "big" attribute data */
+    big_value = (unsigned *)HDmalloc((size_t)(SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3) * sizeof(unsigned));
+    CHECK_PTR(big_value, "HDmalloc");
+    HDmemset(big_value, 1, sizeof(unsigned) * (size_t)(SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3));
+
+    /* Create dataspace for dataset */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create "big" dataspace for "large" attributes */
+    big_sid = H5Screate_simple(SPACE1_RANK, big_dims, NULL);
+    CHECK(big_sid, FAIL, "H5Screate_simple");
+
+    /* Loop over type of shared components:
+     *  0 - only large attributes are shared
+     *  1 - attributes, datatypes & dataspaces are shared
+     *  2 - same as 1, but the attribute datatype is also committed to the file
+     */
+    for (test_shared = 0; test_shared < 3; test_shared++) {
+        /* Make copy of file creation property list */
+        my_fcpl = H5Pcopy(fcpl);
+        CHECK(my_fcpl, FAIL, "H5Pcopy");
+
+        /* Set up datatype for attributes */
+        attr_tid = H5Tcopy(H5T_NATIVE_UINT);
+        CHECK(attr_tid, FAIL, "H5Tcopy");
+
+        /* Special setup for each type of shared components */
+        if (test_shared == 0) {
+            /* Make attributes > 500 bytes shared */
+            ret = H5Pset_shared_mesg_nindexes(my_fcpl, (unsigned)1);
+            CHECK_I(ret, "H5Pset_shared_mesg_nindexes");
+            ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)0, H5O_SHMESG_ATTR_FLAG, (unsigned)500);
+            CHECK_I(ret, "H5Pset_shared_mesg_index");
+        } /* end if */
+        else {
+            /* Set up copy of file creation property list */
+
+            ret = H5Pset_shared_mesg_nindexes(my_fcpl, (unsigned)3);
+            CHECK_I(ret, "H5Pset_shared_mesg_nindexes");
+
+            /* Make attributes > 500 bytes shared */
+            ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)0, H5O_SHMESG_ATTR_FLAG, (unsigned)500);
+            CHECK_I(ret, "H5Pset_shared_mesg_index");
+
+            /* Make datatypes & dataspaces > 1 byte shared (i.e. all of them :-) */
+            ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)1, H5O_SHMESG_DTYPE_FLAG, (unsigned)1);
+            CHECK_I(ret, "H5Pset_shared_mesg_index");
+            ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)2, H5O_SHMESG_SDSPACE_FLAG, (unsigned)1);
+            CHECK_I(ret, "H5Pset_shared_mesg_index");
+        } /* end else */
+
+        /* Create file */
+        fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, my_fcpl, fapl);
+        CHECK(fid, FAIL, "H5Fcreate");
+
+        /* Close FCPL copy */
+        ret = H5Pclose(my_fcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        /* Close file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+#if 0
+        /* Get size of file */
+        empty_filesize = h5_get_file_size(FILENAME, fapl);
+        if (empty_filesize < 0)
+            TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
+#endif
+
+        /* Re-open file */
+        fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+        CHECK(fid, FAIL, "H5Fopen");
+
+        /* Commit datatype to file */
+        if (test_shared == 2) {
+            ret = H5Tcommit2(fid, TYPE1_NAME, attr_tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Tcommit2");
+        } /* end if */
+
+        /* Set up to query the object creation properties */
+        if (dcpl_g == H5P_DEFAULT) {
+            dcpl = H5Pcreate(H5P_DATASET_CREATE);
+            CHECK(dcpl, FAIL, "H5Pcreate");
+        }
+        else {
+            dcpl = H5Pcopy(dcpl_g);
+            CHECK(dcpl, FAIL, "H5Pcopy");
+        }
+
+        /* Create datasets */
+        dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dataset, FAIL, "H5Dcreate2");
+        dataset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dataset2, FAIL, "H5Dcreate2");
+#if 0
+        /* Check on dataset's message storage status */
+        if (test_shared != 0) {
+            /* Datasets' datatypes can be shared */
+            ret = H5F__get_sohm_mesg_count_test(fid, H5O_DTYPE_ID, &mesg_count);
+            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+            VERIFY(mesg_count, 1, "H5F__get_sohm_mesg_count_test");
+
+            /* Datasets' dataspace can be shared */
+            ret = H5F__get_sohm_mesg_count_test(fid, H5O_SDSPACE_ID, &mesg_count);
+            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+            VERIFY(mesg_count, 1, "H5F__get_sohm_mesg_count_test");
+        } /* end if */
+#endif
+        /* Retrieve limits for compact/dense attribute storage */
+        ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+        CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+        /* Close property list */
+        ret = H5Pclose(dcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+#if 0
+        /* Check on datasets' attribute storage status */
+        is_dense = H5O__is_attr_dense_test(dataset);
+        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+        is_dense = H5O__is_attr_dense_test(dataset2);
+        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+        /* Add attributes to each dataset, until after converting to dense storage.
+         * Even-indexed attributes use the "big" dataspace (> 500 bytes, so they
+         * become shared); odd-indexed ones use the scalar dataspace (unshared). */
+        for (u = 0; u < max_compact * 2; u++) {
+            /* Create attribute name */
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+
+            /* Alternate between creating "small" & "big" attributes */
+            if (u % 2) {
+                /* Create "small" attribute on first dataset */
+                attr = H5Acreate2(dataset, attrname, attr_tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+                /* Check that attribute is not shared */
+                is_shared = H5A__is_shared_test(attr);
+                VERIFY(is_shared, FALSE, "H5A__is_shared_test");
+#endif
+                /* Write data into the attribute */
+                attr_value = u + 1;
+                ret = H5Awrite(attr, attr_tid, &attr_value);
+                CHECK(ret, FAIL, "H5Awrite");
+            } /* end if */
+            else {
+                /* Create "big" attribute on first dataset */
+                attr = H5Acreate2(dataset, attrname, attr_tid, big_sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+                /* Check that attribute is shared */
+                is_shared = H5A__is_shared_test(attr);
+                VERIFY(is_shared, TRUE, "H5A__is_shared_test");
+
+                /* Check refcount for attribute */
+                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+                VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
+#endif
+                /* Write data into the attribute */
+                big_value[0] = u + 1;
+                ret = H5Awrite(attr, attr_tid, big_value);
+                CHECK(ret, FAIL, "H5Awrite");
+#if 0
+                /* Check refcount for attribute */
+                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+                VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
+#endif
+            } /* end else */
+
+            /* Close attribute */
+            ret = H5Aclose(attr);
+            CHECK(ret, FAIL, "H5Aclose");
+#if 0
+            /* Check on dataset's attribute storage status */
+            is_dense = H5O__is_attr_dense_test(dataset);
+            if (u < max_compact)
+                VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+            else
+                VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+
+            /* Alternate between creating "small" & "big" attributes */
+            if (u % 2) {
+                /* Create "small" attribute on second dataset */
+                attr = H5Acreate2(dataset2, attrname, attr_tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+                /* Check that attribute is not shared */
+                is_shared = H5A__is_shared_test(attr);
+                VERIFY(is_shared, FALSE, "H5A__is_shared_test");
+#endif
+                /* Write data into the attribute */
+                attr_value = u + 1;
+                ret = H5Awrite(attr, attr_tid, &attr_value);
+                CHECK(ret, FAIL, "H5Awrite");
+            } /* end if */
+            else {
+                /* Create "big" attribute on second dataset */
+                attr = H5Acreate2(dataset2, attrname, attr_tid, big_sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+                /* Check that attribute is shared */
+                is_shared = H5A__is_shared_test(attr);
+                VERIFY(is_shared, TRUE, "H5A__is_shared_test");
+
+                /* Check refcount for attribute */
+                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+                VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
+#endif
+                /* Write data into the attribute */
+                big_value[0] = u + 1;
+                ret = H5Awrite(attr, attr_tid, big_value);
+                CHECK(ret, FAIL, "H5Awrite");
+#if 0
+                /* Check refcount for attribute */
+                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+                VERIFY(shared_refcount, 2, "H5A__get_shared_rc_test");
+#endif
+            } /* end else */
+
+            /* Close attribute */
+            ret = H5Aclose(attr);
+            CHECK(ret, FAIL, "H5Aclose");
+#if 0
+            /* Check on dataset's attribute storage status */
+            is_dense = H5O__is_attr_dense_test(dataset2);
+            if (u < max_compact)
+                VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+            else
+                VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+        } /* end for */
+
+        /* Delete attributes from second dataset; the copies on the first
+         * dataset must survive each deletion */
+        for (u = 0; u < max_compact * 2; u++) {
+            /* Create attribute name */
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+
+            /* Delete second dataset's attribute */
+            ret = H5Adelete_by_name(fid, DSET2_NAME, attrname, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Adelete_by_name");
+
+            /* Check refcount on attributes now */
+
+            /* Check refcount on first dataset's attribute */
+            attr = H5Aopen(dataset, attrname, H5P_DEFAULT);
+            CHECK(attr, FAIL, "H5Aopen");
+#if 0
+            if (u % 2) {
+                /* Check that attribute is not shared */
+                is_shared = H5A__is_shared_test(attr);
+                VERIFY(is_shared, FALSE, "H5A__is_shared_test");
+            } /* end if */
+            else {
+                /* Check that attribute is shared */
+                is_shared = H5A__is_shared_test(attr);
+                VERIFY(is_shared, TRUE, "H5A__is_shared_test");
+
+                /* Check refcount for attribute */
+                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+                VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
+            } /* end else */
+#endif
+            /* Close attribute */
+            ret = H5Aclose(attr);
+            CHECK(ret, FAIL, "H5Aclose");
+        } /* end for */
+
+        /* Close attribute's datatype */
+        ret = H5Tclose(attr_tid);
+        CHECK(ret, FAIL, "H5Tclose");
+
+        /* Close Datasets */
+        ret = H5Dclose(dataset);
+        CHECK(ret, FAIL, "H5Dclose");
+        ret = H5Dclose(dataset2);
+        CHECK(ret, FAIL, "H5Dclose");
+#if 0
+        /* Check on shared message status now */
+        if (test_shared != 0) {
+            if (test_shared == 1) {
+                /* Check on datatype storage status */
+                ret = H5F__get_sohm_mesg_count_test(fid, H5O_DTYPE_ID, &mesg_count);
+                CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+                VERIFY(mesg_count, 2, "H5F__get_sohm_mesg_count_test");
+            } /* end if */
+
+            /* Check on dataspace storage status */
+            ret = H5F__get_sohm_mesg_count_test(fid, H5O_SDSPACE_ID, &mesg_count);
+            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+            VERIFY(mesg_count, 2, "H5F__get_sohm_mesg_count_test");
+        } /* end if */
+#endif
+        /* Unlink datasets with attributes */
+        ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
+        CHECK(ret, FAIL, "H5Ldelete");
+        ret = H5Ldelete(fid, DSET2_NAME, H5P_DEFAULT);
+        CHECK(ret, FAIL, "H5Ldelete");
+
+        /* Unlink committed datatype */
+        if (test_shared == 2) {
+            ret = H5Ldelete(fid, TYPE1_NAME, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Ldelete");
+        } /* end if */
+#if 0
+        /* Check on attribute storage status */
+        ret = H5F__get_sohm_mesg_count_test(fid, H5O_ATTR_ID, &mesg_count);
+        CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+        VERIFY(mesg_count, 0, "H5F__get_sohm_mesg_count_test");
+
+        if (test_shared != 0) {
+            /* Check on datatype storage status */
+            ret = H5F__get_sohm_mesg_count_test(fid, H5O_DTYPE_ID, &mesg_count);
+            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+            VERIFY(mesg_count, 0, "H5F__get_sohm_mesg_count_test");
+
+            /* Check on dataspace storage status */
+            ret = H5F__get_sohm_mesg_count_test(fid, H5O_SDSPACE_ID, &mesg_count);
+            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+            VERIFY(mesg_count, 0, "H5F__get_sohm_mesg_count_test");
+        } /* end if */
+#endif
+        /* Close file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+#if 0
+        if (h5_using_default_driver(NULL)) {
+            /* Check size of file */
+            filesize = h5_get_file_size(FILENAME, fapl);
+            VERIFY(filesize, empty_filesize, "h5_get_file_size");
+        }
+#endif
+    } /* end for */
+
+    /* Close dataspaces */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Sclose(big_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Release memory */
+    HDfree(big_value);
+} /* test_attr_shared_delete() */
+
+/****************************************************************
+**
+** test_attr_shared_unlink(): Test basic H5A (attribute) code.
+**      Tests unlinking an object with shared attributes in "compact" & "dense" storage
+**
+****************************************************************/
+static void
+test_attr_shared_unlink(hid_t fcpl, hid_t fapl)
+{
+    hid_t fid;                    /* File ID */
+    hid_t my_fcpl;                /* File creation property list ID */
+    hid_t dataset, dataset2;      /* Dataset IDs */
+    hid_t attr_tid;               /* Attribute's datatype ID */
+    hid_t sid, big_sid;           /* Dataspace IDs */
+    hsize_t big_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3}; /* Dimensions for "big" attribute */
+    hid_t attr;                   /* Attribute ID */
+    hid_t dcpl;                   /* Dataset creation property list ID */
+    char attrname[NAME_BUF_SIZE]; /* Name of attribute on first dataset */
+    unsigned max_compact;         /* Maximum # of attributes to store compactly */
+    unsigned min_dense;           /* Minimum # of attributes to store "densely" */
+#if 0
+    htri_t is_dense;              /* Are attributes stored densely? */
+    htri_t is_shared;             /* Are attributes shared? */
+    hsize_t shared_refcount;      /* Reference count of shared attribute */
+#endif
+    unsigned attr_value;          /* Attribute value */
+    unsigned *big_value;          /* Data for "big" attribute */
+#if 0
+    size_t mesg_count;            /* # of shared messages */
+#endif
+    unsigned test_shared;         /* Index over shared component type */
+    unsigned u;                   /* Local index variable */
+#if 0
+    h5_stat_size_t empty_filesize; /* Size of empty file */
+    h5_stat_size_t filesize;       /* Size of file after modifications */
+#endif
+    herr_t ret;                   /* Generic return value */
+
+    /* NOTE(review): the #if 0 sections below call internal test routines
+     * (H5A__*, H5O__*, H5F__*, h5_*) that are not reachable through the
+     * public API these API tests are restricted to, so they are disabled. */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Unlinking Object with Shared Attributes in Compact & Dense Storage\n"));
+
+    /* Allocate & initialize "big" attribute data */
+    big_value = (unsigned *)HDmalloc((size_t)(SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3) * sizeof(unsigned));
+    CHECK_PTR(big_value, "HDmalloc");
+    HDmemset(big_value, 1, sizeof(unsigned) * (size_t)(SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3));
+
+    /* Create dataspace for dataset */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create "big" dataspace for "large" attributes */
+    big_sid = H5Screate_simple(SPACE1_RANK, big_dims, NULL);
+    CHECK(big_sid, FAIL, "H5Screate_simple");
+
+    /* Loop over type of shared components:
+     *  0 - only large attributes are shared
+     *  1 - attributes, datatypes & dataspaces are shared
+     *  2 - same as 1, but the attribute datatype is also committed to the file
+     */
+    for (test_shared = 0; test_shared < 3; test_shared++) {
+        /* Make copy of file creation property list */
+        my_fcpl = H5Pcopy(fcpl);
+        CHECK(my_fcpl, FAIL, "H5Pcopy");
+
+        /* Set up datatype for attributes */
+        attr_tid = H5Tcopy(H5T_NATIVE_UINT);
+        CHECK(attr_tid, FAIL, "H5Tcopy");
+
+        /* Special setup for each type of shared components */
+        if (test_shared == 0) {
+            /* Make attributes > 500 bytes shared */
+            ret = H5Pset_shared_mesg_nindexes(my_fcpl, (unsigned)1);
+            CHECK_I(ret, "H5Pset_shared_mesg_nindexes");
+            ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)0, H5O_SHMESG_ATTR_FLAG, (unsigned)500);
+            CHECK_I(ret, "H5Pset_shared_mesg_index");
+        } /* end if */
+        else {
+            /* Set up copy of file creation property list */
+
+            ret = H5Pset_shared_mesg_nindexes(my_fcpl, (unsigned)3);
+            CHECK_I(ret, "H5Pset_shared_mesg_nindexes");
+
+            /* Make attributes > 500 bytes shared */
+            ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)0, H5O_SHMESG_ATTR_FLAG, (unsigned)500);
+            CHECK_I(ret, "H5Pset_shared_mesg_index");
+
+            /* Make datatypes & dataspaces > 1 byte shared (i.e. all of them :-) */
+            ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)1, H5O_SHMESG_DTYPE_FLAG, (unsigned)1);
+            CHECK_I(ret, "H5Pset_shared_mesg_index");
+            ret = H5Pset_shared_mesg_index(my_fcpl, (unsigned)2, H5O_SHMESG_SDSPACE_FLAG, (unsigned)1);
+            CHECK_I(ret, "H5Pset_shared_mesg_index");
+        } /* end else */
+
+        /* Create file */
+        fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, my_fcpl, fapl);
+        CHECK(fid, FAIL, "H5Fcreate");
+
+        /* Close FCPL copy */
+        ret = H5Pclose(my_fcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        /* Close file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+#if 0
+        /* Get size of file */
+        empty_filesize = h5_get_file_size(FILENAME, fapl);
+        if (empty_filesize < 0)
+            TestErrPrintf("Line %d: file size wrong!\n", __LINE__);
+#endif
+
+        /* Re-open file */
+        fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+        CHECK(fid, FAIL, "H5Fopen");
+
+        /* Commit datatype to file */
+        if (test_shared == 2) {
+            ret = H5Tcommit2(fid, TYPE1_NAME, attr_tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Tcommit2");
+        } /* end if */
+
+        /* Set up to query the object creation properties */
+        if (dcpl_g == H5P_DEFAULT) {
+            dcpl = H5Pcreate(H5P_DATASET_CREATE);
+            CHECK(dcpl, FAIL, "H5Pcreate");
+        }
+        else {
+            dcpl = H5Pcopy(dcpl_g);
+            CHECK(dcpl, FAIL, "H5Pcopy");
+        }
+
+        /* Create datasets */
+        dataset = H5Dcreate2(fid, DSET1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dataset, FAIL, "H5Dcreate2");
+        dataset2 = H5Dcreate2(fid, DSET2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dataset2, FAIL, "H5Dcreate2");
+#if 0
+        /* Check on dataset's message storage status */
+        if (test_shared != 0) {
+            /* Datasets' datatypes can be shared */
+            ret = H5F__get_sohm_mesg_count_test(fid, H5O_DTYPE_ID, &mesg_count);
+            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+            VERIFY(mesg_count, 1, "H5F__get_sohm_mesg_count_test");
+
+            /* Datasets' dataspace can be shared */
+            ret = H5F__get_sohm_mesg_count_test(fid, H5O_SDSPACE_ID, &mesg_count);
+            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+            VERIFY(mesg_count, 1, "H5F__get_sohm_mesg_count_test");
+        } /* end if */
+#endif
+        /* Retrieve limits for compact/dense attribute storage */
+        ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+        CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+        /* Close property list */
+        ret = H5Pclose(dcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+#if 0
+        /* Check on datasets' attribute storage status */
+        is_dense = H5O__is_attr_dense_test(dataset);
+        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+        is_dense = H5O__is_attr_dense_test(dataset2);
+        VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+#endif
+        /* Add attributes to each dataset, until after converting to dense storage.
+         * Even-indexed attributes use the "big" dataspace (> 500 bytes, so they
+         * become shared); odd-indexed ones use the scalar dataspace (unshared). */
+        for (u = 0; u < max_compact * 2; u++) {
+            /* Create attribute name */
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+
+            /* Alternate between creating "small" & "big" attributes */
+            if (u % 2) {
+                /* Create "small" attribute on first dataset */
+                attr = H5Acreate2(dataset, attrname, attr_tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+                /* Check that attribute is not shared */
+                is_shared = H5A__is_shared_test(attr);
+                VERIFY(is_shared, FALSE, "H5A__is_shared_test");
+#endif
+                /* Write data into the attribute */
+                attr_value = u + 1;
+                ret = H5Awrite(attr, attr_tid, &attr_value);
+                CHECK(ret, FAIL, "H5Awrite");
+            } /* end if */
+            else {
+                /* Create "big" attribute on first dataset */
+                attr = H5Acreate2(dataset, attrname, attr_tid, big_sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+                /* Check that attribute is shared */
+                is_shared = H5A__is_shared_test(attr);
+                VERIFY(is_shared, TRUE, "H5A__is_shared_test");
+
+                /* Check refcount for attribute */
+                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+                VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
+#endif
+                /* Write data into the attribute */
+                big_value[0] = u + 1;
+                ret = H5Awrite(attr, attr_tid, big_value);
+                CHECK(ret, FAIL, "H5Awrite");
+#if 0
+                /* Check refcount for attribute */
+                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+                VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
+#endif
+            } /* end else */
+
+            /* Close attribute */
+            ret = H5Aclose(attr);
+            CHECK(ret, FAIL, "H5Aclose");
+#if 0
+            /* Check on dataset's attribute storage status */
+            is_dense = H5O__is_attr_dense_test(dataset);
+            if (u < max_compact)
+                VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+            else
+                VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+
+            /* Alternate between creating "small" & "big" attributes */
+            if (u % 2) {
+                /* Create "small" attribute on second dataset */
+                attr = H5Acreate2(dataset2, attrname, attr_tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+                /* Check that attribute is not shared */
+                is_shared = H5A__is_shared_test(attr);
+                VERIFY(is_shared, FALSE, "H5A__is_shared_test");
+#endif
+                /* Write data into the attribute */
+                attr_value = u + 1;
+                ret = H5Awrite(attr, attr_tid, &attr_value);
+                CHECK(ret, FAIL, "H5Awrite");
+            } /* end if */
+            else {
+                /* Create "big" attribute on second dataset */
+                attr = H5Acreate2(dataset2, attrname, attr_tid, big_sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(attr, FAIL, "H5Acreate2");
+#if 0
+                /* Check that attribute is shared */
+                is_shared = H5A__is_shared_test(attr);
+                VERIFY(is_shared, TRUE, "H5A__is_shared_test");
+
+                /* Check refcount for attribute */
+                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+                VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
+#endif
+                /* Write data into the attribute */
+                big_value[0] = u + 1;
+                ret = H5Awrite(attr, attr_tid, big_value);
+                CHECK(ret, FAIL, "H5Awrite");
+#if 0
+                /* Check refcount for attribute */
+                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+                VERIFY(shared_refcount, 2, "H5A__get_shared_rc_test");
+#endif
+            } /* end else */
+
+            /* Close attribute */
+            ret = H5Aclose(attr);
+            CHECK(ret, FAIL, "H5Aclose");
+#if 0
+            /* Check on dataset's attribute storage status */
+            is_dense = H5O__is_attr_dense_test(dataset2);
+            if (u < max_compact)
+                VERIFY(is_dense, FALSE, "H5O__is_attr_dense_test");
+            else
+                VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+        } /* end for */
+
+        /* Close attribute's datatype */
+        ret = H5Tclose(attr_tid);
+        CHECK(ret, FAIL, "H5Tclose");
+
+        /* Close second dataset */
+        ret = H5Dclose(dataset2);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* Unlink second dataset (destroying its attributes in the process) */
+        ret = H5Ldelete(fid, DSET2_NAME, H5P_DEFAULT);
+        CHECK(ret, FAIL, "H5Ldelete");
+
+#if 0
+        /* Check on first dataset's attribute storage status */
+        is_dense = H5O__is_attr_dense_test(dataset);
+        VERIFY(is_dense, TRUE, "H5O__is_attr_dense_test");
+#endif
+        /* Check ref count on attributes of first dataset */
+        for (u = 0; u < max_compact * 2; u++) {
+            /* Create attribute name */
+            HDsnprintf(attrname, sizeof(attrname), "attr %02u", u);
+
+            /* Open attribute on first dataset */
+            attr = H5Aopen(dataset, attrname, H5P_DEFAULT);
+            CHECK(attr, FAIL, "H5Aopen");
+#if 0
+            if (u % 2) {
+                /* Check that attribute is not shared */
+                is_shared = H5A__is_shared_test(attr);
+                VERIFY(is_shared, FALSE, "H5A__is_shared_test");
+            } /* end if */
+            else {
+                /* Check that attribute is shared */
+                is_shared = H5A__is_shared_test(attr);
+                VERIFY(is_shared, TRUE, "H5A__is_shared_test");
+
+                /* Check refcount for attribute */
+                ret = H5A__get_shared_rc_test(attr, &shared_refcount);
+                CHECK(ret, FAIL, "H5A__get_shared_rc_test");
+                VERIFY(shared_refcount, 1, "H5A__get_shared_rc_test");
+            } /* end else */
+#endif
+            /* Close attribute */
+            ret = H5Aclose(attr);
+            CHECK(ret, FAIL, "H5Aclose");
+        } /* end for */
+
+        /* Close Datasets */
+        ret = H5Dclose(dataset);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* Unlink first dataset */
+        ret = H5Ldelete(fid, DSET1_NAME, H5P_DEFAULT);
+        CHECK(ret, FAIL, "H5Ldelete");
+
+        /* Unlink committed datatype */
+        if (test_shared == 2) {
+            ret = H5Ldelete(fid, TYPE1_NAME, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Ldelete");
+        } /* end if */
+#if 0
+        /* Check on attribute storage status */
+        ret = H5F__get_sohm_mesg_count_test(fid, H5O_ATTR_ID, &mesg_count);
+        CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+        VERIFY(mesg_count, 0, "H5F__get_sohm_mesg_count_test");
+
+        if (test_shared != 0) {
+            /* Check on datatype storage status */
+            ret = H5F__get_sohm_mesg_count_test(fid, H5O_DTYPE_ID, &mesg_count);
+            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+            VERIFY(mesg_count, 0, "H5F__get_sohm_mesg_count_test");
+
+            /* Check on dataspace storage status */
+            ret = H5F__get_sohm_mesg_count_test(fid, H5O_SDSPACE_ID, &mesg_count);
+            CHECK(ret, FAIL, "H5F__get_sohm_mesg_count_test");
+            VERIFY(mesg_count, 0, "H5F__get_sohm_mesg_count_test");
+        } /* end if */
+#endif
+        /* Close file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+#if 0
+        if (h5_using_default_driver(NULL)) {
+            /* Check size of file */
+            filesize = h5_get_file_size(FILENAME, fapl);
+            VERIFY(filesize, empty_filesize, "h5_get_file_size");
+        }
+#endif
+    } /* end for */
+
+    /* Close dataspaces */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Sclose(big_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Release memory */
+    HDfree(big_value);
+} /* test_attr_shared_unlink() */
+
+/****************************************************************
+**
+** test_attr_bug1(): Test basic H5A (attribute) code.
+** Tests odd sequence of allocating and deallocating space in the file.
+** The series of actions below constructs a file with an attribute
+** in each object header chunk, except the first. Then, the attributes
+** are removed and re-created in a way that makes the object header
+** allocation code remove an object header chunk "in the middle" of
+** the sequence of the chunks.
+**
+****************************************************************/
+static void
+test_attr_bug1(hid_t fcpl, hid_t fapl)
+{
+    hid_t fid;  /* File ID */
+    hid_t gid;  /* Group ID */
+    hid_t aid;  /* Attribute ID */
+    hid_t sid;  /* Dataspace ID */
+    herr_t ret; /* Generic return status */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Allocating and De-allocating Attributes in Unusual Way\n"));
+
+    /* Create dataspace ID for attributes */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create main group to operate on.  The file is closed and re-opened
+     * between each step below; per the banner comment for this test, the
+     * sequence is designed to place each attribute in a separate object
+     * header chunk. */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    gid = H5Gcreate2(fid, GROUP1_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gcreate2");
+
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open file and create another group, then attribute on first group */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Create second group */
+    gid = H5Gcreate2(fid, GROUP2_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gcreate2");
+
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Re-open first group */
+    gid = H5Gopen2(fid, GROUP1_NAME, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gopen2");
+
+    /* Create attribute on first group */
+    aid = H5Acreate2(gid, ATTR7_NAME, H5T_NATIVE_DOUBLE, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, FAIL, "H5Acreate2");
+
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open file and create another group, then another attribute on first group */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Create third group */
+    gid = H5Gcreate2(fid, GROUP3_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gcreate2");
+
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Unlink second group */
+    ret = H5Ldelete(fid, GROUP2_NAME, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Ldelete");
+
+    /* Re-open first group */
+    gid = H5Gopen2(fid, GROUP1_NAME, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gopen2");
+
+    /* Create another attribute on first group */
+    aid = H5Acreate2(gid, ATTR8_NAME, H5T_NATIVE_DOUBLE, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, FAIL, "H5Acreate2");
+
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open file and re-create attributes on first group; deleting and
+     * re-creating them exercises the object header chunk deallocation path
+     * described in the banner comment for this test */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Re-open first group */
+    gid = H5Gopen2(fid, GROUP1_NAME, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gopen2");
+
+    /* Delete first attribute */
+    ret = H5Adelete(gid, ATTR7_NAME);
+    CHECK(ret, FAIL, "H5Adelete");
+
+    /* Re-create first attribute */
+    aid = H5Acreate2(gid, ATTR7_NAME, H5T_NATIVE_DOUBLE, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, FAIL, "H5Acreate2");
+
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Delete second attribute */
+    ret = H5Adelete(gid, ATTR8_NAME);
+    CHECK(ret, FAIL, "H5Adelete");
+
+    /* Re-create second attribute */
+    aid = H5Acreate2(gid, ATTR8_NAME, H5T_NATIVE_DOUBLE, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, FAIL, "H5Acreate2");
+
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Close dataspace ID */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_attr_bug1() */
+
+/****************************************************************
+**
+** test_attr_bug2(): Test basic H5A (attribute) code.
+** Tests deleting a large number of attributes with the
+** intention of creating a null message with a size that
+** is too large. This routine deletes every other
+** attribute, but the original bug could also be
+** reproduced by deleting every attribute except a few to
+** keep the chunk open.
+**
+****************************************************************/
+static void
+test_attr_bug2(hid_t fcpl, hid_t fapl)
+{
+    hid_t    fid;                 /* File ID */
+    hid_t    gid;                 /* Group ID */
+    hid_t    aid;                 /* Attribute ID */
+    hid_t    sid;                 /* Dataspace ID */
+    hid_t    tid;                 /* Datatype ID */
+    hid_t    gcpl;                /* Group creation property list */
+    hsize_t  dims[2] = {10, 100}; /* Attribute dimensions */
+    char     aname[16];           /* Attribute name */
+    unsigned i;                   /* index */
+    herr_t   ret;                 /* Generic return status */
+    htri_t   tri_ret;             /* htri_t return status */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Allocating and De-allocating Attributes in Unusual Way\n"));
+
+    /* Create group creation property list */
+    gcpl = H5Pcreate(H5P_GROUP_CREATE);
+    CHECK(gcpl, FAIL, "H5Pcreate");
+
+    /* Prevent the library from switching to dense attribute storage */
+    /* Not doing this with the latest format actually triggers a different bug.
+     * This will be tested here as soon as it is fixed. -NAF
+     */
+    ret = H5Pset_attr_phase_change(gcpl, BUG2_NATTR + 10, BUG2_NATTR + 5);
+    CHECK(ret, FAIL, "H5Pset_attr_phase_change");
+
+    /* Create dataspace ID for attributes */
+    sid = H5Screate_simple(2, dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Create main group to operate on */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    gid = H5Gcreate2(fid, GROUP1_NAME, H5P_DEFAULT, gcpl, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gcreate2");
+
+    /* Create attributes on group */
+    for (i = 0; i < BUG2_NATTR; i++) {
+        HDsnprintf(aname, sizeof(aname), "%03u", i);
+        aid = H5Acreate2(gid, aname, H5T_STD_I32LE, sid, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(aid, FAIL, "H5Acreate2");
+
+        ret = H5Aclose(aid);
+        CHECK(ret, FAIL, "H5Aclose");
+    }
+
+    /* Delete every other attribute, leaving "holes" in the object header */
+    for (i = 1; i < BUG2_NATTR; i += 2) {
+        HDsnprintf(aname, sizeof(aname), "%03u", i);
+        ret = H5Adelete(gid, aname);
+        CHECK(ret, FAIL, "H5Adelete");
+    }
+
+    /* Close IDs */
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Reopen file and group */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDONLY, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    gid = H5Gopen2(fid, GROUP1_NAME, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gopen2");
+
+    /* Open an attribute in the middle */
+    i = (BUG2_NATTR / 4) * 2;
+    HDsnprintf(aname, sizeof(aname), "%03u", i);
+    aid = H5Aopen(gid, aname, H5P_DEFAULT);
+    CHECK(aid, FAIL, "H5Aopen");
+
+    /* Verify that the attribute has the correct datatype */
+    tid = H5Aget_type(aid);
+    CHECK(tid, FAIL, "H5Aget_type");
+
+    tri_ret = H5Tequal(tid, H5T_STD_I32LE);
+    VERIFY(tri_ret, TRUE, "H5Tequal");
+
+    /* Close IDs */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Now test a variation on this bug - where either the size of chunk 0 goes
+     * down a "notch" or two, or chunk 1 becomes completely null at the same
+     * time that a null message that is too large is formed */
+    dims[0] = 25;
+    dims[1] = 41; /* 1025*4 byte attribute size */
+
+    /* Create dataspace ID for attributes */
+    sid = H5Screate_simple(2, dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Create main group to operate on */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    gid = H5Gcreate2(fid, GROUP1_NAME, H5P_DEFAULT, gcpl, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gcreate2");
+
+    /* Create attributes on group */
+    for (i = 0; i < BUG2_NATTR2; i++) {
+        HDsnprintf(aname, sizeof(aname), "%03u", i);
+        aid = H5Acreate2(gid, aname, H5T_STD_I32LE, sid, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(aid, FAIL, "H5Acreate2");
+
+        ret = H5Aclose(aid);
+        CHECK(ret, FAIL, "H5Aclose");
+    }
+
+    /* Delete all of the attributes, so the object header chunk collapses to
+     * null messages (note: this loop, unlike the one above, deletes every
+     * attribute, not every other one) */
+    for (i = 0; i < BUG2_NATTR2; i++) {
+        HDsnprintf(aname, sizeof(aname), "%03u", i);
+        ret = H5Adelete(gid, aname);
+        CHECK(ret, FAIL, "H5Adelete");
+    }
+
+    /* Close IDs */
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Pclose(gcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+} /* test_attr_bug2() */
+
+/****************************************************************
+**
+** test_attr_bug3(): Test basic H5A (attribute) code.
+** Tests creating and deleting attributes which use a
+** datatype and/or dataspace stored in the same object
+** header.
+**
+****************************************************************/
+static void
+test_attr_bug3(hid_t fcpl, hid_t fapl)
+{
+    hid_t   fid;                                  /* File ID */
+    hid_t   aid1, aid2;                           /* Attribute IDs */
+    hid_t   sid1, sid2;                           /* Dataspace ID */
+    hid_t   tid1, tid2;                           /* Datatype IDs */
+    hid_t   did;                                  /* Dataset ID */
+    hsize_t dims1[2] = {2, 2}, dims2[2] = {3, 3}; /* Dimensions */
+    int     wdata1[2][2];
+    unsigned wdata2[3][3]; /* Write buffers */
+    unsigned u, v;         /* Local index variables */
+    herr_t   ret;          /* Generic return status */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Attributes in the Same Header as their Datatypes\n"));
+
+    /* Create dataspaces */
+    sid1 = H5Screate_simple(2, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+    sid2 = H5Screate_simple(2, dims2, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Create file to operate on */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create datatypes and commit tid1 */
+    tid1 = H5Tcopy(H5T_STD_I16BE);
+    CHECK(tid1, FAIL, "H5Tcopy");
+    tid2 = H5Tcopy(H5T_STD_U64LE);
+    CHECK(tid2, FAIL, "H5Tcopy");
+    ret = H5Tcommit2(fid, "dtype", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+
+    /* Create dataset */
+    did = H5Dcreate2(fid, "dset", tid2, sid2, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dcreate2");
+
+    /* Create attribute on datatype, using that datatype as its datatype */
+    aid1 = H5Acreate2(tid1, "attr", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid1, FAIL, "H5Acreate2");
+
+    /* Create attribute on dataset, using its datatype and dataspace */
+    aid2 = H5Acreate2(did, "attr", tid2, sid2, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid2, FAIL, "H5Acreate2");
+
+    /* Close attributes */
+    ret = H5Aclose(aid1);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Aclose(aid2);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Reopen attributes */
+    aid1 = H5Aopen(tid1, "attr", H5P_DEFAULT);
+    CHECK(aid1, FAIL, "H5Aopen");
+    aid2 = H5Aopen(did, "attr", H5P_DEFAULT);
+    CHECK(aid2, FAIL, "H5Aopen");
+
+    /* Initialize the write buffers */
+    for (u = 0; u < dims1[0]; u++)
+        for (v = 0; v < dims1[1]; v++)
+            wdata1[u][v] = (int)((u * dims1[1]) + v);
+    for (u = 0; u < dims2[0]; u++)
+        for (v = 0; v < dims2[1]; v++)
+            wdata2[u][v] = (unsigned)((u * dims2[1]) + v);
+
+    /* Write data to the attributes */
+    ret = H5Awrite(aid1, H5T_NATIVE_INT, wdata1);
+    CHECK(ret, FAIL, "H5Awrite");
+    ret = H5Awrite(aid2, H5T_NATIVE_UINT, wdata2);
+    CHECK(ret, FAIL, "H5Awrite");
+
+    /* Close attributes */
+    ret = H5Aclose(aid1);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Aclose(aid2);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Delete attributes */
+    ret = H5Adelete(tid1, "attr");
+    CHECK(ret, FAIL, "H5Adelete");
+    ret = H5Adelete(did, "attr");
+    CHECK(ret, FAIL, "H5Adelete");
+
+    /* Recreate attributes */
+    aid1 = H5Acreate2(tid1, "attr", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid1, FAIL, "H5Acreate2");
+    aid2 = H5Acreate2(did, "attr", tid2, sid2, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid2, FAIL, "H5Acreate2");
+
+    /* Delete attributes (note they are still open) */
+    ret = H5Adelete(tid1, "attr");
+    CHECK(ret, FAIL, "H5Adelete");
+    ret = H5Adelete(did, "attr");
+    CHECK(ret, FAIL, "H5Adelete");
+
+    /* Close dataspaces and transient datatype */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close dataset and committed datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Unlink dataset and committed datatype */
+    ret = H5Ldelete(fid, "dtype", H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Ldelete");
+    ret = H5Ldelete(fid, "dset", H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Ldelete");
+
+    /* Close attributes */
+    ret = H5Aclose(aid1);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Aclose(aid2);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_bug3() */
+
+/****************************************************************
+**
+** test_attr_bug4(): Test basic H5A (attribute) code.
+** Attempts to trigger a bug which would result in being
+** unable to add an attribute to a named datatype. This
+** happened when an object header chunk was too small to
+** hold a continuation message and could not be extended.
+**
+****************************************************************/
+static void
+test_attr_bug4(hid_t fcpl, hid_t fapl)
+{
+    hid_t   fid;              /* File ID */
+    hid_t   gid;              /* Group ID */
+    hid_t   aid1, aid2, aid3; /* Attribute IDs */
+    hid_t   sid;              /* Dataspace ID */
+    hid_t   tid;              /* Datatype ID */
+    hid_t   did;              /* Dataset ID */
+    hsize_t dims[1] = {5};    /* Attribute dimensions */
+    herr_t  ret;              /* Generic return status */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing that attributes can always be added to named datatypes\n"));
+
+    /* Create dataspace */
+    sid = H5Screate_simple(1, dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Open root group */
+    gid = H5Gopen2(fid, "/", H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gopen2");
+
+    /* Create committed datatype */
+    tid = H5Tcopy(H5T_STD_I32LE);
+    CHECK(tid, FAIL, "H5Tcopy");
+    ret = H5Tcommit2(fid, "dtype", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+
+    /* Create dataset */
+    did = H5Dcreate2(fid, "dset", tid, sid, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dcreate2");
+
+    /* Create attributes on group and dataset */
+    aid1 = H5Acreate2(gid, "attr", tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid1, FAIL, "H5Acreate2");
+    aid2 = H5Acreate2(did, "attr", tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid2, FAIL, "H5Acreate2");
+
+    /* Create attribute on datatype (this is the main test) */
+    aid3 = H5Acreate2(tid, "attr", tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid3, FAIL, "H5Acreate2");
+
+    /* Close IDs */
+    ret = H5Aclose(aid3);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    ret = H5Aclose(aid2);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    ret = H5Aclose(aid1);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_attr_bug4() */
+
+/****************************************************************
+**
+** test_attr_bug5(): Test basic H5A (attribute) code.
+** Tests opening an attribute multiple times through
+** objects opened through different file handles.
+**
+****************************************************************/
+static void
+test_attr_bug5(hid_t fcpl, hid_t fapl)
+{
+    hid_t   fid1, fid2;                                 /* File IDs */
+    hid_t   gid1, gid2;                                 /* Group IDs */
+    hid_t   did1, did2;                                 /* Dataset IDs */
+    hid_t   tid1, tid2;                                 /* Datatype IDs */
+    hid_t   aidg1, aidg2, aidd1, aidd2, aidt1, aidt2;   /* Attribute IDs */
+    hid_t   sid;                                        /* Dataspace ID */
+    hsize_t dims[1] = {5};                              /* Attribute dimensions */
+    herr_t  ret;                                        /* Generic return status */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Opening an Attribute Through Multiple Files Concurrently\n"));
+
+    /* Create dataspace ID for attributes and datasets */
+    sid = H5Screate_simple(1, dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Open root group */
+    gid1 = H5Gopen2(fid1, "/", H5P_DEFAULT);
+    CHECK(gid1, FAIL, "H5Gopen2");
+
+    /* Create and commit datatype */
+    tid1 = H5Tcopy(H5T_STD_I32LE);
+    CHECK(tid1, FAIL, "H5Tcopy");
+    ret = H5Tcommit2(fid1, BUG3_DT_NAME, tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+
+    /* Create dataset */
+    did1 = H5Dcreate2(fid1, BUG3_DSET_NAME, tid1, sid, H5P_DEFAULT, dcpl_g, H5P_DEFAULT);
+    CHECK(did1, FAIL, "H5Dcreate2");
+
+    /* Create attribute on root group */
+    aidg1 = H5Acreate2(gid1, BUG3_ATTR_NAME, tid1, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aidg1, FAIL, "H5Acreate2");
+
+    /* Create attribute on dataset */
+    aidd1 = H5Acreate2(did1, BUG3_ATTR_NAME, tid1, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aidd1, FAIL, "H5Acreate2");
+
+    /* Create attribute on datatype */
+    aidt1 = H5Acreate2(tid1, BUG3_ATTR_NAME, tid1, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aidt1, FAIL, "H5Acreate2");
+
+    /* Close all IDs */
+    ret = H5Aclose(aidt1);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Aclose(aidd1);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Aclose(aidg1);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Dclose(did1);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+    ret = H5Gclose(gid1);
+    CHECK(ret, FAIL, "H5Gclose");
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Open file twice */
+    fid1 = H5Fopen(FILENAME, H5F_ACC_RDONLY, fapl);
+    CHECK(fid1, FAIL, "H5Fopen");
+    fid2 = H5Fopen(FILENAME, H5F_ACC_RDONLY, fapl);
+    CHECK(fid2, FAIL, "H5Fopen");
+
+    /* Open the root group twice */
+    gid1 = H5Gopen2(fid1, "/", H5P_DEFAULT);
+    CHECK(gid1, FAIL, "H5Gopen2");
+    gid2 = H5Gopen2(fid2, "/", H5P_DEFAULT);
+    CHECK(gid2, FAIL, "H5Gopen2");
+
+    /* Open the root group attribute twice */
+    aidg1 = H5Aopen(gid1, BUG3_ATTR_NAME, H5P_DEFAULT);
+    CHECK(aidg1, FAIL, "H5Aopen");
+    aidg2 = H5Aopen(gid2, BUG3_ATTR_NAME, H5P_DEFAULT);
+    CHECK(aidg2, FAIL, "H5Aopen");
+
+    /* Open the dataset twice */
+    did1 = H5Dopen2(fid1, BUG3_DSET_NAME, H5P_DEFAULT);
+    CHECK(did1, FAIL, "H5Dopen2");
+    did2 = H5Dopen2(fid2, BUG3_DSET_NAME, H5P_DEFAULT);
+    CHECK(did2, FAIL, "H5Dopen2");
+
+    /* Open the dataset attribute twice */
+    aidd1 = H5Aopen(did1, BUG3_ATTR_NAME, H5P_DEFAULT);
+    CHECK(aidd1, FAIL, "H5Aopen");
+    aidd2 = H5Aopen(did2, BUG3_ATTR_NAME, H5P_DEFAULT);
+    CHECK(aidd2, FAIL, "H5Aopen");
+
+    /* Open the datatype twice */
+    tid1 = H5Topen2(fid1, BUG3_DT_NAME, H5P_DEFAULT);
+    CHECK(tid1, FAIL, "H5Topen2");
+    tid2 = H5Topen2(fid2, BUG3_DT_NAME, H5P_DEFAULT);
+    CHECK(tid2, FAIL, "H5Topen2");
+
+    /* Open the datatype attribute twice */
+    aidt1 = H5Aopen(tid1, BUG3_ATTR_NAME, H5P_DEFAULT);
+    CHECK(aidt1, FAIL, "H5Aopen");
+    aidt2 = H5Aopen(tid2, BUG3_ATTR_NAME, H5P_DEFAULT);
+    CHECK(aidt2, FAIL, "H5Aopen");
+
+    /* Close all attributes */
+    ret = H5Aclose(aidg1);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Aclose(aidg2);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Aclose(aidd1);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Aclose(aidd2);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Aclose(aidt1);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Aclose(aidt2);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close root groups */
+    ret = H5Gclose(gid1);
+    CHECK(ret, FAIL, "H5Gclose");
+    ret = H5Gclose(gid2);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Close datasets */
+    ret = H5Dclose(did1);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(did2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatypes */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close files */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+    ret = H5Fclose(fid2);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_bug5() */
+
+/****************************************************************
+**
+** test_attr_bug6(): Test basic H5A (attribute) code.
+** Tests if reading an empty attribute is OK.
+**
+****************************************************************/
+static void
+test_attr_bug6(hid_t fcpl, hid_t fapl)
+{
+    hid_t   file_id;                            /* File ID */
+    hid_t   root_id;                            /* Root group ID */
+    hid_t   attr_w, attr_r;                     /* Attribute IDs (create / reopen) */
+    hid_t   space_id;                           /* Dataspace ID */
+    hsize_t attr_dims[ATTR1_RANK] = {ATTR1_DIM1}; /* Attribute dimensions */
+    int     read_buf[ATTR1_DIM1];               /* Data reading buffer */
+    herr_t  status;                             /* Generic return status */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing that empty attribute can be read\n"));
+
+    /* Create the test file */
+    file_id = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(file_id, FAIL, "H5Fcreate");
+
+    /* Get a handle on the root group */
+    root_id = H5Gopen2(file_id, "/", H5P_DEFAULT);
+    CHECK(root_id, FAIL, "H5Gopen2");
+
+    /* Set up the attribute's dataspace */
+    space_id = H5Screate_simple(1, attr_dims, NULL);
+    CHECK(space_id, FAIL, "H5Screate_simple");
+
+    /* Attach an attribute to the root group, then close it without writing */
+    attr_w = H5Acreate2(root_id, ATTR1_NAME, H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr_w, FAIL, "H5Acreate2");
+
+    status = H5Aclose(attr_w);
+    CHECK(status, FAIL, "H5Aclose");
+
+    /* Reopen the (never-written) attribute */
+    attr_r = H5Aopen(root_id, ATTR1_NAME, H5P_DEFAULT);
+    CHECK(attr_r, FAIL, "H5Aopen");
+
+    /* Reading the empty attribute must succeed */
+    status = H5Aread(attr_r, H5T_NATIVE_INT, read_buf);
+    CHECK(status, FAIL, "H5Aread");
+
+    /* Release all remaining IDs */
+    status = H5Aclose(attr_r);
+    CHECK(status, FAIL, "H5Aclose");
+
+    status = H5Gclose(root_id);
+    CHECK(status, FAIL, "H5Gclose");
+
+    status = H5Fclose(file_id);
+    CHECK(status, FAIL, "H5Fclose");
+
+    status = H5Sclose(space_id);
+    CHECK(status, FAIL, "H5Sclose");
+} /* test_attr_bug6() */
+
+/****************************************************************
+**
+** test_attr_bug7(): Test basic H5A (attribute) code.
+** (Really tests object header allocation code).
+** Tests creating and deleting attributes in such a way as
+** to change the size of the "chunk #0 size" field.
+** Includes testing "skipping" a possible size of the
+** field, i.e. going from 1 to 4 bytes or 4 to 1 byte.
+**
+****************************************************************/
+#if 0
+/* NOTE(review): this test is compiled out (#if 0); the reason is not visible in
+ * this file section -- confirm before re-enabling. */
+static void
+test_attr_bug7(hid_t fcpl, hid_t fapl)
+{
+ hid_t fid; /* File ID */
+ hid_t aid; /* Attribute ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t tid; /* Datatype ID */
+ hsize_t dims_s = 140; /* Small attribute dimensions */
+ hsize_t dims_l = 65480; /* Large attribute dimensions (presumably chosen to sit just under the 64 KiB message-size limit -- confirm) */
+ H5A_info_t ainfo; /* Attribute info */
+ herr_t ret; /* Generic return status */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing adding and deleting large attributes\n"));
+
+ /* Create committed datatype to operate on. Use a committed datatype so that
+ * there is nothing after the object header and the first chunk can expand and
+ * contract as necessary. */
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+ CHECK(fid, FAIL, "H5Fcreate");
+ tid = H5Tcopy(H5T_STD_I32LE);
+ CHECK(tid, FAIL, "H5Tcopy");
+ ret = H5Tcommit2(fid, TYPE1_NAME, tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /*
+ * Create small attribute
+ */
+ sid = H5Screate_simple(1, &dims_s, NULL);
+ CHECK(sid, FAIL, "H5Screate_simple");
+ aid = H5Acreate2(tid, ATTR1_NAME, H5T_STD_I8LE, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(aid, FAIL, "H5Acreate2");
+
+ /* Close file (close the attribute and datatype as well so the header is
+ * flushed to disk before the reopen below) */
+ ret = H5Aclose(aid);
+ CHECK(ret, FAIL, "H5Aclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Check attribute: data_size is in bytes, which equals the element count
+ * here because the attribute type is 1-byte H5T_STD_I8LE */
+ tid = H5Topen2(fid, TYPE1_NAME, H5P_DEFAULT);
+ CHECK(tid, FAIL, "H5Topen2");
+ ret = H5Aget_info_by_name(tid, ".", ATTR1_NAME, &ainfo, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aget_info_by_name");
+ if (ainfo.data_size != dims_s)
+ TestErrPrintf("attribute data size different: data_size=%llu, should be %llu\n",
+ (long long unsigned)ainfo.data_size, (long long unsigned)dims_s);
+
+ /*
+ * Create another small attribute. Should cause chunk size field to expand by
+ * 1 byte (1->2).
+ */
+ aid = H5Acreate2(tid, ATTR2_NAME, H5T_STD_I8LE, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(aid, FAIL, "H5Acreate2");
+
+ /* Close file */
+ ret = H5Aclose(aid);
+ CHECK(ret, FAIL, "H5Aclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Check attributes */
+ tid = H5Topen2(fid, TYPE1_NAME, H5P_DEFAULT);
+ CHECK(tid, FAIL, "H5Topen2");
+ ret = H5Aget_info_by_name(tid, ".", ATTR1_NAME, &ainfo, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aget_info_by_name");
+ if (ainfo.data_size != dims_s)
+ TestErrPrintf("attribute data size different: data_size=%llu, should be %llu\n",
+ (long long unsigned)ainfo.data_size, (long long unsigned)dims_s);
+ ret = H5Aget_info_by_name(tid, ".", ATTR2_NAME, &ainfo, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aget_info_by_name");
+ if (ainfo.data_size != dims_s)
+ TestErrPrintf("attribute data size different: data_size=%llu, should be %llu\n",
+ (long long unsigned)ainfo.data_size, (long long unsigned)dims_s);
+
+ /*
+ * Create large attribute. Should cause chunk size field to expand by 2 bytes
+ * (2->4).
+ */
+ ret = H5Sset_extent_simple(sid, 1, &dims_l, NULL);
+ CHECK(ret, FAIL, "H5Sset_extent_simple");
+ aid = H5Acreate2(tid, ATTR3_NAME, H5T_STD_I8LE, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(aid, FAIL, "H5Acreate2");
+
+ /* Close file */
+ ret = H5Aclose(aid);
+ CHECK(ret, FAIL, "H5Aclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Check attributes */
+ tid = H5Topen2(fid, TYPE1_NAME, H5P_DEFAULT);
+ CHECK(tid, FAIL, "H5Topen2");
+ ret = H5Aget_info_by_name(tid, ".", ATTR1_NAME, &ainfo, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aget_info_by_name");
+ if (ainfo.data_size != dims_s)
+ TestErrPrintf("attribute data size different: data_size=%llu, should be %llu\n",
+ (long long unsigned)ainfo.data_size, (long long unsigned)dims_s);
+ ret = H5Aget_info_by_name(tid, ".", ATTR2_NAME, &ainfo, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aget_info_by_name");
+ if (ainfo.data_size != dims_s)
+ TestErrPrintf("attribute data size different: data_size=%llu, should be %llu\n",
+ (long long unsigned)ainfo.data_size, (long long unsigned)dims_s);
+ ret = H5Aget_info_by_name(tid, ".", ATTR3_NAME, &ainfo, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aget_info_by_name");
+ if (ainfo.data_size != dims_l)
+ TestErrPrintf("attribute data size different: data_size=%llu, should be %llu\n",
+ (long long unsigned)ainfo.data_size, (long long unsigned)dims_l);
+
+ /*
+ * Delete last two attributes - should merge into a null message that is too
+ * large, causing the chunk size field to shrink by 3 bytes (4->1).
+ */
+ /* NOTE(review): sid was already resized to dims_l above and is not used by
+ * the two H5Adelete calls below; this H5Sset_extent_simple call looks
+ * redundant -- confirm. */
+ ret = H5Sset_extent_simple(sid, 1, &dims_l, NULL);
+ CHECK(ret, FAIL, "H5Sset_extent_simple");
+ ret = H5Adelete(tid, ATTR2_NAME);
+ CHECK(ret, FAIL, "H5Adelete");
+ ret = H5Adelete(tid, ATTR3_NAME);
+ CHECK(ret, FAIL, "H5Adelete");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Check attribute */
+ tid = H5Topen2(fid, TYPE1_NAME, H5P_DEFAULT);
+ CHECK(tid, FAIL, "H5Topen2");
+ ret = H5Aget_info_by_name(tid, ".", ATTR1_NAME, &ainfo, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aget_info_by_name");
+ if (ainfo.data_size != dims_s)
+ TestErrPrintf("attribute data size different: data_size=%llu, should be %llu\n",
+ (long long unsigned)ainfo.data_size, (long long unsigned)dims_s);
+
+ /*
+ * Create large attribute. Should cause chunk size field to expand by 3 bytes
+ * (1->4). (sid still has the large extent dims_l at this point.)
+ */
+ aid = H5Acreate2(tid, ATTR2_NAME, H5T_STD_I8LE, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(aid, FAIL, "H5Acreate2");
+
+ /* Close file */
+ ret = H5Aclose(aid);
+ CHECK(ret, FAIL, "H5Aclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Open file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Check attributes */
+ tid = H5Topen2(fid, TYPE1_NAME, H5P_DEFAULT);
+ CHECK(tid, FAIL, "H5Topen2");
+ ret = H5Aget_info_by_name(tid, ".", ATTR1_NAME, &ainfo, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aget_info_by_name");
+ if (ainfo.data_size != dims_s)
+ TestErrPrintf("attribute data size different: data_size=%llu, should be %llu\n",
+ (long long unsigned)ainfo.data_size, (long long unsigned)dims_s);
+ ret = H5Aget_info_by_name(tid, ".", ATTR2_NAME, &ainfo, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Aget_info_by_name");
+ if (ainfo.data_size != dims_l)
+ TestErrPrintf("attribute data size different: data_size=%llu, should be %llu\n",
+ (long long unsigned)ainfo.data_size, (long long unsigned)dims_l);
+
+ /* Close IDs */
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_bug7() */
+#endif
+
+/****************************************************************
+**
+** test_attr_bug8(): Test basic H5A (attribute) code.
+** (Really tests object header code).
+** Tests adding a link and attribute to a group in such a
+** way as to cause the "chunk #0 size" field to expand
+** when some object header messages are not loaded into
+** cache. Before the bug was fixed, this would prevent
+** these messages from being shifted to the correct
+** position as the expansion algorithm marked them dirty,
+** invalidating the raw form, when there was no native
+** form to encode.
+**
+****************************************************************/
+static void
+test_attr_bug8(hid_t fcpl, hid_t fapl)
+{
+    hid_t       fid;        /* File ID */
+    hid_t       aid;        /* Attribute ID */
+    hid_t       sid;        /* Dataspace ID */
+    hid_t       gid;        /* Group ID */
+    hid_t       oid;        /* Object ID */
+    hsize_t     dims = 256; /* Attribute dimensions */
+    H5O_info2_t oinfo;      /* Object info */
+    H5A_info_t  ainfo;      /* Attribute info */
+    H5O_token_t root_token; /* Root group token */
+    int         cmp_value;  /* Result from H5Otoken_cmp */
+    herr_t      ret;        /* Generic return status */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing attribute expanding object header with undecoded messages\n"));
+
+    /* Create a group to operate on.  The group's object header starts with a
+     * small (1-byte) "chunk #0 size" field that the attribute added below will
+     * force to expand. */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+    gid = H5Gcreate2(fid, GROUP1_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gcreate2");
+
+    /* Get root group token */
+    ret = H5Oget_info3(fid, &oinfo, H5O_INFO_BASIC);
+    CHECK(ret, FAIL, "H5Oget_info3");
+    root_token = oinfo.token;
+
+    /*
+     * Create link to root group
+     */
+    ret = H5Lcreate_hard(fid, "/", gid, LINK1_NAME, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Lcreate_hard");
+
+    /* Close file and group */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Open file */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDONLY, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Check that the hard link resolves back to the root group */
+    gid = H5Gopen2(fid, GROUP1_NAME, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gopen2");
+    oid = H5Oopen(gid, LINK1_NAME, H5P_DEFAULT);
+    CHECK(oid, FAIL, "H5Oopen");
+    ret = H5Oget_info3(oid, &oinfo, H5O_INFO_BASIC);
+    CHECK(ret, FAIL, "H5Oget_info3");
+    ret = H5Otoken_cmp(oid, &oinfo.token, &root_token, &cmp_value);
+    CHECK(ret, FAIL, "H5Otoken_cmp");
+    VERIFY(cmp_value, 0, "H5Otoken_cmp");
+
+    /* Close IDs */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+    ret = H5Oclose(oid);
+    CHECK(ret, FAIL, "H5Oclose");
+
+    /* Open file */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /*
+     * Create attribute. Should cause chunk size field to expand by 1 byte
+     * (1->2).
+     */
+    gid = H5Gopen2(fid, GROUP1_NAME, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gopen2");
+    sid = H5Screate_simple(1, &dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+    aid = H5Acreate2(gid, ATTR1_NAME, H5T_STD_I8LE, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, FAIL, "H5Acreate2");
+
+    /* Close attribute, file and group */
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Open file */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDONLY, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Check that both the link and the attribute survived the expansion */
+    gid = H5Gopen2(fid, GROUP1_NAME, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gopen2");
+    oid = H5Oopen(gid, LINK1_NAME, H5P_DEFAULT);
+    CHECK(oid, FAIL, "H5Oopen");
+    ret = H5Oget_info3(oid, &oinfo, H5O_INFO_BASIC);
+    CHECK(ret, FAIL, "H5Oget_info3");
+    ret = H5Otoken_cmp(oid, &oinfo.token, &root_token, &cmp_value);
+    CHECK(ret, FAIL, "H5Otoken_cmp");
+    VERIFY(cmp_value, 0, "H5Otoken_cmp");
+    ret = H5Aget_info_by_name(gid, ".", ATTR1_NAME, &ainfo, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Aget_info_by_name");
+    if (ainfo.data_size != dims)
+        TestErrPrintf("attribute data size different: data_size=%llu, should be %llu\n",
+                      (long long unsigned)ainfo.data_size, (long long unsigned)dims);
+
+    /* Close IDs */
+    ret = H5Oclose(oid);
+    CHECK(ret, FAIL, "H5Oclose");
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* test_attr_bug8() */
+
+/****************************************************************
+**
+** test_attr_bug9(): Test basic H5A (attribute) code.
+** (Really tests object header code).
+** Tests adding several large attributes to an object until
+** they convert to dense storage. The total size of all
+** attributes is larger than 64K, causing the internal
+** object header code to, after merging the deleted
+** messages in to a NULL message, shrink the object header
+** chunk. Do this twice: once with only attributes in the
+** object header chunk and once with a (small) soft link in
+** the chunk as well. In both cases, the shrunk chunk will
+** initially be too small and a new NULL message must be
+** created.
+**
+****************************************************************/
+static void
+test_attr_bug9(hid_t fcpl, hid_t fapl)
+{
+    hid_t    fid = -1;          /* File ID */
+    hid_t    gid = -1;          /* Group ID */
+    hid_t    aid = -1;          /* Attribute ID */
+    hid_t    sid = -1;          /* Dataspace ID */
+    hsize_t  dims[1] = {32768}; /* Attribute dimensions */
+    int      create_link;       /* Whether to create a soft link */
+    unsigned max_compact;       /* Setting from fcpl */
+    unsigned min_dense;         /* Setting from fcpl */
+    char     aname[11];         /* Attribute name */
+    unsigned i;                 /* Local index variable */
+    herr_t   ret;               /* Generic return status */
+
+    /* Output message about test being performed.
+     * (Fixed: the previous message was copied from test_attr_bug4 and
+     * described attributes on named datatypes, which is not this test.) */
+    MESSAGE(5, ("Testing large attributes converting to dense storage\n"));
+
+    /* Create dataspace */
+    sid = H5Screate_simple(1, dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Obtain attribute phase change settings */
+    ret = H5Pget_attr_phase_change(fcpl, &max_compact, &min_dense);
+    CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+    /* Run with and without the soft link */
+    for (create_link = 0; create_link < 2; create_link++) {
+        /* Create file */
+        fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+        CHECK(fid, FAIL, "H5Fcreate");
+
+        /* Create a subgroup of the root group */
+        gid = H5Gcreate2(fid, "group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(gid, FAIL, "H5Gcreate2");
+
+        /* Close the subgroup */
+        ret = H5Gclose(gid);
+        CHECK(ret, FAIL, "H5Gclose");
+
+        /* Open root group */
+        gid = H5Gopen2(fid, "/", H5P_DEFAULT);
+        CHECK(gid, FAIL, "H5Gopen2");
+
+        /* Create enough attributes to cause a change to dense storage */
+        for (i = 0; i < max_compact + 1; i++) {
+            /* Create attribute */
+            HDsnprintf(aname, sizeof(aname), "%u", i);
+            aid = H5Acreate2(gid, aname, H5T_NATIVE_CHAR, sid, H5P_DEFAULT, H5P_DEFAULT);
+            CHECK(aid, FAIL, "H5Acreate2");
+
+            /* Close attribute */
+            ret = H5Aclose(aid);
+            CHECK(ret, FAIL, "H5Aclose");
+
+            /* Create enough soft links that exactly one goes into chunk 1 if
+             * requested */
+            if (i == 0 && create_link) {
+                ret = H5Lcreate_soft("b", gid, "a", H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(ret, FAIL, "H5Lcreate_soft");
+                ret = H5Lcreate_soft("d", gid, "c", H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(ret, FAIL, "H5Lcreate_soft");
+                ret = H5Lcreate_soft("f", gid, "e", H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(ret, FAIL, "H5Lcreate_soft");
+            } /* end if */
+        }     /* end for */
+
+        /* Close IDs */
+        ret = H5Gclose(gid);
+        CHECK(ret, FAIL, "H5Gclose");
+
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+    } /* end for */
+
+    /* Close dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_attr_bug9() */
+
+/****************************************************************
+**
+** test_attr_bug10(): Test basic H5A (attribute) code.
+** Attempts to trigger a bug which would result in a
+** segfault. Create a vlen attribute through a file
+** handle, then open the same file through a different
+** handle, open the same attribute through the second file
+** handle, then close the second file and attribute
+** handles, then write to the attribute through the first
+** handle.
+**
+****************************************************************/
+static void
+test_attr_bug10(hid_t fcpl, hid_t fapl)
+{
+    hid_t       fid1, fid2;       /* File IDs */
+    hid_t       aid1, aid2;       /* Attribute IDs */
+    hid_t       sid;              /* Dataspace ID */
+    hid_t       tid;              /* Datatype ID */
+    hsize_t     dims[1] = {1};    /* Attribute dimensions */
+    const char *wbuf[1] = {"foo"}; /* Write buffer */
+    herr_t      ret;              /* Generic return status */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing that vlen attributes can be written to after a second file handle is closed\n"));
+
+    /* Create dataspace */
+    sid = H5Screate_simple(1, dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Create VL string datatype */
+    tid = H5Tcopy(H5T_C_S1);
+    CHECK(tid, FAIL, "H5Tcopy");
+    ret = H5Tset_size(tid, H5T_VARIABLE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create attribute on root group */
+    aid1 = H5Acreate2(fid1, "attr", tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid1, FAIL, "H5Acreate2");
+
+    /* Open the same file again */
+    fid2 = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid2, FAIL, "H5Fopen");
+
+    /* Open the same attribute through the second file handle */
+    aid2 = H5Aopen(fid2, "attr", H5P_DEFAULT);
+    CHECK(aid2, FAIL, "H5Aopen");
+
+    /* Close the second attribute and file handles */
+    ret = H5Aclose(aid2);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Fclose(fid2);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Write to the attribute through the first handle.  This is the call the
+     * test exists for (it used to segfault), so verify its result.
+     * (Fixed: the return value was previously not checked.) */
+    ret = H5Awrite(aid1, tid, wbuf);
+    CHECK(ret, FAIL, "H5Awrite");
+
+    /* Close IDs */
+    ret = H5Aclose(aid1);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_attr_bug10() */
+
+/****************************************************************
+**
+** test_attr_delete_dense():
+** This is to verify the error as described in HDFFV-9277
+** is fixed when deleting the last "large" attribute that
+** is stored densely.
+**
+****************************************************************/
+#if 0 /* Native VOL connector only supports large attributes with latest format */
+static void
+test_attr_delete_last_dense(hid_t fcpl, hid_t fapl)
+{
+    hid_t   fid;                    /* File ID */
+    hid_t   gid;                    /* Group ID */
+    hid_t   aid;                    /* Attribute ID */
+    hid_t   sid;                    /* Dataspace ID */
+    hsize_t dim2[2] = {DIM0, DIM1}; /* Dimension sizes */
+    int     i, j;                   /* Local index variables */
+    double *data = NULL;            /* Pointer to the data buffer */
+    herr_t  ret;                    /* Generic return status */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Deleting the last large attribute stored densely\n"));
+
+    /* Create the file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create the group */
+    gid = H5Gcreate2(fid, GRPNAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gcreate");
+
+    /* Create the dataspace */
+    sid = H5Screate_simple(RANK, dim2, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Attach the attribute to the group; presumably large enough to be
+     * stored densely (DIM0/DIM1 defined elsewhere -- confirm sizes there) */
+    aid = H5Acreate2(gid, ATTRNAME, H5T_IEEE_F64LE, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, FAIL, "H5Acreate2");
+
+    /* Allocate the data buffer */
+    data = (double *)HDmalloc((size_t)(DIM0 * DIM1) * sizeof(double));
+    CHECK_PTR(data, "HDmalloc");
+
+    /* Initialize the data with a simple i+j pattern */
+    for (i = 0; i < DIM0; i++)
+        for (j = 0; j < DIM1; j++)
+            *(data + i * DIM1 + j) = i + j;
+
+    /* Write to the attribute */
+    ret = H5Awrite(aid, H5T_NATIVE_DOUBLE, data);
+    CHECK(ret, FAIL, "H5Awrite");
+
+    /* Closing */
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open the file */
+    fid = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open the group */
+    gid = H5Gopen2(fid, GRPNAME, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gopen");
+
+    /* Delete the attribute -- this is the HDFFV-9277 scenario: removing the
+     * last densely-stored attribute must not fail */
+    ret = H5Adelete(gid, ATTRNAME);
+    CHECK(ret, FAIL, "H5Adelete");
+
+    /* Closing */
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free the data buffer */
+    if (data)
+        HDfree(data);
+
+} /* test_attr_delete_last_dense() */
+#endif
+
+/****************************************************************
+**
+** test_attr(): Main H5A (attribute) testing routine.
+**
+****************************************************************/
+void
+test_attr(void)
+{
+    hid_t    fapl = (-1), fapl2 = (-1); /* File access property lists */
+    hid_t    fcpl = (-1), fcpl2 = (-1); /* File creation property lists */
+    hid_t    dcpl = -1;                 /* Dataset creation property list */
+    unsigned new_format;                /* Whether to use the new format or not */
+    unsigned use_shared;                /* Whether to use shared attributes or not */
+    unsigned minimize_dset_oh;          /* Whether to use minimized dataset object headers */
+    herr_t   ret;                       /* Generic return value */
+
+    MESSAGE(5, ("Testing Attributes\n"));
+
+    fapl = H5Pcreate(H5P_FILE_ACCESS);
+    CHECK(fapl, FAIL, "H5Pcreate");
+
+    /* fapl2 uses "latest version of the format" for creating objects in the file */
+    fapl2 = H5Pcopy(fapl);
+    CHECK(fapl2, FAIL, "H5Pcopy");
+    ret = H5Pset_libver_bounds(fapl2, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
+    CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+    fcpl = H5Pcreate(H5P_FILE_CREATE);
+    CHECK(fcpl, FAIL, "H5Pcreate");
+
+    /* files with fcpl2 make all attributes ( > 1 byte) shared
+     * (i.e. all of them :-) */
+    fcpl2 = H5Pcopy(fcpl);
+    CHECK(fcpl2, FAIL, "H5Pcopy");
+    ret = H5Pset_shared_mesg_nindexes(fcpl2, (unsigned)1);
+    CHECK_I(ret, "H5Pset_shared_mesg_nindexes");
+    ret = H5Pset_shared_mesg_index(fcpl2, (unsigned)0, H5O_SHMESG_ATTR_FLAG, (unsigned)1);
+    CHECK_I(ret, "H5Pset_shared_mesg_index");
+
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, H5I_INVALID_HID, "H5Pcreate");
+
+    ret = H5Pset_attr_creation_order(dcpl, H5P_CRT_ORDER_TRACKED);
+    CHECK(ret, FAIL, "");
+
+    /* Publish the DCPL to the file-scope global used by the sub-tests */
+    dcpl_g = dcpl;
+
+    for (minimize_dset_oh = 0; minimize_dset_oh <= 1; minimize_dset_oh++) {
+        /* NOTE(review): the API-test build only exercises the default
+         * (non-minimized) object-header pass; the minimized variant is
+         * skipped here and its setup is disabled in the #if 0 block below */
+        if (minimize_dset_oh != 0)
+            continue;
+
+#if 0
+        if (minimize_dset_oh == 0) {
+            MESSAGE(7, ("testing with default dataset object headers\n"));
+            dcpl_g = H5P_DEFAULT;
+        }
+        else {
+            MESSAGE(7, ("testing with minimzied dataset object headers\n"));
+            dcpl = H5Pcreate(H5P_DATASET_CREATE);
+            CHECK(dcpl, FAIL, "H5Pcreate");
+            ret = H5Pset_dset_no_attrs_hint(dcpl, TRUE);
+            CHECK_I(ret, "H5Pset_dset_no_attrs_hint");
+            dcpl_g = dcpl;
+        }
+#endif
+
+        for (new_format = FALSE; new_format <= TRUE; new_format++) {
+            hid_t my_fapl = fapl;
+
+            /* NOTE(review): likewise, only the old-format pass runs in the
+             * API-test build; the FAPL selection is disabled below */
+            if (new_format)
+                continue;
+
+#if 0
+            /* Set the FAPL for the type of format */
+            if (new_format) {
+                MESSAGE(7, ("testing with new file format\n"));
+                my_fapl = fapl2;
+            }
+            else {
+                MESSAGE(7, ("testing with old file format\n"));
+                my_fapl = fapl;
+            }
+#endif
+
+            /* These next two tests use the same file information */
+            test_attr_basic_write(my_fapl); /* Test basic H5A writing code */
+            test_attr_basic_read(my_fapl);  /* Test basic H5A reading code */
+
+            /* These next two tests use their own file information */
+            test_attr_flush(my_fapl); /* Test H5A I/O in the presence of H5Fflush calls */
+            test_attr_plist(my_fapl); /* Test attribute property lists */
+
+            /* These next two tests use the same file information */
+            test_attr_compound_write(my_fapl); /* Test complex datatype H5A writing code */
+            test_attr_compound_read(my_fapl);  /* Test complex datatype H5A reading code */
+
+            /* These next two tests use the same file information */
+            test_attr_scalar_write(my_fapl); /* Test scalar dataspace H5A writing code */
+            test_attr_scalar_read(my_fapl);  /* Test scalar dataspace H5A reading code */
+
+            /* These next four tests use the same file information */
+            test_attr_mult_write(my_fapl); /* Test H5A writing code for multiple attributes */
+            test_attr_mult_read(my_fapl);  /* Test H5A reading code for multiple attributes */
+            test_attr_iterate(my_fapl);    /* Test H5A iterator code */
+            test_attr_delete(my_fapl);     /* Test H5A code for deleting attributes */
+
+            /* This next test uses its own file information */
+            test_attr_dtype_shared(my_fapl); /* Test using shared dataypes in attributes */
+
+            /* This next test uses its own file information */
+            test_attr_duplicate_ids(my_fapl);
+
+            for (use_shared = FALSE; use_shared <= TRUE; use_shared++) {
+                hid_t my_fcpl;
+
+                /* Shared attributes only make sense with the new format;
+                 * otherwise use the plain FCPL */
+                if (new_format == TRUE && use_shared) {
+                    MESSAGE(7, ("testing with shared attributes\n"));
+                    my_fcpl = fcpl2;
+                }
+                else {
+                    MESSAGE(7, ("testing without shared attributes\n"));
+                    my_fcpl = fcpl;
+                }
+
+                test_attr_big(my_fcpl, my_fapl);        /* Test storing big attribute */
+                test_attr_null_space(my_fcpl, my_fapl); /* Test storing attribute with NULL dataspace */
+                test_attr_deprec(fcpl, my_fapl);        /* Test deprecated API routines */
+                test_attr_many(new_format, my_fcpl, my_fapl); /* Test storing lots of attributes */
+                test_attr_info_null_info_pointer(my_fcpl,
+                                                 my_fapl); /* Test passing a NULL attribute info pointer to
+                                                              H5Aget_info(_by_name/_by_idx) */
+                test_attr_rename_invalid_name(
+                    my_fcpl,
+                    my_fapl); /* Test passing a NULL or empty attribute name to H5Arename(_by_name) */
+                test_attr_get_name_invalid_buf(
+                    my_fcpl, my_fapl); /* Test passing NULL buffer to H5Aget_name(_by_idx) */
+
+                /* New attribute API routine tests */
+                test_attr_info_by_idx(new_format, my_fcpl,
+                                      my_fapl); /* Test querying attribute info by index */
+                test_attr_delete_by_idx(new_format, my_fcpl, my_fapl); /* Test deleting attribute by index */
+                test_attr_iterate2(new_format, my_fcpl,
+                                   my_fapl); /* Test iterating over attributes by index */
+                test_attr_open_by_idx(new_format, my_fcpl, my_fapl);  /* Test opening attributes by index */
+                test_attr_open_by_name(new_format, my_fcpl, my_fapl); /* Test opening attributes by name */
+                test_attr_create_by_name(new_format, my_fcpl, my_fapl); /* Test creating attributes by name */
+
+                /* Tests that address specific bugs */
+                test_attr_bug1(my_fcpl, my_fapl); /* Test odd allocation operations */
+                test_attr_bug2(my_fcpl, my_fapl); /* Test many deleted attributes */
+                test_attr_bug3(my_fcpl, my_fapl); /* Test "self referential" attributes */
+                test_attr_bug4(my_fcpl, my_fapl); /* Test attributes on named datatypes */
+                test_attr_bug5(my_fcpl,
+                               my_fapl); /* Test opening/closing attributes through different file handles */
+                test_attr_bug6(my_fcpl, my_fapl); /* Test reading empty attribute */
+                /* test_attr_bug7 is specific to the "new" object header format,
+                 * and in fact fails if used with the old format due to the
+                 * attributes being larger than 64K */
+                test_attr_bug8(my_fcpl,
+                               my_fapl); /* Test attribute expanding object header with undecoded messages */
+                test_attr_bug9(my_fcpl, my_fapl);  /* Test large attributes converting to dense storage */
+                test_attr_bug10(my_fcpl, my_fapl); /* Test writing an attribute after opening and closing
+                                                      through a different file handle */
+
+                /* tests specific to the "new format" */
+                /* NOTE(review): dead while new_format is forced FALSE above */
+                if (new_format == TRUE) {
+                    /* General attribute tests */
+                    test_attr_dense_create(my_fcpl, my_fapl); /* Test dense attribute storage creation */
+                    test_attr_dense_open(my_fcpl, my_fapl);   /* Test opening attributes in dense storage */
+                    test_attr_dense_delete(my_fcpl, my_fapl); /* Test deleting attributes in dense storage */
+                    test_attr_dense_rename(my_fcpl, my_fapl); /* Test renaming attributes in dense storage */
+                    test_attr_dense_unlink(
+                        my_fcpl, my_fapl); /* Test unlinking object with attributes in dense storage */
+                    test_attr_dense_limits(my_fcpl, my_fapl); /* Test dense attribute storage limits */
+                    test_attr_dense_dup_ids(my_fcpl,
+                                            my_fapl); /* Test duplicated IDs for dense attribute storage */
+
+                    /* Attribute creation order tests */
+                    test_attr_corder_create_basic(
+                        my_fcpl, my_fapl); /* Test creating an object w/attribute creation order info */
+                    test_attr_corder_create_compact(my_fcpl,
+                                                    my_fapl); /* Test compact attribute storage on an object
+                                                                 w/attribute creation order info */
+                    test_attr_corder_create_dense(my_fcpl,
+                                                  my_fapl); /* Test dense attribute storage on an object
+                                                               w/attribute creation order info */
+                    test_attr_corder_create_reopen(my_fcpl,
+                                                   my_fapl); /* Test creating attributes w/reopening file from
+                                                                using new format to using old format */
+                    test_attr_corder_transition(my_fcpl,
+                                                my_fapl); /* Test attribute storage transitions on an object
+                                                             w/attribute creation order info */
+                    test_attr_corder_delete(my_fcpl, my_fapl); /* Test deleting object using dense storage
+                                                                  w/attribute creation order info */
+
+                    /* More complex tests with exclusively both "new format" and "shared" attributes */
+                    if (use_shared == TRUE) {
+                        test_attr_shared_write(
+                            my_fcpl,
+                            my_fapl); /* Test writing to shared attributes in compact & dense storage */
+                        test_attr_shared_rename(
+                            my_fcpl,
+                            my_fapl); /* Test renaming shared attributes in compact & dense storage */
+                        test_attr_shared_delete(
+                            my_fcpl,
+                            my_fapl); /* Test deleting shared attributes in compact & dense storage */
+                        test_attr_shared_unlink(my_fcpl, my_fapl); /* Test unlinking object with shared
+                                                                      attributes in compact & dense storage */
+                    } /* if using shared attributes */
+
+#if 0 /* Native VOL connector only supports large attributes with latest format */
+                    test_attr_delete_last_dense(my_fcpl, my_fapl);
+
+                    /* test_attr_bug7 is specific to the "new" object header format,
+                     * and in fact fails if used with the old format due to the
+                     * attributes being larger than 64K */
+                    test_attr_bug7(my_fcpl,
+                                   my_fapl); /* Test creating and deleting large attributes in ohdr chunk 0 */
+#endif
+
+                } /* if using "new format" */
+            }     /* for unshared/shared attributes */
+        }         /* for old/new format */
+
+        /* NOTE(review): never taken while the minimized-header pass is
+         * skipped by the continue at the top of this loop */
+        if (minimize_dset_oh != 0) {
+            ret = H5Pclose(dcpl);
+            CHECK(ret, FAIL, "H5Pclose");
+            dcpl_g = H5P_DEFAULT;
+        }
+
+    } /* for default/minimized dataset object headers */
+
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close FCPLs */
+    ret = H5Pclose(fcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+    ret = H5Pclose(fcpl2);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close FAPLs */
+    ret = H5Pclose(fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+    ret = H5Pclose(fapl2);
+    CHECK(ret, FAIL, "H5Pclose");
+} /* test_attr() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_attr
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Albert Cheng
+ * July 2, 1998
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_attr(void)
+{
+    /* Remove the test file via the VOL-aware delete routine */
+    H5Fdelete(FILENAME, H5P_DEFAULT);
+}
diff --git a/test/API/tchecksum.c b/test/API/tchecksum.c
new file mode 100644
index 0000000..a77ffcd
--- /dev/null
+++ b/test/API/tchecksum.c
@@ -0,0 +1,251 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*-------------------------------------------------------------------------
+ *
+ * Created: tchecksum.c
+ * Aug 21 2006
+ * Quincey Koziol
+ *
+ * Purpose: Test internal checksum routine(s)
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/***********/
+/* Headers */
+/***********/
+#include "testhdf5.h"
+
+/**********/
+/* Macros */
+/**********/
+#define BUF_LEN 3093 /* No particular value */
+
+/*******************/
+/* Local variables */
+/*******************/
+
+/****************************************************************
+**
+** test_chksum_size_one(): Checksum 1 byte buffer
+**
+****************************************************************/
+static void
+test_chksum_size_one(void)
+{
+    uint32_t sum;           /* Computed checksum value */
+    uint8_t  buf[1] = {23}; /* One-byte input buffer */
+
+    /* Verify each algorithm against its known value for the non-zero buffer */
+    sum = H5_checksum_fletcher32(buf, sizeof(buf));
+    VERIFY(sum, 0x17001700, "H5_checksum_fletcher32");
+
+    sum = H5_checksum_crc(buf, sizeof(buf));
+    VERIFY(sum, 0xfa2568b7, "H5_checksum_crc");
+
+    sum = H5_checksum_lookup3(buf, sizeof(buf), 0);
+    VERIFY(sum, 0xa209c931, "H5_checksum_lookup3");
+
+    /* Zero the buffer and verify the all-zero-data values */
+    HDmemset(buf, 0, sizeof(buf));
+    sum = H5_checksum_fletcher32(buf, sizeof(buf));
+    VERIFY(sum, 0, "H5_checksum_fletcher32");
+
+    sum = H5_checksum_crc(buf, sizeof(buf));
+    VERIFY(sum, 0xfa60fb57, "H5_checksum_crc");
+
+    sum = H5_checksum_lookup3(buf, sizeof(buf), 0);
+    VERIFY(sum, 0x8ba9414b, "H5_checksum_lookup3");
+} /* test_chksum_size_one() */
+
+/****************************************************************
+**
+** test_chksum_size_two(): Checksum 2 byte buffer
+**
+****************************************************************/
+static void
+test_chksum_size_two(void)
+{
+    uint32_t sum;                /* Computed checksum value */
+    uint8_t  buf[2] = {23, 187}; /* Two-byte input buffer */
+
+    /* Verify each algorithm against its known value for the non-zero buffer */
+    sum = H5_checksum_fletcher32(buf, sizeof(buf));
+    VERIFY(sum, 0x17bb17bb, "H5_checksum_fletcher32");
+
+    sum = H5_checksum_crc(buf, sizeof(buf));
+    VERIFY(sum, 0xfc856608, "H5_checksum_crc");
+
+    sum = H5_checksum_lookup3(buf, sizeof(buf), 0);
+    VERIFY(sum, 0x8ba7a6c9, "H5_checksum_lookup3");
+
+    /* Zero the buffer and verify the all-zero-data values */
+    HDmemset(buf, 0, sizeof(buf));
+    sum = H5_checksum_fletcher32(buf, sizeof(buf));
+    VERIFY(sum, 0, "H5_checksum_fletcher32");
+
+    sum = H5_checksum_crc(buf, sizeof(buf));
+    VERIFY(sum, 0xfc7e9b20, "H5_checksum_crc");
+
+    sum = H5_checksum_lookup3(buf, sizeof(buf), 0);
+    VERIFY(sum, 0x62cd61b3, "H5_checksum_lookup3");
+} /* test_chksum_size_two() */
+
+/****************************************************************
+**
+** test_chksum_size_three(): Checksum 3 byte buffer
+**
+****************************************************************/
+static void
+test_chksum_size_three(void)
+{
+    uint32_t sum;                    /* Computed checksum value */
+    uint8_t  buf[3] = {23, 187, 98}; /* Three-byte input buffer */
+
+    /* Verify each algorithm against its known value for the non-zero buffer */
+    sum = H5_checksum_fletcher32(buf, sizeof(buf));
+    VERIFY(sum, 0x917679bb, "H5_checksum_fletcher32");
+
+    sum = H5_checksum_crc(buf, sizeof(buf));
+    VERIFY(sum, 0xfebc5d70, "H5_checksum_crc");
+
+    sum = H5_checksum_lookup3(buf, sizeof(buf), 0);
+    VERIFY(sum, 0xcebdf4f0, "H5_checksum_lookup3");
+
+    /* Zero the buffer and verify the all-zero-data values */
+    HDmemset(buf, 0, sizeof(buf));
+    sum = H5_checksum_fletcher32(buf, sizeof(buf));
+    VERIFY(sum, 0, "H5_checksum_fletcher32");
+
+    sum = H5_checksum_crc(buf, sizeof(buf));
+    VERIFY(sum, 0xf9cc4c7a, "H5_checksum_crc");
+
+    sum = H5_checksum_lookup3(buf, sizeof(buf), 0);
+    VERIFY(sum, 0x6bd0060f, "H5_checksum_lookup3");
+} /* test_chksum_size_three() */
+
+/****************************************************************
+**
+** test_chksum_size_four(): Checksum 4 byte buffer
+**
+****************************************************************/
+static void
+test_chksum_size_four(void)
+{
+    uint32_t sum;                         /* Computed checksum value */
+    uint8_t  buf[4] = {23, 187, 98, 217}; /* Four-byte input buffer */
+
+    /* Verify each algorithm against its known value for the non-zero buffer */
+    sum = H5_checksum_fletcher32(buf, sizeof(buf));
+    VERIFY(sum, 0x924f7a94, "H5_checksum_fletcher32");
+
+    sum = H5_checksum_crc(buf, sizeof(buf));
+    VERIFY(sum, 0xff398a46, "H5_checksum_crc");
+
+    sum = H5_checksum_lookup3(buf, sizeof(buf), 0);
+    VERIFY(sum, 0x2c88bb51, "H5_checksum_lookup3");
+
+    /* Zero the buffer and verify the all-zero-data values */
+    HDmemset(buf, 0, sizeof(buf));
+    sum = H5_checksum_fletcher32(buf, sizeof(buf));
+    VERIFY(sum, 0, "H5_checksum_fletcher32");
+
+    sum = H5_checksum_crc(buf, sizeof(buf));
+    VERIFY(sum, 0xff117081, "H5_checksum_crc");
+
+    sum = H5_checksum_lookup3(buf, sizeof(buf), 0);
+    VERIFY(sum, 0x049396b8, "H5_checksum_lookup3");
+} /* test_chksum_size_four() */
+
+/****************************************************************
+**
+** test_chksum_large(): Checksum larger buffer
+**
+****************************************************************/
+static void
+test_chksum_large(void)
+{
+    uint8_t *large_buf; /* Buffer for checksum calculations */
+    uint32_t chksum;    /* Checksum value */
+    size_t   u;         /* Local index variable */
+
+    /* Allocate the buffer */
+    large_buf = (uint8_t *)HDmalloc((size_t)BUF_LEN);
+    CHECK_PTR(large_buf, "HDmalloc");
+    /* CHECK_PTR records the failure but (presumably) does not abort the
+     * test; bail out here rather than dereference a NULL pointer below */
+    if (!large_buf)
+        return;
+
+    /* Initialize buffer w/known data */
+    for (u = 0; u < (size_t)BUF_LEN; u++)
+        large_buf[u] = (uint8_t)(u * 3);
+
+    /* Buffer w/real data */
+    chksum = H5_checksum_fletcher32(large_buf, (size_t)BUF_LEN);
+    VERIFY(chksum, 0x85b4e2a, "H5_checksum_fletcher32");
+
+    chksum = H5_checksum_crc(large_buf, (size_t)BUF_LEN);
+    VERIFY(chksum, 0xfbd0f7c0, "H5_checksum_crc");
+
+    chksum = H5_checksum_lookup3(large_buf, (size_t)BUF_LEN, 0);
+    VERIFY(chksum, 0x1bd2ee7b, "H5_checksum_lookup3");
+
+    /* Buffer w/zero(s) for data */
+    HDmemset(large_buf, 0, (size_t)BUF_LEN);
+    chksum = H5_checksum_fletcher32(large_buf, (size_t)BUF_LEN);
+    VERIFY(chksum, 0, "H5_checksum_fletcher32");
+
+    chksum = H5_checksum_crc(large_buf, (size_t)BUF_LEN);
+    VERIFY(chksum, 0xfac8b4c4, "H5_checksum_crc");
+
+    chksum = H5_checksum_lookup3(large_buf, (size_t)BUF_LEN, 0);
+    VERIFY(chksum, 0x930c7afc, "H5_checksum_lookup3");
+
+    /* Release memory for buffer */
+    HDfree(large_buf);
+} /* test_chksum_large() */
+
+/****************************************************************
+**
+** test_checksum(): Main checksum testing routine.
+**
+****************************************************************/
+void
+test_checksum(void)
+{
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing checksum algorithms\n"));
+
+    /* Exercise fletcher32, CRC and lookup3 on buffers of increasing size */
+    test_chksum_size_one();   /* Test buffer w/only 1 byte */
+    test_chksum_size_two();   /* Test buffer w/only 2 bytes */
+    test_chksum_size_three(); /* Test buffer w/only 3 bytes */
+    test_chksum_size_four();  /* Test buffer w/only 4 bytes */
+    test_chksum_large();      /* Test buffer w/larger # of bytes */
+
+} /* test_checksum() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_checksum
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Quincey Koziol
+ * August 21, 2006
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_checksum(void)
+{
+    /* The checksum tests operate entirely in memory -- no file to clean */
+}
diff --git a/test/API/tconfig.c b/test/API/tconfig.c
new file mode 100644
index 0000000..fdab5ef
--- /dev/null
+++ b/test/API/tconfig.c
@@ -0,0 +1,199 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: tconfig
+ *
+ * Test the definitions in the H5config.h as much as possible
+ *
+ *************************************************************/
+
+#include "testhdf5.h"
+
+/* macros definitions */
+/* verify C int type: verify the size of signed and unsigned int type
+ * with the macro size.
+ */
+#define vrfy_cint_type(ctype, uctype, ctype_macro) \
+ /* check signed type size */ \
+ vrfy_macrosize(ctype, ctype_macro, #ctype_macro); \
+ /* check unsigned type size */ \
+ vrfy_macrosize(uctype, ctype_macro, #ctype_macro);
+
+/* verify C type sizes: verify the sizeof type with the macro size. */
+#define vrfy_ctype(type, macro) vrfy_macrosize(type, macro, #macro);
+
+/* verify if the sizeof(type) matches size defined in macro. */
+/* Needs this extra step so that we can print the macro name. */
+#define vrfy_macrosize(type, macro, macroname) \
+ if (sizeof(type) != (macro)) \
+ TestErrPrintf("Error: sizeof(%s) is %zu but %s is %d\n", #type, sizeof(type), macroname, \
+ (int)(macro));
+
+/* local routine prototypes */
+void test_config_ctypes(void);
+void test_exit_definitions(void);
+
+/*-------------------------------------------------------------------------
+ * Function: test_configure
+ *
+ * Purpose: Main configure definitions testing routine
+ *
+ * Return: none (error is fed back via global variable num_errs)
+ *
+ * Programmer: Albert Cheng
+ * September 25, 2001
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+test_configure(void)
+{
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing configure definitions\n"));
+    /* Verify the H5config.h-derived type sizes and exit-code macros */
+    test_config_ctypes();
+    test_exit_definitions();
+}
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_configure
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Albert Cheng
+ * September 25, 2001
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_configure(void)
+{
+    /* The configure tests create no files -- nothing to clean */
+}
+
+/*-------------------------------------------------------------------------
+ * Function: test_config_ctypes
+ *
+ * Purpose: test C language data type sizes
+ *
+ * Return: none (error is fed back via global variable num_errs)
+ *
+ * Programmer: Albert Cheng
+ * September 25, 2001
+ *
+ * Modifications:
+ * Albert Cheng, 2004/10/14
+ * Verified both signed and unsigned int types.
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+test_config_ctypes(void)
+{
+    /* standard C89 basic types */
+    /* char, signed char, unsigned char are three distinct types. */
+    vrfy_ctype(char, H5_SIZEOF_CHAR);
+    vrfy_cint_type(signed char, unsigned char, H5_SIZEOF_CHAR);
+    vrfy_cint_type(int, unsigned int, H5_SIZEOF_INT);
+    vrfy_cint_type(short, unsigned short, H5_SIZEOF_SHORT);
+    vrfy_cint_type(long, unsigned long, H5_SIZEOF_LONG);
+    vrfy_ctype(float, H5_SIZEOF_FLOAT);
+    vrfy_ctype(double, H5_SIZEOF_DOUBLE);
+    vrfy_ctype(long double, H5_SIZEOF_LONG_DOUBLE);
+
+    /* standard C99 basic types */
+    vrfy_cint_type(long long, unsigned long long, H5_SIZEOF_LONG_LONG);
+    vrfy_cint_type(int8_t, uint8_t, H5_SIZEOF_INT8_T);
+    vrfy_cint_type(int16_t, uint16_t, H5_SIZEOF_INT16_T);
+    vrfy_cint_type(int32_t, uint32_t, H5_SIZEOF_INT32_T);
+    vrfy_cint_type(int64_t, uint64_t, H5_SIZEOF_INT64_T);
+
+    /* Some vendors have different sizes for the signed and unsigned */
+    /* fast8_t. Need to check them individually. */
+    /* Each H5_SIZEOF_* macro is 0 when configure found the type absent,
+     * so the checks below are compiled only for types that exist. */
+#if H5_SIZEOF_INT_FAST8_T > 0
+    vrfy_ctype(int_fast8_t, H5_SIZEOF_INT_FAST8_T);
+#endif
+
+#if H5_SIZEOF_UINT_FAST8_T > 0
+    vrfy_ctype(uint_fast8_t, H5_SIZEOF_UINT_FAST8_T);
+#endif
+
+#if H5_SIZEOF_INT_FAST16_T > 0
+    vrfy_cint_type(int_fast16_t, uint_fast16_t, H5_SIZEOF_INT_FAST16_T);
+#endif
+
+#if H5_SIZEOF_INT_FAST32_T > 0
+    vrfy_cint_type(int_fast32_t, uint_fast32_t, H5_SIZEOF_INT_FAST32_T);
+#endif
+
+#if H5_SIZEOF_INT_FAST64_T > 0
+    vrfy_cint_type(int_fast64_t, uint_fast64_t, H5_SIZEOF_INT_FAST64_T);
+#endif
+
+#if H5_SIZEOF_INT_LEAST8_T > 0
+    vrfy_cint_type(int_least8_t, uint_least8_t, H5_SIZEOF_INT_LEAST8_T);
+#endif
+
+#if H5_SIZEOF_INT_LEAST16_T > 0
+    vrfy_cint_type(int_least16_t, uint_least16_t, H5_SIZEOF_INT_LEAST16_T);
+#endif
+
+#if H5_SIZEOF_INT_LEAST32_T > 0
+    vrfy_cint_type(int_least32_t, uint_least32_t, H5_SIZEOF_INT_LEAST32_T);
+#endif
+
+#if H5_SIZEOF_INT_LEAST64_T > 0
+    vrfy_cint_type(int_least64_t, uint_least64_t, H5_SIZEOF_INT_LEAST64_T);
+#endif
+
+#if H5_SIZEOF_OFF_T > 0
+    vrfy_ctype(off_t, H5_SIZEOF_OFF_T);
+#endif
+
+#if H5_SIZEOF_SIZE_T > 0
+    vrfy_ctype(size_t, H5_SIZEOF_SIZE_T);
+#endif
+
+#if H5_SIZEOF_SSIZE_T > 0
+    vrfy_ctype(ssize_t, H5_SIZEOF_SSIZE_T);
+#endif
+}
+
+/*-------------------------------------------------------------------------
+ * Function: test_exit_definitions
+ *
+ * Purpose: test the exit macros values
+ *
+ * Return: none (error is fed back via global variable num_errs)
+ *
+ * Programmer: Albert Cheng
+ * October 12, 2009
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+test_exit_definitions(void)
+{
+    /* Verify the EXIT_SUCCESS and EXIT_FAILURE are 0 and 1 respectively. */
+    /* This should be true for POSIX compliant systems. */
+    /* (The C standard only guarantees EXIT_SUCCESS/EXIT_FAILURE exist,
+     * not their numeric values, hence the explicit check.) */
+    if (EXIT_SUCCESS != 0)
+        TestErrPrintf("Error: EXIT_SUCCESS is %d, should be %d\n", EXIT_SUCCESS, 0);
+    if (EXIT_FAILURE != 1)
+        TestErrPrintf("Error: EXIT_FAILURE is %d, should be %d\n", EXIT_FAILURE, 1);
+}
diff --git a/test/API/tcoords.c b/test/API/tcoords.c
new file mode 100644
index 0000000..9c66b40
--- /dev/null
+++ b/test/API/tcoords.c
@@ -0,0 +1,724 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: tcoords
+ *
+ * Test the element coordinates for dataspace selection. For
+ * a chunked dataset, when the hyperslab selection of some
+ * dimensions is full, the library optimizes it by "flattening"
+ * the fully selected dimensions. This program tests whether the
+ * coordinates of selected elements are correctly calculated.
+ *
+ *************************************************************/
+
+#include "testhdf5.h"
+
+#define FILENAME "coord.h5"
+
+#define SINGLE_END_DSET "single_end_dset"
+#define MULTI_ENDS_SEL_HYPER_DSET "multiple_ends_dset"
+
+#define NAME_LEN 128
+
+/* Data written to the dataset for single block test. Global variable
+ * for convenience. */
+int da_buffer[2][3][6][2];
+
+/***********************************************************
+**
+** test_singleEnd_selElements(): Test element selection of only
+** one block.
+**
+*************************************************************/
+static void
+test_singleEnd_selElements(hid_t file, hbool_t is_chunked)
+{
+ hid_t sid, plid, did, msid;
+ char dset_name[NAME_LEN]; /* Dataset name */
+ size_t elmts_numb;
+ herr_t ret; /* Generic error return */
+ int i, j, k;
+ hsize_t da_dims[4] = {2, 3, 6, 2};
+ hsize_t da_chunksize[4] = {1, 3, 3, 2};
+
+ /* For testing the full selection in the fastest-growing end */
+ int mem1_buffer[1][1][6][2];
+ hsize_t mem1_dims[4] = {1, 1, 6, 2};
+ hsize_t da_elmts1[12][4] = {{0, 0, 0, 0}, {0, 0, 0, 1}, {0, 0, 1, 0}, {0, 0, 1, 1},
+ {0, 0, 2, 0}, {0, 0, 2, 1}, {0, 0, 3, 0}, {0, 0, 3, 1},
+ {0, 0, 4, 0}, {0, 0, 4, 1}, {0, 0, 5, 0}, {0, 0, 5, 1}};
+
+ /* For testing the full selection in the slowest-growing end */
+ int mem2_buffer[2][3][1][1];
+ hsize_t mem2_dims[4] = {2, 3, 1, 1};
+ hsize_t da_elmts2[6][4] = {{0, 0, 0, 0}, {0, 1, 0, 0}, {0, 2, 0, 0},
+ {1, 0, 0, 0}, {1, 1, 0, 0}, {1, 2, 0, 0}};
+
+ /* For testing the full selection in the middle dimensions */
+ int mem3_buffer[1][3][6][1];
+ hsize_t mem3_dims[4] = {1, 3, 6, 1};
+ hsize_t da_elmts3[18][4] = {{0, 0, 0, 0}, {0, 0, 1, 0}, {0, 0, 2, 0}, {0, 0, 3, 0}, {0, 0, 4, 0},
+ {0, 0, 5, 0}, {0, 1, 0, 0}, {0, 1, 1, 0}, {0, 1, 2, 0}, {0, 1, 3, 0},
+ {0, 1, 4, 0}, {0, 1, 5, 0}, {0, 2, 0, 0}, {0, 2, 1, 0}, {0, 2, 2, 0},
+ {0, 2, 3, 0}, {0, 2, 4, 0}, {0, 2, 5, 0}};
+
+ /* Create and write the dataset */
+ sid = H5Screate_simple(4, da_dims, da_dims);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ plid = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(plid, FAIL, "H5Pcreate");
+
+ if (is_chunked) {
+ ret = H5Pset_chunk(plid, 4, da_chunksize);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+ }
+
+ /* Construct dataset's name */
+ HDmemset(dset_name, 0, (size_t)NAME_LEN);
+ HDstrcat(dset_name, SINGLE_END_DSET);
+ if (is_chunked)
+ HDstrcat(dset_name, "_chunked");
+
+ did = H5Dcreate2(file, dset_name, H5T_NATIVE_INT, sid, H5P_DEFAULT, plid, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dcreate2");
+
+ /* Initialize the data to be written to file */
+ for (i = 0; i < 2; i++) {
+ for (j = 0; j < 3; j++) {
+ for (k = 0; k < 6; k++) {
+ da_buffer[i][j][k][0] = i * 100 + j * 10 + k;
+ da_buffer[i][j][k][1] = i * 100 + j * 10 + k + 1;
+ }
+ }
+ }
+
+ ret = H5Dwrite(did, H5T_NATIVE_INT, sid, sid, H5P_DEFAULT, da_buffer);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* ****** Case 1: ******
+ * Testing the full selection in the fastest-growing end */
+ did = H5Dopen2(file, dset_name, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dopen2");
+
+ /* Select the elements in the dataset */
+ elmts_numb = 12;
+
+ ret = H5Sselect_elements(sid, H5S_SELECT_SET, elmts_numb, (const hsize_t *)da_elmts1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Dataspace for memory buffer */
+ msid = H5Screate_simple(4, mem1_dims, mem1_dims);
+ CHECK(msid, FAIL, "H5Screate_simple");
+
+ ret = H5Sselect_all(msid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ ret = H5Dread(did, H5T_NATIVE_INT, msid, sid, H5P_DEFAULT, mem1_buffer);
+ CHECK(ret, FAIL, "H5Dread");
+
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Sclose(msid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ for (i = 0; i < 6; i++)
+ for (j = 0; j < 2; j++)
+ if (da_buffer[0][0][i][j] != mem1_buffer[0][0][i][j]) {
+ TestErrPrintf("%u: Read different values than written at index 0,0,%d,%d\n", __LINE__, i, j);
+ }
+
+ /* ****** Case 2: ******
+ * Testing the full selection in the slowest-growing end */
+ did = H5Dopen2(file, dset_name, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dopen2");
+
+ /* Select the elements in the dataset */
+ elmts_numb = 6;
+
+ ret = H5Sselect_elements(sid, H5S_SELECT_SET, elmts_numb, (const hsize_t *)da_elmts2);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Dataspace for memory buffer */
+ msid = H5Screate_simple(4, mem2_dims, mem2_dims);
+ CHECK(msid, FAIL, "H5Screate_simple");
+
+ ret = H5Sselect_all(msid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ ret = H5Dread(did, H5T_NATIVE_INT, msid, sid, H5P_DEFAULT, mem2_buffer);
+ CHECK(ret, FAIL, "H5Dread");
+
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Sclose(msid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ for (i = 0; i < 2; i++)
+ for (j = 0; j < 3; j++)
+ if (da_buffer[i][j][0][0] != mem2_buffer[i][j][0][0]) {
+ TestErrPrintf("%u: Read different values than written at index %d,%d,0,0, da_buffer = %d, "
+ "mem2_buffer = %d\n",
+ __LINE__, i, j, da_buffer[i][j][0][0], mem2_buffer[i][j][0][0]);
+ }
+
+ /* ****** Case 3: ******
+ * Testing the full selection in the middle dimensions */
+ did = H5Dopen2(file, dset_name, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dopen2");
+
+ /* Select the elements in the dataset */
+ elmts_numb = 18;
+
+ ret = H5Sselect_elements(sid, H5S_SELECT_SET, elmts_numb, (const hsize_t *)da_elmts3);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Dataspace for memory buffer */
+ msid = H5Screate_simple(4, mem3_dims, mem3_dims);
+ CHECK(msid, FAIL, "H5Screate_simple");
+
+ ret = H5Sselect_all(msid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ ret = H5Dread(did, H5T_NATIVE_INT, msid, sid, H5P_DEFAULT, mem3_buffer);
+ CHECK(ret, FAIL, "H5Dread");
+
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Sclose(msid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ for (i = 0; i < 3; i++)
+ for (j = 0; j < 6; j++)
+ if (da_buffer[0][i][j][0] != mem3_buffer[0][i][j][0]) {
+ TestErrPrintf("%u: Read different values than written at index 0,%d,%d,0\n", __LINE__, i, j);
+ }
+
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Pclose(plid);
+ CHECK(ret, FAIL, "H5Pclose");
+}
+
+/***********************************************************
+**
+** test_singleEnd_selHyperslab(): Test full hyperslab selection
+** of only one block.
+**
+*************************************************************/
+static void
+test_singleEnd_selHyperslab(hid_t file, hbool_t is_chunked)
+{
+ hid_t sid, did, msid;
+ char dset_name[NAME_LEN]; /* Dataset name */
+ herr_t ret; /* Generic error return */
+ int i, j;
+ hsize_t da_dims[4] = {2, 3, 6, 2};
+
+ /* For testing the full selection in the fastest-growing end */
+ int mem1_buffer[1][1][6][2];
+ hsize_t mem1_dims[4] = {1, 1, 6, 2};
+ hsize_t mem1_start[4] = {0, 0, 0, 0};
+ hsize_t mem1_count[4] = {1, 1, 1, 1};
+ hsize_t mem1_stride[4] = {1, 1, 1, 1};
+ hsize_t mem1_block[4] = {1, 1, 6, 2};
+
+ /* For testing the full selection in the slowest-growing end */
+ int mem2_buffer[2][3][1][1];
+ hsize_t mem2_dims[4] = {2, 3, 1, 1};
+ hsize_t mem2_start[4] = {0, 0, 0, 0};
+ hsize_t mem2_count[4] = {1, 1, 1, 1};
+ hsize_t mem2_stride[4] = {1, 1, 1, 1};
+ hsize_t mem2_block[4] = {2, 3, 1, 1};
+
+ /* For testing the full selection in the middle dimensions */
+ int mem3_buffer[1][3][6][1];
+ hsize_t mem3_dims[4] = {1, 3, 6, 1};
+ hsize_t mem3_start[4] = {0, 0, 0, 0};
+ hsize_t mem3_count[4] = {1, 1, 1, 1};
+ hsize_t mem3_stride[4] = {1, 1, 1, 1};
+ hsize_t mem3_block[4] = {1, 3, 6, 1};
+
+ /* Construct dataset's name */
+ HDmemset(dset_name, 0, NAME_LEN);
+ HDstrcat(dset_name, SINGLE_END_DSET);
+ if (is_chunked)
+ HDstrcat(dset_name, "_chunked");
+
+ /* Dataspace for the dataset in file */
+ sid = H5Screate_simple(4, da_dims, da_dims);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ /* ****** Case 1: ******
+ * Testing the full selection in the fastest-growing end */
+ did = H5Dopen2(file, dset_name, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dopen2");
+
+ /* Select the elements in the dataset */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, mem1_start, mem1_stride, mem1_count, mem1_block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Dataspace for memory buffer */
+ msid = H5Screate_simple(4, mem1_dims, mem1_dims);
+ CHECK(msid, FAIL, "H5Screate_simple");
+
+ ret = H5Sselect_all(msid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ ret = H5Dread(did, H5T_NATIVE_INT, msid, sid, H5P_DEFAULT, mem1_buffer);
+ CHECK(ret, FAIL, "H5Dread");
+
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Sclose(msid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ for (i = 0; i < 6; i++)
+ for (j = 0; j < 2; j++)
+ if (da_buffer[0][0][i][j] != mem1_buffer[0][0][i][j]) {
+ TestErrPrintf("%u: Read different values than written at index 0,0,%d,%d\n", __LINE__, i, j);
+ }
+
+ /* ****** Case 2: ******
+ * Testing the full selection in the slowest-growing end */
+ did = H5Dopen2(file, dset_name, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dopen2");
+
+ /* Select the elements in the dataset */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, mem2_start, mem2_stride, mem2_count, mem2_block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Dataspace for memory buffer */
+ msid = H5Screate_simple(4, mem2_dims, mem2_dims);
+ CHECK(msid, FAIL, "H5Screate_simple");
+
+ ret = H5Sselect_all(msid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ ret = H5Dread(did, H5T_NATIVE_INT, msid, sid, H5P_DEFAULT, mem2_buffer);
+ CHECK(ret, FAIL, "H5Dread");
+
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Sclose(msid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ for (i = 0; i < 2; i++)
+ for (j = 0; j < 3; j++)
+ if (da_buffer[i][j][0][0] != mem2_buffer[i][j][0][0]) {
+ TestErrPrintf("%u: Read different values than written at index %d,%d,0,0\n", __LINE__, i, j);
+ }
+
+ /* ****** Case 3: ******
+ * Testing the full selection in the middle dimensions */
+ did = H5Dopen2(file, dset_name, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dopen2");
+
+ /* Select the elements in the dataset */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, mem3_start, mem3_stride, mem3_count, mem3_block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Dataspace for memory buffer */
+ msid = H5Screate_simple(4, mem3_dims, mem3_dims);
+ CHECK(msid, FAIL, "H5Screate_simple");
+
+ ret = H5Sselect_all(msid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ ret = H5Dread(did, H5T_NATIVE_INT, msid, sid, H5P_DEFAULT, mem3_buffer);
+ CHECK(ret, FAIL, "H5Dread");
+
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Sclose(msid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ for (i = 0; i < 3; i++)
+ for (j = 0; j < 6; j++)
+ if (da_buffer[0][i][j][0] != mem3_buffer[0][i][j][0]) {
+ TestErrPrintf("%u: Read different values than written at index 0,%d,%d,0\n", __LINE__, i, j);
+ }
+
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+}
+
+/***********************************************************
+**
+** test_multiple_ends(): Test full hyperslab selection of
+** multiple blocks.
+**
+*************************************************************/
+static void
+test_multiple_ends(hid_t file, hbool_t is_chunked)
+{
+ hid_t sid, plid, did, msid;
+ char dset_name[NAME_LEN]; /* Dataset name */
+ herr_t ret; /* Generic error return */
+ int i, j, k, l, m, n, p;
+ hsize_t da_dims[8] = {4, 5, 3, 4, 2, 3, 6, 2};
+ hsize_t da_chunksize[8] = {1, 5, 3, 2, 2, 3, 3, 2};
+ struct {
+ int arr[4][5][3][4][2][3][6][2];
+ } *data_buf = NULL;
+
+ /* For testing the full selections in the fastest-growing end and in the middle dimensions */
+ struct {
+ int arr[1][1][1][4][2][1][6][2];
+ } *mem1_buffer = NULL;
+ hsize_t mem1_dims[8] = {1, 1, 1, 4, 2, 1, 6, 2};
+ hsize_t mem1_start[8] = {0, 0, 0, 0, 0, 0, 0, 0};
+ hsize_t mem1_count[8] = {1, 1, 1, 1, 1, 1, 1, 1};
+ hsize_t mem1_stride[8] = {1, 1, 1, 1, 1, 1, 1, 1};
+ hsize_t mem1_block[8] = {1, 1, 1, 4, 2, 1, 6, 2};
+
+ /* For testing the full selections in the slowest-growing end and in the middle dimensions */
+ struct {
+ int arr[4][5][1][4][2][1][1][1];
+ } *mem2_buffer = NULL;
+ hsize_t mem2_dims[8] = {4, 5, 1, 4, 2, 1, 1, 1};
+ hsize_t mem2_start[8] = {0, 0, 0, 0, 0, 0, 0, 0};
+ hsize_t mem2_count[8] = {1, 1, 1, 1, 1, 1, 1, 1};
+ hsize_t mem2_stride[8] = {1, 1, 1, 1, 1, 1, 1, 1};
+ hsize_t mem2_block[8] = {4, 5, 1, 4, 2, 1, 1, 1};
+
+ /* For testing two unadjacent full selections in the middle dimensions */
+ struct {
+ int arr[1][5][3][1][1][3][6][1];
+ } *mem3_buffer = NULL;
+ hsize_t mem3_dims[8] = {1, 5, 3, 1, 1, 3, 6, 1};
+ hsize_t mem3_start[8] = {0, 0, 0, 0, 0, 0, 0, 0};
+ hsize_t mem3_count[8] = {1, 1, 1, 1, 1, 1, 1, 1};
+ hsize_t mem3_stride[8] = {1, 1, 1, 1, 1, 1, 1, 1};
+ hsize_t mem3_block[8] = {1, 5, 3, 1, 1, 3, 6, 1};
+
+ /* For testing the full selections in the fastest-growing end and the slowest-growing end */
+ struct {
+ int arr[4][5][1][1][1][1][6][2];
+ } *mem4_buffer = NULL;
+ hsize_t mem4_dims[8] = {4, 5, 1, 1, 1, 1, 6, 2};
+ hsize_t mem4_start[8] = {0, 0, 0, 0, 0, 0, 0, 0};
+ hsize_t mem4_count[8] = {1, 1, 1, 1, 1, 1, 1, 1};
+ hsize_t mem4_stride[8] = {1, 1, 1, 1, 1, 1, 1, 1};
+ hsize_t mem4_block[8] = {4, 5, 1, 1, 1, 1, 6, 2};
+
+ /* For testing the full selections in the fastest-growing end and slowest-growing end,
+ * also in the middle dimensions */
+ struct {
+ int arr[4][5][1][4][2][1][6][2];
+ } *mem5_buffer = NULL;
+ hsize_t mem5_dims[8] = {4, 5, 1, 4, 2, 1, 6, 2};
+ hsize_t mem5_start[8] = {0, 0, 0, 0, 0, 0, 0, 0};
+ hsize_t mem5_count[8] = {1, 1, 1, 1, 1, 1, 1, 1};
+ hsize_t mem5_stride[8] = {1, 1, 1, 1, 1, 1, 1, 1};
+ hsize_t mem5_block[8] = {4, 5, 1, 4, 2, 1, 6, 2};
+
+ /* Initialize dynamic arrays */
+ data_buf = HDcalloc(1, sizeof(*data_buf));
+ CHECK_PTR(data_buf, "HDcalloc");
+ mem1_buffer = HDcalloc(1, sizeof(*mem1_buffer));
+ CHECK_PTR(mem1_buffer, "HDcalloc");
+ mem2_buffer = HDcalloc(1, sizeof(*mem2_buffer));
+ CHECK_PTR(mem2_buffer, "HDcalloc");
+ mem3_buffer = HDcalloc(1, sizeof(*mem3_buffer));
+ CHECK_PTR(mem3_buffer, "HDcalloc");
+ mem4_buffer = HDcalloc(1, sizeof(*mem4_buffer));
+ CHECK_PTR(mem4_buffer, "HDcalloc");
+ mem5_buffer = HDcalloc(1, sizeof(*mem5_buffer));
+ CHECK_PTR(mem5_buffer, "HDcalloc");
+
+ /* Create and write the dataset */
+ sid = H5Screate_simple(8, da_dims, da_dims);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ plid = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(plid, FAIL, "H5Pcreate");
+
+ if (is_chunked) {
+ ret = H5Pset_chunk(plid, 8, da_chunksize);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+ }
+
+ /* Construct dataset's name */
+ HDmemset(dset_name, 0, NAME_LEN);
+ HDstrcat(dset_name, MULTI_ENDS_SEL_HYPER_DSET);
+ if (is_chunked)
+ HDstrcat(dset_name, "_chunked");
+
+ did = H5Dcreate2(file, dset_name, H5T_NATIVE_INT, sid, H5P_DEFAULT, plid, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dcreate2");
+
+ for (i = 0; i < 4; i++)
+ for (j = 0; j < 5; j++)
+ for (k = 0; k < 3; k++)
+ for (l = 0; l < 4; l++)
+ for (m = 0; m < 2; m++)
+ for (n = 0; n < 3; n++)
+ for (p = 0; p < 6; p++) {
+ data_buf->arr[i][j][k][l][m][n][p][0] =
+ i * 1000000 + j * 100000 + k * 10000 + l * 1000 + m * 100 + n * 10 + p;
+ data_buf->arr[i][j][k][l][m][n][p][1] = i * 1000000 + j * 100000 + k * 10000 +
+ l * 1000 + m * 100 + n * 10 + p + 1;
+ }
+
+ ret = H5Dwrite(did, H5T_NATIVE_INT, sid, sid, H5P_DEFAULT, data_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* ****** Case 1: ******
+ * Testing the full selections in the fastest-growing end and in the middle dimensions*/
+ did = H5Dopen2(file, dset_name, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dopen2");
+
+ /* Select the elements in the dataset */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, mem1_start, mem1_stride, mem1_count, mem1_block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ msid = H5Screate_simple(8, mem1_dims, mem1_dims);
+ CHECK(msid, FAIL, "H5Screate_simple");
+
+ ret = H5Sselect_all(msid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ ret = H5Dread(did, H5T_NATIVE_INT, msid, sid, H5P_DEFAULT, mem1_buffer);
+ CHECK(ret, FAIL, "H5Dread");
+
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Sclose(msid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ for (i = 0; i < 4; i++)
+ for (j = 0; j < 2; j++)
+ for (k = 0; k < 6; k++)
+ for (l = 0; l < 2; l++)
+ if (data_buf->arr[0][0][0][i][j][0][k][l] != mem1_buffer->arr[0][0][0][i][j][0][k][l]) {
+ TestErrPrintf("%u: Read different values than written at index 0,0,0,%d,%d,0,%d,%d\n",
+ __LINE__, i, j, k, l);
+ }
+
+ /* ****** Case 2: ******
+ * Testing the full selections in the slowest-growing end and in the middle dimensions*/
+ did = H5Dopen2(file, dset_name, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dopen2");
+
+ /* Select the elements in the dataset */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, mem2_start, mem2_stride, mem2_count, mem2_block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ msid = H5Screate_simple(8, mem2_dims, mem2_dims);
+ CHECK(msid, FAIL, "H5Screate_simple");
+
+ ret = H5Sselect_all(msid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ ret = H5Dread(did, H5T_NATIVE_INT, msid, sid, H5P_DEFAULT, mem2_buffer);
+ CHECK(ret, FAIL, "H5Dread");
+
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Sclose(msid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ for (i = 0; i < 4; i++)
+ for (j = 0; j < 5; j++)
+ for (k = 0; k < 4; k++)
+ for (l = 0; l < 2; l++)
+ if (data_buf->arr[i][j][0][k][l][0][0][0] != mem2_buffer->arr[i][j][0][k][l][0][0][0]) {
+ TestErrPrintf("%u: Read different values than written at index %d,%d,0,%d,%d,0,0,0\n",
+ __LINE__, i, j, k, l);
+ }
+
+ /* ****** Case 3: ******
+ * Testing two unadjacent full selections in the middle dimensions */
+ did = H5Dopen2(file, dset_name, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dopen2");
+
+ /* Select the elements in the dataset */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, mem3_start, mem3_stride, mem3_count, mem3_block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ msid = H5Screate_simple(8, mem3_dims, mem3_dims);
+ CHECK(msid, FAIL, "H5Screate_simple");
+
+ ret = H5Sselect_all(msid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ ret = H5Dread(did, H5T_NATIVE_INT, msid, sid, H5P_DEFAULT, mem3_buffer);
+ CHECK(ret, FAIL, "H5Dread");
+
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Sclose(msid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ for (i = 0; i < 5; i++)
+ for (j = 0; j < 3; j++)
+ for (k = 0; k < 3; k++)
+ for (l = 0; l < 6; l++)
+ if (data_buf->arr[0][i][j][0][0][k][l][0] != mem3_buffer->arr[0][i][j][0][0][k][l][0]) {
+ TestErrPrintf("%u: Read different values than written at index 0,%d,%d,0,0,%d,%d,0\n",
+ __LINE__, i, j, k, l);
+ }
+
+ /* ****** Case 4: ******
+ * Testing the full selections in the fastest-growing end and the slowest-growing end */
+ did = H5Dopen2(file, dset_name, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dopen2");
+
+ /* Select the elements in the dataset */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, mem4_start, mem4_stride, mem4_count, mem4_block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ msid = H5Screate_simple(8, mem4_dims, mem4_dims);
+ CHECK(msid, FAIL, "H5Screate_simple");
+
+ ret = H5Sselect_all(msid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ ret = H5Dread(did, H5T_NATIVE_INT, msid, sid, H5P_DEFAULT, mem4_buffer);
+ CHECK(ret, FAIL, "H5Dread");
+
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Sclose(msid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ for (i = 0; i < 4; i++)
+ for (j = 0; j < 5; j++)
+ for (k = 0; k < 6; k++)
+ for (l = 0; l < 2; l++)
+ if (data_buf->arr[i][j][0][0][0][0][k][l] != mem4_buffer->arr[i][j][0][0][0][0][k][l]) {
+ TestErrPrintf("%u: Read different values than written at index %d,%d,0,0,0,0,%d,%d\n",
+ __LINE__, i, j, k, l);
+ }
+
+ /* ****** Case 5: ******
+ * Testing the full selections in the fastest-growing end and the slowest-growing end,
+ * and also in the middle dimensions */
+ did = H5Dopen2(file, dset_name, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dopen2");
+
+ /* Select the elements in the dataset */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, mem5_start, mem5_stride, mem5_count, mem5_block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ msid = H5Screate_simple(8, mem5_dims, mem5_dims);
+ CHECK(msid, FAIL, "H5Screate_simple");
+
+ ret = H5Sselect_all(msid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ ret = H5Dread(did, H5T_NATIVE_INT, msid, sid, H5P_DEFAULT, mem5_buffer);
+ CHECK(ret, FAIL, "H5Dread");
+
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Sclose(msid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ for (i = 0; i < 4; i++)
+ for (j = 0; j < 5; j++)
+ for (k = 0; k < 4; k++)
+ for (l = 0; l < 2; l++)
+ for (m = 0; m < 6; m++)
+ for (n = 0; n < 2; n++)
+ if (data_buf->arr[i][j][0][k][l][0][m][n] !=
+ mem5_buffer->arr[i][j][0][k][l][0][m][n]) {
+ TestErrPrintf(
+ "%u: Read different values than written at index %d,%d,0,%d,%d,0,%d,%d\n",
+ __LINE__, i, j, k, l, m, n);
+ }
+
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Pclose(plid);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ HDfree(data_buf);
+ HDfree(mem1_buffer);
+ HDfree(mem2_buffer);
+ HDfree(mem3_buffer);
+ HDfree(mem4_buffer);
+ HDfree(mem5_buffer);
+}
+
+/****************************************************************
+**
+** test_coords(): Main testing routine.
+**
+****************************************************************/
+void
+test_coords(void)
+{
+ hid_t fid;
+ hbool_t is_chunk[2] = {TRUE, FALSE};
+ int i;
+ herr_t ret; /* Generic error return */
+
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ for (i = 0; i < 2; i++) {
+ /* Test H5Sselect_elements with selection of one block of data */
+ test_singleEnd_selElements(fid, is_chunk[i]);
+
+ /* Test H5Sselect_hyperslab with selection of one block of data */
+ test_singleEnd_selHyperslab(fid, is_chunk[i]);
+
+ /* Test H5Sselect_hyperslab with selection of multiple blocks of data */
+ test_multiple_ends(fid, is_chunk[i]);
+ }
+
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+}
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_coords
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Raymond Lu
+ * 20 Dec. 2007
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_coords(void)
+{
+ H5Fdelete(FILENAME, H5P_DEFAULT);
+}
diff --git a/test/API/testhdf5.c b/test/API/testhdf5.c
new file mode 100644
index 0000000..f29b603
--- /dev/null
+++ b/test/API/testhdf5.c
@@ -0,0 +1,729 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ FILE
+ testhdf5.c - HDF5 testing framework main file.
+
+ REMARKS
+ General test wrapper for HDF5 base library test programs
+
+ DESIGN
+ Each test function should be implemented as function having no
+ parameters and returning void (i.e. no return value). They should be put
+ into the list of AddTest() calls in main() below. Functions which depend
+ on other functionality should be placed below the AddTest() call for the
+ base functionality testing.
+ Each test module should include testhdf5.h and define a unique set of
+ names for test files they create.
+
+ BUGS/LIMITATIONS
+
+
+ */
+
+/* ANY new test needs to have a prototype in testhdf5.h */
+#include "testhdf5.h"
+
+int nerrors = 0;
+
+char *paraprefix = NULL; /* for command line option para-prefix */
+
+/* Length of multi-file VFD filename buffers */
+#define H5TEST_MULTI_FILENAME_LEN 1024
+
+/*
+ * This routine is designed to provide equivalent functionality to 'printf'
+ * and allow easy replacement for environments which don't have stdin/stdout
+ * available. (i.e. Windows & the Mac)
+ */
+H5_ATTR_FORMAT(printf, 1, 2)
+int
+print_func(const char *format, ...)
+{
+ va_list arglist;
+ int ret_value;
+
+ HDva_start(arglist, format);
+ ret_value = HDvprintf(format, arglist);
+ HDva_end(arglist);
+ return ret_value;
+}
+
+/*
+ * This routine is designed to provide equivalent functionality to 'printf'
+ * and also increment the error count for the testing framework.
+ */
+int
+TestErrPrintf(const char *format, ...)
+{
+ va_list arglist;
+ int ret_value;
+
+ /* Increment the error count */
+ nerrors++;
+
+ /* Print the requested information */
+ HDva_start(arglist, format);
+ ret_value = HDvprintf(format, arglist);
+ HDva_end(arglist);
+
+ /* Return the length of the string produced (like printf() does) */
+ return ret_value;
+}
+
+#ifdef H5_HAVE_PARALLEL
+/*-------------------------------------------------------------------------
+ * Function: getenv_all
+ *
+ * Purpose: Used to get the environment that the root MPI task has.
+ * name specifies which environment variable to look for
+ * val is the string to which the value of that environment
+ * variable will be copied.
+ *
+ * NOTE: The pointer returned by this function is only
+ * valid until the next call to getenv_all and the data
+ * stored there must be copied somewhere else before any
+ * further calls to getenv_all take place.
+ *
+ * Return: pointer to a string containing the value of the environment variable
+ * NULL if the variable doesn't exist in task 'root's environment.
+ *
+ * Programmer: Leon Arber
+ * 4/4/05
+ *
+ * Modifications:
+ * Use original getenv if MPI is not initialized. This happens
+ * when one uses the PHDF5 library to build a serial program.
+ * Albert 2006/04/07
+ *
+ *-------------------------------------------------------------------------
+ */
+char *
+getenv_all(MPI_Comm comm, int root, const char *name)
+{
+ int mpi_size, mpi_rank, mpi_initialized, mpi_finalized;
+ int len;
+ static char *env = NULL;
+
+ HDassert(name);
+
+ MPI_Initialized(&mpi_initialized);
+ MPI_Finalized(&mpi_finalized);
+
+ if (mpi_initialized && !mpi_finalized) {
+ MPI_Comm_rank(comm, &mpi_rank);
+ MPI_Comm_size(comm, &mpi_size);
+ HDassert(root < mpi_size);
+
+ /* The root task does the getenv call
+ * and sends the result to the other tasks */
+ if (mpi_rank == root) {
+ env = HDgetenv(name);
+ if (env) {
+ len = (int)HDstrlen(env);
+ MPI_Bcast(&len, 1, MPI_INT, root, comm);
+ MPI_Bcast(env, len, MPI_CHAR, root, comm);
+ }
+ else {
+ /* len -1 indicates that the variable was not in the environment */
+ len = -1;
+ MPI_Bcast(&len, 1, MPI_INT, root, comm);
+ }
+ }
+ else {
+ MPI_Bcast(&len, 1, MPI_INT, root, comm);
+ if (len >= 0) {
+ if (env == NULL)
+ env = (char *)HDmalloc((size_t)len + 1);
+ else if (HDstrlen(env) < (size_t)len)
+ env = (char *)HDrealloc(env, (size_t)len + 1);
+
+ MPI_Bcast(env, len, MPI_CHAR, root, comm);
+ env[len] = '\0';
+ }
+ else {
+ if (env)
+ HDfree(env);
+ env = NULL;
+ }
+ }
+#ifndef NDEBUG
+ MPI_Barrier(comm);
+#endif
+ }
+ else {
+ /* use original getenv */
+ if (env)
+ HDfree(env);
+ env = HDgetenv(name);
+ } /* end if */
+
+ return env;
+}
+
+#endif
+
+/*-------------------------------------------------------------------------
+ * Function: h5_fileaccess
+ *
+ * Purpose: Returns a file access template which is the default template
+ * but with a file driver, VOL connector, or libver bound set
+ * according to a constant or environment variable
+ *
+ * Return: Success: A file access property list
+ * Failure: H5I_INVALID_HID
+ *
+ * Programmer: Robb Matzke
+ * Thursday, November 19, 1998
+ *
+ *-------------------------------------------------------------------------
+ */
+hid_t
+h5_fileaccess(void)
+{
+ hid_t fapl_id = H5I_INVALID_HID;
+
+ if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0)
+ goto error;
+
+ /* Finally, check for libver bounds */
+ if (h5_get_libver_fapl(fapl_id) < 0)
+ goto error;
+
+ return fapl_id;
+
+error:
+ if (fapl_id != H5I_INVALID_HID)
+ H5Pclose(fapl_id);
+ return H5I_INVALID_HID;
+} /* end h5_fileaccess() */
+
+/*-------------------------------------------------------------------------
+ * Function: h5_get_libver_fapl
+ *
+ * Purpose: Sets the library version bounds for a FAPL according to the
+ * value in the constant or environment variable "HDF5_LIBVER_BOUNDS".
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Quincey Koziol
+ * November 2018
+ *
+ *-------------------------------------------------------------------------
+ */
+herr_t
+h5_get_libver_fapl(hid_t fapl)
+{
+ const char *env = NULL; /* HDF5_LIBVER_BOUNDS environment variable */
+ const char *tok = NULL; /* strtok pointer */
+ char *lasts = NULL; /* Context pointer for strtok_r() call */
+ char buf[1024]; /* buffer for tokenizing HDF5_LIBVER_BOUNDS */
+
+ /* Get the environment variable, if it exists */
+ env = HDgetenv("HDF5_LIBVER_BOUNDS");
+#ifdef HDF5_LIBVER_BOUNDS
+ /* Use the environment variable, then the compile-time constant */
+ if (!env)
+ env = HDF5_LIBVER_BOUNDS;
+#endif
+
+ /* If the environment variable was not set, just return
+ * without modifying the FAPL.
+ */
+ if (!env || !*env)
+ goto done;
+
+ /* Get the first 'word' of the environment variable.
+ * If it's nothing (environment variable was whitespace)
+ * just return the default fapl.
+ */
+ HDstrncpy(buf, env, sizeof(buf));
+ buf[sizeof(buf) - 1] = '\0';
+ if (NULL == (tok = HDstrtok_r(buf, " \t\n\r", &lasts)))
+ goto done;
+
+ if (!HDstrcmp(tok, "latest")) {
+ /* use the latest format */
+ if (H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0)
+ goto error;
+ } /* end if */
+ else {
+ /* Unknown setting */
+ goto error;
+ } /* end else */
+
+done:
+ return 0;
+
+error:
+ return -1;
+} /* end h5_get_libver_fapl() */
+
+#ifndef HDF5_PARAPREFIX
+#define HDF5_PARAPREFIX ""
+#endif
+static char *
+h5_fixname_real(const char *base_name, hid_t fapl, const char *_suffix, char *fullname, size_t size,
+ hbool_t nest_printf, hbool_t subst_for_superblock)
+{
+ const char *prefix = NULL;
+ const char *driver_env_var = NULL; /* HDF5_DRIVER environment variable */
+ char *ptr, last = '\0';
+ const char *suffix = _suffix;
+ size_t i, j;
+ hid_t driver = -1;
+ int isppdriver = 0; /* if the driver is MPI parallel */
+
+ if (!base_name || !fullname || size < 1)
+ return NULL;
+
+ HDmemset(fullname, 0, size);
+
+ /* Determine if driver is set by environment variable. If it is,
+ * only generate a suffix if fixing the filename for the superblock
+ * file. */
+ driver_env_var = HDgetenv(HDF5_DRIVER);
+ if (driver_env_var && (H5P_DEFAULT == fapl) && subst_for_superblock)
+ fapl = H5P_FILE_ACCESS_DEFAULT;
+
+ /* figure out the suffix */
+ if (H5P_DEFAULT != fapl) {
+ if ((driver = H5Pget_driver(fapl)) < 0)
+ return NULL;
+
+ if (suffix) {
+ if (H5FD_FAMILY == driver) {
+ if (subst_for_superblock)
+ suffix = "-000000.h5";
+ else
+ suffix = nest_printf ? "-%%06d.h5" : "-%06d.h5";
+ }
+ else if (H5FD_MULTI == driver) {
+
+ /* Check the HDF5_DRIVER environment variable in case
+ * we are using the split driver since both of those
+ * use the multi VFD under the hood.
+ */
+ if (driver_env_var && !HDstrcmp(driver_env_var, "split")) {
+ /* split VFD */
+ if (subst_for_superblock)
+ suffix = ".h5.meta";
+ }
+ else {
+ /* multi VFD */
+ if (subst_for_superblock)
+ suffix = "-s.h5";
+ else
+ suffix = NULL;
+ }
+ }
+ }
+ }
+
+ /* Must first check fapl is not H5P_DEFAULT (-1) because H5FD_XXX
+ * could be of value -1 if it is not defined.
+ */
+ isppdriver = ((H5P_DEFAULT != fapl) || driver_env_var) && (H5FD_MPIO == driver);
+#if 0
+ /* Check HDF5_NOCLEANUP environment setting.
+ * (The #ifdef is needed to prevent compile failure in case MPI is not
+ * configured.)
+ */
+ if (isppdriver) {
+#ifdef H5_HAVE_PARALLEL
+ if (getenv_all(MPI_COMM_WORLD, 0, HDF5_NOCLEANUP))
+ SetTestNoCleanup();
+#endif /* H5_HAVE_PARALLEL */
+ }
+ else {
+ if (HDgetenv(HDF5_NOCLEANUP))
+ SetTestNoCleanup();
+ }
+#endif
+ /* Check what prefix to use for test files. Process HDF5_PARAPREFIX and
+ * HDF5_PREFIX.
+ * Use different ones depending on parallel or serial driver used.
+ * (The #ifdef is needed to prevent compile failure in case MPI is not
+ * configured.)
+ */
+ if (isppdriver) {
+#ifdef H5_HAVE_PARALLEL
+ /*
+ * For parallel:
+ * First use command line option, then the environment
+ * variable, then try the constant
+ */
+ static int explained = 0;
+
+ prefix = (paraprefix ? paraprefix : getenv_all(MPI_COMM_WORLD, 0, "HDF5_PARAPREFIX"));
+
+ if (!prefix && !explained) {
+ /* print hint by process 0 once. */
+ int mpi_rank;
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ if (mpi_rank == 0)
+ HDprintf("*** Hint ***\n"
+ "You can use environment variable HDF5_PARAPREFIX to "
+ "run parallel test files in a\n"
+ "different directory or to add file type prefix. e.g.,\n"
+ " HDF5_PARAPREFIX=pfs:/PFS/user/me\n"
+ " export HDF5_PARAPREFIX\n"
+ "*** End of Hint ***\n");
+
+ explained = TRUE;
+#ifdef HDF5_PARAPREFIX
+ prefix = HDF5_PARAPREFIX;
+#endif /* HDF5_PARAPREFIX */
+ }
+#endif /* H5_HAVE_PARALLEL */
+ }
+ else {
+ /*
+ * For serial:
+ * First use the environment variable, then try the constant
+ */
+ prefix = HDgetenv("HDF5_PREFIX");
+
+#ifdef HDF5_PREFIX
+ if (!prefix)
+ prefix = HDF5_PREFIX;
+#endif /* HDF5_PREFIX */
+ }
+
+ /* Prepend the prefix value to the base name */
+ if (prefix && *prefix) {
+ if (isppdriver) {
+ /* This is a parallel system */
+ char *subdir;
+
+ if (!HDstrcmp(prefix, HDF5_PARAPREFIX)) {
+ /*
+ * If the prefix specifies the HDF5_PARAPREFIX directory, then
+ * default to using the "/tmp/$USER" or "/tmp/$LOGIN"
+ * directory instead.
+ */
+ char *user, *login;
+
+ user = HDgetenv("USER");
+ login = HDgetenv("LOGIN");
+ subdir = (user ? user : login);
+
+ if (subdir) {
+ for (i = 0; i < size && prefix[i]; i++)
+ fullname[i] = prefix[i];
+
+ fullname[i++] = '/';
+
+ for (j = 0; i < size && subdir[j]; ++i, ++j)
+ fullname[i] = subdir[j];
+ }
+ }
+
+ if (!fullname[0]) {
+ /* We didn't append the prefix yet */
+ HDstrncpy(fullname, prefix, size);
+ fullname[size - 1] = '\0';
+ }
+
+ if (HDstrlen(fullname) + HDstrlen(base_name) + 1 < size) {
+ /*
+ * Append the base_name with a slash first. Multiple
+ * slashes are handled below.
+ */
+ h5_stat_t buf;
+
+ if (HDstat(fullname, &buf) < 0)
+ /* The directory doesn't exist just yet */
+ if (HDmkdir(fullname, (mode_t)0755) < 0 && errno != EEXIST)
+ /*
+ * We couldn't make the "/tmp/${USER,LOGIN}"
+ * subdirectory. Default to PREFIX's original
+ * prefix value.
+ */
+ HDstrcpy(fullname, prefix);
+
+ HDstrcat(fullname, "/");
+ HDstrcat(fullname, base_name);
+ }
+ else {
+ /* Buffer is too small */
+ return NULL;
+ }
+ }
+ else {
+ if (HDsnprintf(fullname, size, "%s/%s", prefix, base_name) == (int)size)
+ /* Buffer is too small */
+ return NULL;
+ }
+ }
+ else if (HDstrlen(base_name) >= size) {
+ /* Buffer is too small */
+ return NULL;
+ }
+ else {
+ HDstrcpy(fullname, base_name);
+ }
+
+ /* Append a suffix */
+ if (suffix) {
+ if (HDstrlen(fullname) + HDstrlen(suffix) >= size)
+ return NULL;
+
+ HDstrcat(fullname, suffix);
+ }
+
+ /* Remove any double slashes in the filename */
+ for (ptr = fullname, i = j = 0; ptr && i < size; i++, ptr++) {
+ if (*ptr != '/' || last != '/')
+ fullname[j++] = *ptr;
+
+ last = *ptr;
+ }
+
+ return fullname;
+}
+
+char *
+h5_fixname(const char *base_name, hid_t fapl, char *fullname, size_t size)
+{
+ return (h5_fixname_real(base_name, fapl, ".h5", fullname, size, FALSE, FALSE));
+}
+
+char *
+h5_fixname_superblock(const char *base_name, hid_t fapl_id, char *fullname, size_t size)
+{
+ return (h5_fixname_real(base_name, fapl_id, ".h5", fullname, size, FALSE, TRUE));
+}
+
+hbool_t
+h5_using_default_driver(const char *drv_name)
+{
+ hbool_t ret_val = TRUE;
+
+ HDassert(H5_DEFAULT_VFD == H5FD_SEC2);
+
+ if (!drv_name)
+ drv_name = HDgetenv(HDF5_DRIVER);
+
+ if (drv_name)
+ return (!HDstrcmp(drv_name, "sec2") || !HDstrcmp(drv_name, "nomatch"));
+
+ return ret_val;
+}
+
+herr_t
+h5_driver_is_default_vfd_compatible(hid_t fapl_id, hbool_t *default_vfd_compatible)
+{
+ unsigned long feat_flags = 0;
+ hid_t driver_id = H5I_INVALID_HID;
+ herr_t ret_value = SUCCEED;
+
+ HDassert(fapl_id >= 0);
+ HDassert(default_vfd_compatible);
+
+ if (fapl_id == H5P_DEFAULT)
+ fapl_id = H5P_FILE_ACCESS_DEFAULT;
+
+ if ((driver_id = H5Pget_driver(fapl_id)) < 0)
+ return FAIL;
+
+ if (H5FDdriver_query(driver_id, &feat_flags) < 0)
+ return FAIL;
+
+ *default_vfd_compatible = (feat_flags & H5FD_FEAT_DEFAULT_VFD_COMPATIBLE);
+
+ return ret_value;
+} /* end h5_driver_is_default_vfd_compatible() */
+
+int
+main(int argc, char *argv[])
+{
+#if defined(H5_PARALLEL_TEST)
+ MPI_Init(&argc, &argv);
+#else
+ (void)argc;
+ (void)argv;
+#endif
+
+ HDprintf("===================================\n");
+ HDprintf("HDF5 TESTS START\n");
+ HDprintf("===================================\n");
+
+ /* Initialize testing framework */
+ /* TestInit(argv[0], NULL, NULL); */
+
+ /* Tests are generally arranged from least to most complexity... */
+ /* AddTest("config", test_configure, cleanup_configure, "Configure definitions", NULL); */
+ HDprintf("** CONFIGURE DEFINITIONS **\n");
+ test_configure();
+ HDprintf("\n");
+
+ /* AddTest("metadata", test_metadata, cleanup_metadata, "Encoding/decoding metadata", NULL); */
+
+ /* AddTest("checksum", test_checksum, cleanup_checksum, "Checksum algorithm", NULL); */
+ HDprintf("** CHECKSUM ALGORITHM **\n");
+ test_checksum();
+ HDprintf("\n");
+
+ /* AddTest("tst", test_tst, NULL, "Ternary Search Trees", NULL); */
+
+ /* AddTest("heap", test_heap, NULL, "Memory Heaps", NULL); */
+
+ /* AddTest("skiplist", test_skiplist, NULL, "Skip Lists", NULL); */
+
+ /* AddTest("refstr", test_refstr, NULL, "Reference Counted Strings", NULL); */
+
+ /* AddTest("file", test_file, cleanup_file, "Low-Level File I/O", NULL); */
+ HDprintf("** LOW-LEVEL FILE I/O **\n");
+ test_file();
+ HDprintf("\n");
+
+ /* AddTest("objects", test_h5o, cleanup_h5o, "Generic Object Functions", NULL); */
+ HDprintf("** GENERIC OBJECT FUNCTIONS **\n");
+ test_h5o();
+ HDprintf("\n");
+
+ /* AddTest("h5s", test_h5s, cleanup_h5s, "Dataspaces", NULL); */
+ HDprintf("** DATASPACES **\n");
+ test_h5s();
+ HDprintf("\n");
+
+ /* AddTest("coords", test_coords, cleanup_coords, "Dataspace coordinates", NULL); */
+ HDprintf("** DATASPACE COORDINATES **\n");
+ test_coords();
+ HDprintf("\n");
+
+ /* AddTest("sohm", test_sohm, cleanup_sohm, "Shared Object Header Messages", NULL); */
+
+ /* AddTest("attr", test_attr, cleanup_attr, "Attributes", NULL); */
+ HDprintf("** ATTRIBUTES **\n");
+ test_attr();
+ HDprintf("\n");
+
+ /* AddTest("select", test_select, cleanup_select, "Selections", NULL); */
+ HDprintf("** SELECTIONS **\n");
+ test_select();
+ HDprintf("\n");
+
+ /* AddTest("time", test_time, cleanup_time, "Time Datatypes", NULL); */
+ HDprintf("** TIME DATATYPES**\n");
+ test_time();
+ HDprintf("\n");
+
+ /* AddTest("ref_deprec", test_reference_deprec, cleanup_reference_deprec, "Deprecated References", NULL);
+ */
+
+ /* AddTest("ref", test_reference, cleanup_reference, "References", NULL); */
+ HDprintf("** REFERENCES **\n");
+ test_reference();
+ HDprintf("\n");
+
+ /* AddTest("vltypes", test_vltypes, cleanup_vltypes, "Variable-Length Datatypes", NULL); */
+ HDprintf("** VARIABLE-LENGTH DATATYPES **\n");
+ test_vltypes();
+ HDprintf("\n");
+
+ /* AddTest("vlstrings", test_vlstrings, cleanup_vlstrings, "Variable-Length Strings", NULL); */
+ HDprintf("** VARIABLE-LENGTH STRINGS **\n");
+ test_vlstrings();
+ HDprintf("\n");
+
+ /* AddTest("iterate", test_iterate, cleanup_iterate, "Group & Attribute Iteration", NULL); */
+ HDprintf("** GROUP & ATTRIBUTE ITERATION **\n");
+ test_iterate();
+ HDprintf("\n");
+
+ /* AddTest("array", test_array, cleanup_array, "Array Datatypes", NULL); */
+ HDprintf("** ARRAY DATATYPES **\n");
+ test_array();
+ HDprintf("\n");
+
+ /* AddTest("genprop", test_genprop, cleanup_genprop, "Generic Properties", NULL); */
+ HDprintf("** GENERIC PROPERTIES **\n");
+ test_genprop();
+ HDprintf("\n");
+
+ /* AddTest("unicode", test_unicode, cleanup_unicode, "UTF-8 Encoding", NULL); */
+ HDprintf("** UTF-8 ENCODING **\n");
+ test_unicode();
+ HDprintf("\n");
+
+ /* AddTest("id", test_ids, NULL, "User-Created Identifiers", NULL); */
+ HDprintf("** USER-CREATED IDENTIFIERS **\n");
+ test_ids();
+ HDprintf("\n");
+
+ /* AddTest("misc", test_misc, cleanup_misc, "Miscellaneous", NULL); */
+ HDprintf("** MISCELLANEOUS **\n");
+ test_misc();
+ HDprintf("\n");
+
+ /* Display testing information */
+ /* TestInfo(argv[0]); */
+
+ /* Parse command line arguments */
+ /* TestParseCmdLine(argc,argv); */
+
+ /* Perform requested testing */
+ /* PerformTests(); */
+
+ /* Display test summary, if requested */
+ /* if (GetTestSummary())
+ TestSummary(); */
+
+ /* Clean up test files, if allowed */
+ if (/* GetTestCleanup() && */ !getenv("HDF5_NOCLEANUP")) {
+ /* TestCleanup(); */
+
+ HDprintf("TEST CLEANUP\n");
+
+ H5E_BEGIN_TRY
+ cleanup_configure();
+ cleanup_checksum();
+ cleanup_file();
+ cleanup_h5o();
+ cleanup_h5s();
+ cleanup_coords();
+ cleanup_attr();
+ cleanup_select();
+ cleanup_time();
+ cleanup_reference();
+ cleanup_vltypes();
+ cleanup_vlstrings();
+ cleanup_iterate();
+ cleanup_array();
+ cleanup_genprop();
+ cleanup_unicode();
+ cleanup_misc();
+ H5E_END_TRY;
+
+ HDprintf("\n");
+ }
+
+ /* Release test infrastructure */
+ /* TestShutdown(); */
+
+ /* Exit failure if errors encountered; else exit success. */
+ /* No need to print anything since PerformTests() already does. */
+ if (nerrors /* GetTestNumErrs() */ > 0) {
+ HDprintf("** HDF5 tests failed with %d errors **\n", nerrors);
+ HDexit(EXIT_FAILURE);
+ }
+ else {
+ HDprintf("** HDF5 tests ran successfully **\n");
+ HDexit(EXIT_SUCCESS);
+ }
+} /* end main() */
diff --git a/test/API/testhdf5.h b/test/API/testhdf5.h
new file mode 100644
index 0000000..44ccfe0
--- /dev/null
+++ b/test/API/testhdf5.h
@@ -0,0 +1,349 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * This header file contains information required for testing the HDF5 library.
+ */
+
+#ifndef TESTHDF5_H
+#define TESTHDF5_H
+
+/* Include generic testing header also */
+/* #include "h5test.h" */
+#include "hdf5.h"
+#include "H5private.h"
+#include "H5_api_tests_disabled.h"
+
+#define VERBO_NONE 0 /* None */
+#define VERBO_DEF 3 /* Default */
+#define VERBO_LO 5 /* Low */
+#define VERBO_MED 7 /* Medium */
+#define VERBO_HI 9 /* High */
+
+/* Turn off verbose reporting by default */
+#define VERBOSE_MED (FALSE)
+#define VERBOSE_HI (FALSE)
+
+/* Use %ld to print the value because long should cover most cases. */
+/* Used to make certain a return value _is_not_ a value */
+#define CHECK(ret, val, where) \
+ do { \
+ if (VERBOSE_HI) { \
+ print_func(" Call to routine: %15s at line %4d " \
+ "in %s returned %ld \n", \
+ where, (int)__LINE__, __FILE__, (long)(ret)); \
+ } \
+ if ((ret) == (val)) { \
+ TestErrPrintf("*** UNEXPECTED RETURN from %s is %ld at line %4d " \
+ "in %s\n", \
+ where, (long)(ret), (int)__LINE__, __FILE__); \
+ H5Eprint2(H5E_DEFAULT, stdout); \
+ } \
+ } while (0)
+
/* Used to make certain a return value is not negative (herr_t/hid_t
 * convention).  Wrapped in do/while(0) so the macro expands to a single
 * statement (consistent with CHECK above; the bare-brace form breaks
 * `if (...) CHECK_I(...); else ...`). */
#define CHECK_I(ret, where)                                                                                  \
    do {                                                                                                     \
        if (VERBOSE_HI) {                                                                                    \
            print_func("   Call to routine: %15s at line %4d in %s returned %ld\n", (where), (int)__LINE__,  \
                       __FILE__, (long)(ret));                                                               \
        }                                                                                                    \
        if ((ret) < 0) {                                                                                     \
            TestErrPrintf("*** UNEXPECTED RETURN from %s is %ld line %4d in %s\n", (where), (long)(ret),     \
                          (int)__LINE__, __FILE__);                                                          \
            H5Eprint2(H5E_DEFAULT, stdout);                                                                  \
        }                                                                                                    \
    } while (0)
+
/* Check that a pointer is valid (i.e.: not NULL).
 * do/while(0) wrapping makes the macro a single statement (dangling-else
 * safe); macro arguments are fully parenthesized. */
#define CHECK_PTR(ret, where)                                                                                \
    do {                                                                                                     \
        if (VERBOSE_HI) {                                                                                    \
            print_func("   Call to routine: %15s at line %4d in %s returned %p\n", (where), (int)__LINE__,   \
                       __FILE__, ((const void *)(ret)));                                                     \
        }                                                                                                    \
        if (!(ret)) {                                                                                        \
            TestErrPrintf("*** UNEXPECTED RETURN from %s is NULL line %4d in %s\n", (where), (int)__LINE__,  \
                          __FILE__);                                                                         \
            H5Eprint2(H5E_DEFAULT, stdout);                                                                  \
        }                                                                                                    \
    } while (0)
+
/* Check that a pointer is NULL.
 * do/while(0) wrapping makes the macro a single statement (dangling-else
 * safe); macro arguments are fully parenthesized. */
#define CHECK_PTR_NULL(ret, where)                                                                           \
    do {                                                                                                     \
        if (VERBOSE_HI) {                                                                                    \
            print_func("   Call to routine: %15s at line %4d in %s returned %p\n", (where), (int)__LINE__,   \
                       __FILE__, ((const void *)(ret)));                                                     \
        }                                                                                                    \
        if ((ret)) {                                                                                         \
            TestErrPrintf("*** UNEXPECTED RETURN from %s is not NULL line %4d in %s\n", (where),             \
                          (int)__LINE__, __FILE__);                                                          \
            H5Eprint2(H5E_DEFAULT, stdout);                                                                  \
        }                                                                                                    \
    } while (0)
+
/* Check that two pointers are equal.
 * do/while(0) wrapping makes the macro a single statement; `ret` and
 * `val` are parenthesized in the comparison so operator-containing
 * arguments (e.g. `p + 1`) group correctly. */
#define CHECK_PTR_EQ(ret, val, where)                                                                        \
    do {                                                                                                     \
        if (VERBOSE_HI) {                                                                                    \
            print_func("   Call to routine: %15s at line %4d in %s returned %p\n", (where), (int)__LINE__,   \
                       __FILE__, (const void *)(ret));                                                       \
        }                                                                                                    \
        if ((ret) != (val)) {                                                                                \
            TestErrPrintf(                                                                                   \
                "*** UNEXPECTED RETURN from %s: returned value of %p is not equal to %p line %4d in %s\n",   \
                (where), (const void *)(ret), (const void *)(val), (int)__LINE__, __FILE__);                 \
            H5Eprint2(H5E_DEFAULT, stdout);                                                                  \
        }                                                                                                    \
    } while (0)
+
+/* Used to make certain a return value _is_ a value */
+#define VERIFY(_x, _val, where) \
+ do { \
+ long __x = (long)_x, __val = (long)_val; \
+ if (VERBOSE_HI) { \
+ print_func(" Call to routine: %15s at line %4d in %s had value " \
+ "%ld \n", \
+ (where), (int)__LINE__, __FILE__, __x); \
+ } \
+ if ((__x) != (__val)) { \
+ TestErrPrintf("*** UNEXPECTED VALUE from %s should be %ld, but is %ld at line %4d " \
+ "in %s\n", \
+ (where), __val, __x, (int)__LINE__, __FILE__); \
+ H5Eprint2(H5E_DEFAULT, stdout); \
+ } \
+ } while (0)
+
+/* Used to make certain a (non-'long' type's) return value _is_ a value */
+#define VERIFY_TYPE(_x, _val, _type, _format, where) \
+ do { \
+ _type __x = (_type)_x, __val = (_type)_val; \
+ if (VERBOSE_HI) { \
+ print_func(" Call to routine: %15s at line %4d in %s had value " _format " \n", (where), \
+ (int)__LINE__, __FILE__, __x); \
+ } \
+ if ((__x) != (__val)) { \
+ TestErrPrintf("*** UNEXPECTED VALUE from %s should be " _format ", but is " _format \
+ " at line %4d " \
+ "in %s\n", \
+ (where), __val, __x, (int)__LINE__, __FILE__); \
+ H5Eprint2(H5E_DEFAULT, stdout); \
+ } \
+ } while (0)
+
+/* Used to make certain a string return value _is_ a value */
+#define VERIFY_STR(x, val, where) \
+ do { \
+ if (VERBOSE_HI) { \
+ print_func(" Call to routine: %15s at line %4d in %s had value " \
+ "%s \n", \
+ (where), (int)__LINE__, __FILE__, x); \
+ } \
+ if (HDstrcmp(x, val) != 0) { \
+ TestErrPrintf("*** UNEXPECTED VALUE from %s should be %s, but is %s at line %4d " \
+ "in %s\n", \
+ where, val, x, (int)__LINE__, __FILE__); \
+ H5Eprint2(H5E_DEFAULT, stdout); \
+ } \
+ } while (0)
+
+/* Used to document process through a test and to check for errors */
+#define RESULT(ret, func) \
+ do { \
+ if (VERBOSE_MED) { \
+ print_func(" Call to routine: %15s at line %4d in %s returned " \
+ "%ld\n", \
+ func, (int)__LINE__, __FILE__, (long)(ret)); \
+ } \
+ if (VERBOSE_HI) \
+ H5Eprint2(H5E_DEFAULT, stdout); \
+ if ((ret) == FAIL) { \
+ TestErrPrintf("*** UNEXPECTED RETURN from %s is %ld at line %4d " \
+ "in %s\n", \
+ func, (long)(ret), (int)__LINE__, __FILE__); \
+ H5Eprint2(H5E_DEFAULT, stdout); \
+ } \
+ } while (0)
+
+/* Used to document process through a test */
+#if defined(H5_HAVE_PARALLEL) && defined(H5_PARALLEL_TEST)
+#define MESSAGE(V, A) \
+ { \
+ int mpi_rank; \
+ \
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank); \
+ if (mpi_rank == 0 && VERBO_LO /* HDGetTestVerbosity() */ >= (V)) \
+ print_func A; \
+ }
+#else /* H5_HAVE_PARALLEL */
+#define MESSAGE(V, A) \
+ { \
+ if (VERBO_LO /* HDGetTestVerbosity() */ >= (V)) \
+ print_func A; \
+ }
+#endif /* H5_HAVE_PARALLEL */
+
+/* Used to indicate an error that is complex to check for */
+#define ERROR(where) \
+ do { \
+ if (VERBOSE_HI) \
+ print_func(" Call to routine: %15s at line %4d in %s returned " \
+ "invalid result\n", \
+ where, (int)__LINE__, __FILE__); \
+ TestErrPrintf("*** UNEXPECTED RESULT from %s at line %4d in %s\n", where, (int)__LINE__, __FILE__); \
+ } while (0)
+
+/* definitions for command strings */
+#define VERBOSITY_STR "Verbosity"
+#define SKIP_STR "Skip"
+#define TEST_STR "Test"
+#define CLEAN_STR "Cleanup"
+
+#define AT() HDprintf(" at %s:%d in %s()...\n", __FILE__, __LINE__, __func__);
+#define TESTING(WHAT) \
+ { \
+ HDprintf("Testing %-62s", WHAT); \
+ HDfflush(stdout); \
+ }
+#define TESTING_2(WHAT) \
+ { \
+ HDprintf(" Testing %-60s", WHAT); \
+ HDfflush(stdout); \
+ }
+#define PASSED() \
+ { \
+ HDputs(" PASSED"); \
+ HDfflush(stdout); \
+ }
+#define H5_FAILED() \
+ { \
+ HDputs("*FAILED*"); \
+ HDfflush(stdout); \
+ }
+#define H5_WARNING() \
+ { \
+ HDputs("*WARNING*"); \
+ HDfflush(stdout); \
+ }
+#define SKIPPED() \
+ { \
+ HDputs(" -SKIP-"); \
+ HDfflush(stdout); \
+ }
+#define PUTS_ERROR(s) \
+ { \
+ HDputs(s); \
+ AT(); \
+ goto error; \
+ }
+#define TEST_ERROR \
+ { \
+ H5_FAILED(); \
+ AT(); \
+ goto error; \
+ }
+#define STACK_ERROR \
+ { \
+ H5Eprint2(H5E_DEFAULT, stdout); \
+ goto error; \
+ }
+#define FAIL_STACK_ERROR \
+ { \
+ H5_FAILED(); \
+ AT(); \
+ H5Eprint2(H5E_DEFAULT, stdout); \
+ goto error; \
+ }
+#define FAIL_PUTS_ERROR(s) \
+ { \
+ H5_FAILED(); \
+ AT(); \
+ HDputs(s); \
+ goto error; \
+ }
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+extern int nerrors;
+
+int print_func(const char *format, ...);
+int TestErrPrintf(const char *format, ...);
+hid_t h5_fileaccess(void);
+/* Functions that will replace components of a FAPL */
+herr_t h5_get_vfd_fapl(hid_t fapl_id);
+herr_t h5_get_libver_fapl(hid_t fapl_id);
+char *h5_fixname(const char *base_name, hid_t fapl, char *fullname, size_t size);
+char *h5_fixname_superblock(const char *base_name, hid_t fapl, char *fullname, size_t size);
+hbool_t h5_using_default_driver(const char *drv_name);
+herr_t h5_driver_is_default_vfd_compatible(hid_t fapl_id, hbool_t *default_vfd_compatible);
+
+#ifdef H5_HAVE_PARALLEL
+char *getenv_all(MPI_Comm comm, int root, const char *name);
+#endif
+
+/* Prototypes for the test routines */
+void test_metadata(void);
+void test_checksum(void);
+void test_refstr(void);
+void test_file(void);
+void test_h5o(void);
+void test_h5t(void);
+void test_h5s(void);
+void test_coords(void);
+void test_h5d(void);
+void test_attr(void);
+void test_select(void);
+void test_time(void);
+void test_reference(void);
+void test_reference_deprec(void);
+void test_vltypes(void);
+void test_vlstrings(void);
+void test_iterate(void);
+void test_array(void);
+void test_genprop(void);
+void test_configure(void);
+void test_h5_system(void);
+void test_misc(void);
+void test_ids(void);
+void test_skiplist(void);
+void test_sohm(void);
+void test_unicode(void);
+
+/* Prototypes for the cleanup routines */
+void cleanup_metadata(void);
+void cleanup_checksum(void);
+void cleanup_file(void);
+void cleanup_h5o(void);
+void cleanup_h5s(void);
+void cleanup_coords(void);
+void cleanup_attr(void);
+void cleanup_select(void);
+void cleanup_time(void);
+void cleanup_reference(void);
+void cleanup_reference_deprec(void);
+void cleanup_vltypes(void);
+void cleanup_vlstrings(void);
+void cleanup_iterate(void);
+void cleanup_array(void);
+void cleanup_genprop(void);
+void cleanup_configure(void);
+void cleanup_h5_system(void);
+void cleanup_sohm(void);
+void cleanup_misc(void);
+void cleanup_unicode(void);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* TESTHDF5_H */
diff --git a/test/API/tfile.c b/test/API/tfile.c
new file mode 100644
index 0000000..bc0f18e
--- /dev/null
+++ b/test/API/tfile.c
@@ -0,0 +1,8381 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: tfile
+ *
+ * Test the low-level file I/O features.
+ *
+ *************************************************************/
+
+#include "testhdf5.h"
+/* #include "H5srcdir.h" */
+
+/* #include "H5Iprivate.h" */
+/* #include "H5Pprivate.h" */
+/* #include "H5VLprivate.h" */ /* Virtual Object Layer */
+
+#if 0
+/*
+ * This file needs to access private information from the H5F package.
+ * This file also needs to access the file testing code.
+ */
+#define H5F_FRIEND /*suppress error about including H5Fpkg */
+#define H5F_TESTING
+#include "H5Fpkg.h" /* File access */
+
+#define H5FD_FRIEND /*suppress error about including H5FDpkg.h */
+#define H5FD_TESTING
+#include "H5FDpkg.h"
+
+#define H5D_FRIEND /*suppress error about including H5Dpkg */
+#include "H5Dpkg.h" /* Dataset access */
+
+#define H5S_FRIEND /*suppress error about including H5Spkg */
+#include "H5Spkg.h" /* Dataspace */
+
+#define H5T_FRIEND /*suppress error about including H5Tpkg */
+#include "H5Tpkg.h" /* Datatype */
+
+#define H5A_FRIEND /*suppress error about including H5Apkg */
+#include "H5Apkg.h" /* Attributes */
+
+#define H5O_FRIEND /*suppress error about including H5Opkg */
+#include "H5Opkg.h" /* Object headers */
+#endif
+
+#define BAD_USERBLOCK_SIZE1 (hsize_t)1
+#define BAD_USERBLOCK_SIZE2 (hsize_t)2
+#define BAD_USERBLOCK_SIZE3 (hsize_t)3
+#define BAD_USERBLOCK_SIZE4 (hsize_t)64
+#define BAD_USERBLOCK_SIZE5 (hsize_t)511
+#define BAD_USERBLOCK_SIZE6 (hsize_t)513
+#define BAD_USERBLOCK_SIZE7 (hsize_t)6144
+
+#define F1_USERBLOCK_SIZE (hsize_t)0
+#define F1_OFFSET_SIZE sizeof(haddr_t)
+#define F1_LENGTH_SIZE sizeof(hsize_t)
+#define F1_SYM_LEAF_K 4
+#define F1_SYM_INTERN_K 16
+#define FILE1 "tfile1.h5"
+#define SFILE1 "sys_file1"
+
+#define REOPEN_FILE "tfile_reopen.h5"
+#define REOPEN_DSET "dset"
+
+#define F2_USERBLOCK_SIZE (hsize_t)512
+#define F2_OFFSET_SIZE 8
+#define F2_LENGTH_SIZE 8
+#define F2_SYM_LEAF_K 8
+#define F2_SYM_INTERN_K 32
+#define F2_RANK 2
+#define F2_DIM0 4
+#define F2_DIM1 6
+#define F2_DSET "dset"
+#define FILE2 "tfile2.h5"
+
+#define F3_USERBLOCK_SIZE (hsize_t)0
+#define F3_OFFSET_SIZE F2_OFFSET_SIZE
+#define F3_LENGTH_SIZE F2_LENGTH_SIZE
+#define F3_SYM_LEAF_K F2_SYM_LEAF_K
+#define F3_SYM_INTERN_K F2_SYM_INTERN_K
+#define FILE3 "tfile3.h5"
+
+#define GRP_NAME "/group"
+#define DSET_NAME "dataset"
+#define ATTR_NAME "attr"
+#define TYPE_NAME "type"
+#define FILE4 "tfile4.h5"
+
+#define OBJ_ID_COUNT_0 0
+#define OBJ_ID_COUNT_1 1
+#define OBJ_ID_COUNT_2 2
+#define OBJ_ID_COUNT_3 3
+#define OBJ_ID_COUNT_4 4
+#define OBJ_ID_COUNT_6 6
+#define OBJ_ID_COUNT_8 8
+
+#define GROUP1 "Group1"
+#define DSET1 "Dataset1"
+#define DSET2 "/Group1/Dataset2"
+
+#define TESTA_GROUPNAME "group"
+#define TESTA_DSETNAME "dataset"
+#define TESTA_ATTRNAME "attribute"
+#define TESTA_DTYPENAME "compound"
+#define TESTA_NAME_BUF_SIZE 64
+#define TESTA_RANK 2
+#define TESTA_NX 4
+#define TESTA_NY 5
+
+#define USERBLOCK_SIZE ((hsize_t)512)
+
+/* Declarations for test_filespace_*() */
+#define FILENAME_LEN 1024 /* length of file name */
+#define DSETNAME "dset" /* Name of dataset */
+#define NELMTS(X) (sizeof(X) / sizeof(X[0])) /* # of elements */
+#define READ_OLD_BUFSIZE 1024 /* Buffer for holding file data */
+#define FILE5 "tfile5.h5" /* Test file */
+#define TEST_THRESHOLD10 10 /* Free space section threshold */
+#define FSP_SIZE_DEF 4096 /* File space page size default */
+#define FSP_SIZE512 512 /* File space page size */
+#define FSP_SIZE1G (1024 * 1024 * 1024) /* File space page size */
+
+/* Declaration for test_libver_macros2() */
+#define FILE6 "tfile6.h5" /* Test file */
+
+/* Declaration for test_get_obj_ids() */
+#define FILE7 "tfile7.h5" /* Test file */
+#define NGROUPS 2
+#define NDSETS 4
+
+/* Declaration for test_incr_filesize() */
+#define FILE8 "tfile8.h5" /* Test file */
+
+/* Files created under 1.6 branch and 1.8 branch--used in test_filespace_compatible() */
+const char *OLD_FILENAME[] = {
+ "filespace_1_6.h5", /* 1.6 HDF5 file */
+ "filespace_1_8.h5" /* 1.8 HDF5 file */
+};
+
+/* Files created in 1.10.0 release --used in test_filespace_1.10.0_compatible() */
+/* These files are copied from release 1.10.0 tools/h5format_convert/testfiles */
+const char *OLD_1_10_0_FILENAME[] = {
+ "h5fc_ext1_i.h5", /* 0 */
+ "h5fc_ext1_f.h5", /* 1 */
+ "h5fc_ext2_if.h5", /* 2 */
+ "h5fc_ext2_sf.h5", /* 3 */
+ "h5fc_ext3_isf.h5", /* 4 */
+ "h5fc_ext_none.h5" /* 5 */
+};
+
+/* Files used in test_filespace_round_compatible() */
+const char *FSPACE_FILENAMES[] = {
+ "fsm_aggr_nopersist.h5", /* H5F_FILE_SPACE_AGGR, not persisting free-space */
+ "fsm_aggr_persist.h5", /* H5F_FILE_SPACE_AGGR, persisting free-space */
+ "paged_nopersist.h5", /* H5F_FILE_SPACE_PAGE, not persisting free-space */
+ "paged_persist.h5", /* H5F_FILE_SPACE_PAGE, persisting free-space */
+ "aggr.h5", /* H5F_FILE_SPACE_AGGR */
+ "none.h5" /* H5F_FILE_SPACE_NONE */
+};
+
+const char *FILESPACE_NAME[] = {"tfilespace.h5", NULL};
+
+/* Declarations for test_libver_bounds_copy(): */
+/* SRC_FILE: source file created under 1.8 branch with latest format */
+/* DST_FILE: destination file for copying the dataset in SRC_FILE */
+/* DSET_DS1: the dataset created in SRC_FILE to be copied to DST_FILE */
+#define SRC_FILE "fill18.h5"
+#define DST_FILE "fill18_copy.h5"
+#define DSET_DS1 "DS1"
+
+#if 0
+/* Local test function declarations for version bounds */
+static void test_libver_bounds_low_high(const char *env_h5_drvr);
+static void test_libver_bounds_super(hid_t fapl, const char *env_h5_drvr);
+static void test_libver_bounds_super_create(hid_t fapl, hid_t fcpl, htri_t is_swmr, htri_t non_def_fsm);
+static void test_libver_bounds_super_open(hid_t fapl, hid_t fcpl, htri_t is_swmr, htri_t non_def_fsm);
+static void test_libver_bounds_obj(hid_t fapl);
+static void test_libver_bounds_dataset(hid_t fapl);
+static void test_libver_bounds_dataspace(hid_t fapl);
+static void test_libver_bounds_datatype(hid_t fapl);
+static void test_libver_bounds_datatype_check(hid_t fapl, hid_t tid);
+static void test_libver_bounds_attributes(hid_t fapl);
+#endif
+
+#define DSET_NULL "DSET_NULL"
+#define DSET "DSET"
+#define DSETA "DSETA"
+#define DSETB "DSETB"
+#define DSETC "DSETC"
+
+#if 0
+static void
+create_objects(hid_t, hid_t, hid_t *, hid_t *, hid_t *, hid_t *);
+static void
+test_obj_count_and_id(hid_t, hid_t, hid_t, hid_t, hid_t, hid_t);
+static void
+check_file_id(hid_t, hid_t);
+#endif
+
+#if 0
+/* Helper routine used by test_rw_noupdate() */
+static int cal_chksum(const char *file, uint32_t *chksum);
+
+static void test_rw_noupdate(void);
+#endif
+
+/****************************************************************
+**
+** test_file_create(): Low-level file creation I/O test routine.
+**
+****************************************************************/
+static void
+test_file_create(void)
+{
+ hid_t fid1 = H5I_INVALID_HID;
+ hid_t fid2 = H5I_INVALID_HID;
+ hid_t fid3 = H5I_INVALID_HID; /* HDF5 File IDs */
+ hid_t tmpl1, tmpl2; /* file creation templates */
+ hsize_t ublock; /* sizeof userblock */
+ size_t parm; /* file-creation parameters */
+ size_t parm2; /* file-creation parameters */
+ unsigned iparm;
+ unsigned iparm2;
+ herr_t ret; /*generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Low-Level File Creation I/O\n"));
+
+ /* First ensure the file does not exist */
+ H5E_BEGIN_TRY
+ {
+ H5Fdelete(FILE1, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ /* Try opening a non-existent file */
+ H5E_BEGIN_TRY
+ {
+ fid1 = H5Fopen(FILE1, H5F_ACC_RDWR, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(fid1, FAIL, "H5Fopen");
+
+ /* Test create with various sequences of H5F_ACC_EXCL and */
+ /* H5F_ACC_TRUNC flags */
+
+ /* Create with H5F_ACC_EXCL */
+ fid1 = H5Fcreate(FILE1, H5F_ACC_EXCL, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+#ifndef NO_TRUNCATE_OPEN_FILE
+ /*
+ * try to create the same file with H5F_ACC_TRUNC. This should fail
+ * because fid1 is the same file and is currently open.
+ */
+ H5E_BEGIN_TRY
+ {
+ fid2 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(fid2, FAIL, "H5Fcreate");
+#endif
+ /* Close all files */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ H5E_BEGIN_TRY
+ {
+ ret = H5Fclose(fid2);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Fclose"); /*file should not have been open */
+
+ /*
+ * Try again with H5F_ACC_EXCL. This should fail because the file already
+ * exists from the previous steps.
+ */
+ H5E_BEGIN_TRY
+ {
+ fid1 = H5Fcreate(FILE1, H5F_ACC_EXCL, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(fid1, FAIL, "H5Fcreate");
+
+ /* Test create with H5F_ACC_TRUNC. This will truncate the existing file. */
+ fid1 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+#ifndef NO_TRUNCATE_OPEN_FILE
+ /*
+ * Try to truncate first file again. This should fail because fid1 is the
+ * same file and is currently open.
+ */
+ H5E_BEGIN_TRY
+ {
+ fid2 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(fid2, FAIL, "H5Fcreate");
+#endif
+ /*
+ * Try with H5F_ACC_EXCL. This should fail too because the file already
+ * exists.
+ */
+ H5E_BEGIN_TRY
+ {
+ fid2 = H5Fcreate(FILE1, H5F_ACC_EXCL, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(fid2, FAIL, "H5Fcreate");
+
+ /* Get the file-creation template */
+ tmpl1 = H5Fget_create_plist(fid1);
+ CHECK(tmpl1, FAIL, "H5Fget_create_plist");
+
+ /* Get the file-creation parameters */
+ ret = H5Pget_userblock(tmpl1, &ublock);
+ CHECK(ret, FAIL, "H5Pget_userblock");
+ VERIFY(ublock, F1_USERBLOCK_SIZE, "H5Pget_userblock");
+
+ ret = H5Pget_sizes(tmpl1, &parm, &parm2);
+ CHECK(ret, FAIL, "H5Pget_sizes");
+ VERIFY(parm, F1_OFFSET_SIZE, "H5Pget_sizes");
+ VERIFY(parm2, F1_LENGTH_SIZE, "H5Pget_sizes");
+
+ ret = H5Pget_sym_k(tmpl1, &iparm, &iparm2);
+ CHECK(ret, FAIL, "H5Pget_sym_k");
+ VERIFY(iparm, F1_SYM_INTERN_K, "H5Pget_sym_k");
+ VERIFY(iparm2, F1_SYM_LEAF_K, "H5Pget_sym_k");
+
+ /* Release file-creation template */
+ ret = H5Pclose(tmpl1);
+ CHECK(ret, FAIL, "H5Pclose");
+
+#ifdef LATER
+ /* Double-check that the atom has been vaporized */
+ ret = H5Pclose(tmpl1);
+ VERIFY(ret, FAIL, "H5Pclose");
+#endif
+
+ if (h5_using_default_driver(NULL)) {
+
+ /* Create a new file with a non-standard file-creation template */
+ tmpl1 = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(tmpl1, FAIL, "H5Pcreate");
+
+ /* Try setting some bad userblock sizes */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pset_userblock(tmpl1, BAD_USERBLOCK_SIZE1);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pset_userblock");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pset_userblock(tmpl1, BAD_USERBLOCK_SIZE2);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pset_userblock");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pset_userblock(tmpl1, BAD_USERBLOCK_SIZE3);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pset_userblock");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pset_userblock(tmpl1, BAD_USERBLOCK_SIZE4);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pset_userblock");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pset_userblock(tmpl1, BAD_USERBLOCK_SIZE5);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pset_userblock");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pset_userblock(tmpl1, BAD_USERBLOCK_SIZE6);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pset_userblock");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pset_userblock(tmpl1, BAD_USERBLOCK_SIZE7);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pset_userblock");
+
+ /* Set the new file-creation parameters */
+ ret = H5Pset_userblock(tmpl1, F2_USERBLOCK_SIZE);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+
+ ret = H5Pset_sizes(tmpl1, (size_t)F2_OFFSET_SIZE, (size_t)F2_LENGTH_SIZE);
+ CHECK(ret, FAIL, "H5Pset_sizes");
+
+ ret = H5Pset_sym_k(tmpl1, F2_SYM_INTERN_K, F2_SYM_LEAF_K);
+ CHECK(ret, FAIL, "H5Pset_sym_k");
+
+ /*
+ * Try to create second file, with non-standard file-creation template
+ * params.
+ */
+ fid2 = H5Fcreate(FILE2, H5F_ACC_TRUNC, tmpl1, H5P_DEFAULT);
+ CHECK(fid2, FAIL, "H5Fcreate");
+
+ /* Release file-creation template */
+ ret = H5Pclose(tmpl1);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Make certain we can create a dataset properly in the file with the userblock */
+ {
+ hid_t dataset_id, dataspace_id; /* identifiers */
+ hsize_t dims[F2_RANK];
+ unsigned data[F2_DIM0][F2_DIM1];
+ unsigned i, j;
+
+ /* Create the data space for the dataset. */
+ dims[0] = F2_DIM0;
+ dims[1] = F2_DIM1;
+ dataspace_id = H5Screate_simple(F2_RANK, dims, NULL);
+ CHECK(dataspace_id, FAIL, "H5Screate_simple");
+
+ /* Create the dataset. */
+ dataset_id = H5Dcreate2(fid2, F2_DSET, H5T_NATIVE_UINT, dataspace_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT);
+ CHECK(dataset_id, FAIL, "H5Dcreate2");
+
+ for (i = 0; i < F2_DIM0; i++)
+ for (j = 0; j < F2_DIM1; j++)
+ data[i][j] = i * 10 + j;
+
+ /* Write data to the new dataset */
+ ret = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* End access to the dataset and release resources used by it. */
+ ret = H5Dclose(dataset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Terminate access to the data space. */
+ ret = H5Sclose(dataspace_id);
+ CHECK(ret, FAIL, "H5Sclose");
+ }
+
+ /* Get the file-creation template */
+ tmpl1 = H5Fget_create_plist(fid2);
+ CHECK(tmpl1, FAIL, "H5Fget_create_plist");
+
+ /* Get the file-creation parameters */
+ ret = H5Pget_userblock(tmpl1, &ublock);
+ CHECK(ret, FAIL, "H5Pget_userblock");
+ VERIFY(ublock, F2_USERBLOCK_SIZE, "H5Pget_userblock");
+
+ ret = H5Pget_sizes(tmpl1, &parm, &parm2);
+ CHECK(ret, FAIL, "H5Pget_sizes");
+ VERIFY(parm, F2_OFFSET_SIZE, "H5Pget_sizes");
+ VERIFY(parm2, F2_LENGTH_SIZE, "H5Pget_sizes");
+
+ ret = H5Pget_sym_k(tmpl1, &iparm, &iparm2);
+ CHECK(ret, FAIL, "H5Pget_sym_k");
+ VERIFY(iparm, F2_SYM_INTERN_K, "H5Pget_sym_k");
+ VERIFY(iparm2, F2_SYM_LEAF_K, "H5Pget_sym_k");
+
+ /* Clone the file-creation template */
+ tmpl2 = H5Pcopy(tmpl1);
+ CHECK(tmpl2, FAIL, "H5Pcopy");
+
+ /* Release file-creation template */
+ ret = H5Pclose(tmpl1);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Set the new file-creation parameter */
+ ret = H5Pset_userblock(tmpl2, F3_USERBLOCK_SIZE);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+
+ /*
+ * Try to create second file, with non-standard file-creation template
+ * params
+ */
+ fid3 = H5Fcreate(FILE3, H5F_ACC_TRUNC, tmpl2, H5P_DEFAULT);
+ CHECK(fid3, FAIL, "H5Fcreate");
+
+ /* Release file-creation template */
+ ret = H5Pclose(tmpl2);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Get the file-creation template */
+ tmpl1 = H5Fget_create_plist(fid3);
+ CHECK(tmpl1, FAIL, "H5Fget_create_plist");
+
+ /* Get the file-creation parameters */
+ ret = H5Pget_userblock(tmpl1, &ublock);
+ CHECK(ret, FAIL, "H5Pget_userblock");
+ VERIFY(ublock, F3_USERBLOCK_SIZE, "H5Pget_userblock");
+
+ ret = H5Pget_sizes(tmpl1, &parm, &parm2);
+ CHECK(ret, FAIL, "H5Pget_sizes");
+ VERIFY(parm, F3_OFFSET_SIZE, "H5Pget_sizes");
+ VERIFY(parm2, F3_LENGTH_SIZE, "H5Pget_sizes");
+
+ ret = H5Pget_sym_k(tmpl1, &iparm, &iparm2);
+ CHECK(ret, FAIL, "H5Pget_sym_k");
+ VERIFY(iparm, F3_SYM_INTERN_K, "H5Pget_sym_k");
+ VERIFY(iparm2, F3_SYM_LEAF_K, "H5Pget_sym_k");
+
+ /* Release file-creation template */
+ ret = H5Pclose(tmpl1);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close second file */
+ ret = H5Fclose(fid2);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Close third file */
+ ret = H5Fclose(fid3);
+ CHECK(ret, FAIL, "H5Fclose");
+ }
+
+ /* Close first file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_file_create() */
+
+/****************************************************************
+**
+** test_file_open(): Low-level file open I/O test routine.
+**
+****************************************************************/
+/* env_h5_drvr: driver name taken from the environment (may be NULL); the test
+ * only runs under the sec2/default driver, per the guard below. */
+static void
+test_file_open(const char *env_h5_drvr)
+{
+ hid_t fid1; /*HDF5 File IDs */
+#if 0
+ /* NOTE(review): these IDs are only used by the disabled close-degree
+ * section at the bottom of this function */
+ hid_t fid2;
+ hid_t did; /*dataset ID */
+ hid_t fapl_id; /*file access property list ID */
+#endif
+ hid_t tmpl1; /*file creation templates */
+ hsize_t ublock; /*sizeof user block */
+ size_t parm; /*file-creation parameters */
+ size_t parm2; /*file-creation parameters */
+ unsigned iparm;
+ unsigned iparm2;
+ unsigned intent;
+ herr_t ret; /*generic return value */
+
+ /*
+ * Test single file open
+ */
+
+ /* Only run this test with sec2/default driver */
+ if (!h5_using_default_driver(env_h5_drvr))
+ return;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Low-Level File Opening I/O\n"));
+
+ /* Open first file (created earlier by test_file_create with the F2 creation
+ * parameters, which are verified below) */
+ fid1 = H5Fopen(FILE2, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fopen");
+
+ /* Get the intent */
+ ret = H5Fget_intent(fid1, &intent);
+ CHECK(ret, FAIL, "H5Fget_intent");
+ VERIFY(intent, H5F_ACC_RDWR, "H5Fget_intent");
+
+ /* Get the file-creation template */
+ tmpl1 = H5Fget_create_plist(fid1);
+ CHECK(tmpl1, FAIL, "H5Fget_create_plist");
+
+ /* Get the file-creation parameters */
+ ret = H5Pget_userblock(tmpl1, &ublock);
+ CHECK(ret, FAIL, "H5Pget_userblock");
+ VERIFY(ublock, F2_USERBLOCK_SIZE, "H5Pget_userblock");
+
+ ret = H5Pget_sizes(tmpl1, &parm, &parm2);
+ CHECK(ret, FAIL, "H5Pget_sizes");
+ VERIFY(parm, F2_OFFSET_SIZE, "H5Pget_sizes");
+ VERIFY(parm2, F2_LENGTH_SIZE, "H5Pget_sizes");
+
+ ret = H5Pget_sym_k(tmpl1, &iparm, &iparm2);
+ CHECK(ret, FAIL, "H5Pget_sym_k");
+ VERIFY(iparm, F2_SYM_INTERN_K, "H5Pget_sym_k");
+ VERIFY(iparm2, F2_SYM_LEAF_K, "H5Pget_sym_k");
+
+ /* Release file-creation template */
+ ret = H5Pclose(tmpl1);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close first file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /*
+ * Test two file opens: one is opened H5F_ACC_RDONLY and H5F_CLOSE_WEAK.
+ * It's closed with an object left open. Then another is opened
+ * H5F_ACC_RDWR, which should fail.
+ */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing 2 File Openings - SKIPPED for now due to no file close degree support\n"));
+ /* NOTE(review): the section below depends on H5Pset_fclose_degree, which the
+ * connector under test does not support (see MESSAGE above) -- confirm
+ * support before re-enabling */
+#if 0
+ /* Create file access property list */
+ fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl_id, FAIL, "H5Pcreate");
+
+ /* Set file close mode to H5F_CLOSE_WEAK */
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_WEAK);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ /* Open file for first time */
+ fid1 = H5Fopen(FILE2, H5F_ACC_RDONLY, fapl_id);
+ CHECK(fid1, FAIL, "H5Fopen");
+
+ /* Check the intent */
+ ret = H5Fget_intent(fid1, &intent);
+ CHECK(ret, FAIL, "H5Fget_intent");
+ VERIFY(intent, H5F_ACC_RDONLY, "H5Fget_intent");
+
+ /* Open dataset */
+ did = H5Dopen2(fid1, F2_DSET, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dopen2");
+
+ /* Check that the intent works even if NULL is passed in */
+ ret = H5Fget_intent(fid1, NULL);
+ CHECK(ret, FAIL, "H5Fget_intent");
+
+ /* Close first open */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Open file for second time, which should fail. */
+ H5E_BEGIN_TRY
+ {
+ fid2 = H5Fopen(FILE2, H5F_ACC_RDWR, fapl_id);
+ }
+ H5E_END_TRY;
+ VERIFY(fid2, FAIL, "H5Fopen");
+
+ /* Check that the intent fails for an invalid ID */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Fget_intent(fid1, &intent);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Fget_intent");
+
+ /* Close dataset from first open */
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Pclose(fapl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+#endif
+} /* test_file_open() */
+
+/****************************************************************
+**
+** test_file_reopen(): File reopen test routine.
+**
+****************************************************************/
+/* Round-trip check for H5Freopen(): create a file with one dataset, reopen the
+ * file through a second ID, and verify the dataset is reachable via the
+ * reopened ID while both IDs are live. */
+static void
+test_file_reopen(void)
+{
+ hid_t fid = -1; /* file ID from initial open */
+ hid_t rfid = -1; /* file ID from reopen */
+ hid_t did = -1; /* dataset ID (both opens) */
+ hid_t sid = -1; /* dataspace ID for dataset creation */
+ hsize_t dims = 6; /* dataspace size */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing File Re-opening\n"));
+
+ /* Create file via first ID */
+ fid = H5Fcreate(REOPEN_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK_I(fid, "H5Fcreate");
+
+ /* Create a dataset in the file */
+ sid = H5Screate_simple(1, &dims, &dims);
+ CHECK_I(sid, "H5Screate_simple"); /* fixed: statement was missing its semicolon */
+ did = H5Dcreate2(fid, REOPEN_DSET, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK_I(did, "H5Dcreate2");
+
+ /* Close dataset and dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Reopen the file with a different file ID */
+ rfid = H5Freopen(fid);
+ CHECK_I(rfid, "H5Freopen");
+
+ /* Reopen the dataset through the reopen file ID */
+ did = H5Dopen2(rfid, REOPEN_DSET, H5P_DEFAULT);
+ CHECK_I(did, "H5Dopen2");
+
+ /* Close and clean up */
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+ ret = H5Fclose(rfid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Remove the test file; status was previously ignored */
+ ret = H5Fdelete(REOPEN_FILE, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Fdelete");
+
+} /* test_file_reopen() */
+
+/****************************************************************
+**
+** test_file_close(): low-level file close test routine.
+** It mainly tests behavior with close degree.
+**
+*****************************************************************/
+/* Exercises H5Fclose() behavior under the four file-close degrees
+ * (STRONG/SEMI/WEAK/DEFAULT). Currently a stub: the body is compiled out
+ * because close-degree support is unavailable (see MESSAGE below). */
+static void
+test_file_close(void)
+{
+#if 0
+ hid_t fid1, fid2;
+ hid_t fapl_id, access_id;
+ hid_t dataset_id, group_id1, group_id2, group_id3;
+ H5F_close_degree_t fc_degree;
+ herr_t ret;
+#endif
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing File Closing with file close degrees - SKIPPED for now due to no file close degree "
+ "support\n"));
+#if 0
+ /* Test behavior while opening file multiple times with different
+ * file close degree value
+ */
+ fid1 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl_id, FAIL, "H5Pcreate");
+
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_STRONG);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ ret = H5Pget_fclose_degree(fapl_id, &fc_degree);
+ CHECK(ret, FAIL, "H5Pget_fclose_degree"); /* fixed: status was unchecked before VERIFY */
+ VERIFY(fc_degree, H5F_CLOSE_STRONG, "H5Pget_fclose_degree");
+
+ /* should fail */
+ H5E_BEGIN_TRY
+ {
+ fid2 = H5Fopen(FILE1, H5F_ACC_RDWR, fapl_id);
+ }
+ H5E_END_TRY;
+ VERIFY(fid2, FAIL, "H5Fopen");
+
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_DEFAULT);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ /* should succeed */
+ fid2 = H5Fopen(FILE1, H5F_ACC_RDWR, fapl_id);
+ CHECK(fid2, FAIL, "H5Fopen");
+
+ /* Close first open */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Close second open */
+ ret = H5Fclose(fid2);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Test behavior while opening file multiple times with different file
+ * close degree
+ */
+ fid1 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_WEAK);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ ret = H5Pget_fclose_degree(fapl_id, &fc_degree);
+ CHECK(ret, FAIL, "H5Pget_fclose_degree"); /* fixed: status was unchecked before VERIFY */
+ VERIFY(fc_degree, H5F_CLOSE_WEAK, "H5Pget_fclose_degree");
+
+ /* should succeed */
+ fid2 = H5Fopen(FILE1, H5F_ACC_RDWR, fapl_id);
+ CHECK(fid2, FAIL, "H5Fopen");
+
+ /* Close first open */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Close second open */
+ ret = H5Fclose(fid2);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Test behavior while opening file multiple times with file close
+ * degree STRONG */
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_STRONG);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ fid1 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_WEAK);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ /* should fail */
+ H5E_BEGIN_TRY
+ {
+ fid2 = H5Fopen(FILE1, H5F_ACC_RDWR, fapl_id);
+ }
+ H5E_END_TRY;
+ VERIFY(fid2, FAIL, "H5Fopen");
+
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_STRONG);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ /* should succeed */
+ fid2 = H5Fopen(FILE1, H5F_ACC_RDWR, fapl_id);
+ CHECK(fid2, FAIL, "H5Fopen");
+
+ /* Create a dataset and a group in each file open respectively */
+ create_objects(fid1, fid2, NULL, NULL, NULL, NULL);
+
+ /* Close first open */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Close second open */
+ ret = H5Fclose(fid2);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Test behavior while opening file multiple times with file close
+ * degree SEMI */
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_SEMI);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ fid1 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_DEFAULT);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ /* should fail */
+ H5E_BEGIN_TRY
+ {
+ fid2 = H5Fopen(FILE1, H5F_ACC_RDWR, fapl_id);
+ }
+ H5E_END_TRY;
+ VERIFY(fid2, FAIL, "H5Fopen");
+
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_SEMI);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ /* should succeed */
+ fid2 = H5Fopen(FILE1, H5F_ACC_RDWR, fapl_id);
+ CHECK(fid2, FAIL, "H5Fopen");
+
+ /* Create a dataset and a group in each file open respectively */
+ create_objects(fid1, fid2, &dataset_id, &group_id1, &group_id2, &group_id3);
+
+ /* Close first open, should fail since it is SEMI and objects are
+ * still open. */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Fclose(fid1);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Fclose");
+
+ /* Close second open, should fail since it is SEMI and objects are
+ * still open. */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Fclose(fid2);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Fclose");
+
+ ret = H5Dclose(dataset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close first open */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ ret = H5Gclose(group_id1);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ ret = H5Gclose(group_id2);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close second open, should fail since it is SEMI and one group ID is
+ * still open. */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Fclose(fid2);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Fclose");
+
+ /* Same check with H5Idec_ref() (should fail also) */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Idec_ref(fid2);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Idec_ref");
+
+ ret = H5Gclose(group_id3);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close second open again. Should succeed. */
+ ret = H5Fclose(fid2);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Test behavior while opening file multiple times with file close
+ * degree WEAK */
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_WEAK);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ fid1 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_SEMI);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ /* should fail */
+ H5E_BEGIN_TRY
+ {
+ fid2 = H5Fopen(FILE1, H5F_ACC_RDWR, fapl_id);
+ }
+ H5E_END_TRY;
+ VERIFY(fid2, FAIL, "H5Fopen");
+
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_DEFAULT);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ /* should succeed */
+ fid2 = H5Fopen(FILE1, H5F_ACC_RDWR, fapl_id);
+ CHECK(fid2, FAIL, "H5Fopen");
+
+ /* Create a dataset and a group in each file open respectively */
+ create_objects(fid1, fid2, &dataset_id, &group_id1, &group_id2, &group_id3);
+
+ /* Create more new files and test object count and ID list functions */
+ test_obj_count_and_id(fid1, fid2, dataset_id, group_id1, group_id2, group_id3);
+
+ /* Close first open */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Close second open. File will be finally closed after all objects
+ * are closed. */
+ ret = H5Fclose(fid2);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ ret = H5Dclose(dataset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Gclose(group_id1);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ ret = H5Gclose(group_id2);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ ret = H5Gclose(group_id3);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Test behavior while opening file multiple times with file close
+ * degree DEFAULT */
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_DEFAULT);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ fid1 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_SEMI);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ /* should fail */
+ H5E_BEGIN_TRY
+ {
+ fid2 = H5Fopen(FILE1, H5F_ACC_RDWR, fapl_id);
+ }
+ H5E_END_TRY;
+ VERIFY(fid2, FAIL, "H5Fopen");
+
+ ret = H5Pset_fclose_degree(fapl_id, H5F_CLOSE_DEFAULT);
+ CHECK(ret, FAIL, "H5Pset_fclose_degree");
+
+ /* should succeed */
+ fid2 = H5Fopen(FILE1, H5F_ACC_RDWR, fapl_id);
+ CHECK(fid2, FAIL, "H5Fopen");
+
+ /* Create a dataset and a group in each file open respectively */
+ create_objects(fid1, fid2, &dataset_id, &group_id1, &group_id2, &group_id3);
+
+ access_id = H5Fget_access_plist(fid1);
+ CHECK(access_id, FAIL, "H5Fget_access_plist");
+
+ ret = H5Pget_fclose_degree(access_id, &fc_degree);
+ CHECK(ret, FAIL, "H5Pget_fclose_degree");
+
+ /* DEFAULT resolves to a driver-chosen degree; close in whatever order
+ * that degree requires */
+ switch (fc_degree) {
+ case H5F_CLOSE_STRONG:
+ /* Close first open */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+ /* Close second open */
+ ret = H5Fclose(fid2);
+ CHECK(ret, FAIL, "H5Fclose");
+ break;
+ case H5F_CLOSE_SEMI:
+ /* Close first open */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+ ret = H5Dclose(dataset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Gclose(group_id1);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Gclose(group_id2);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Gclose(group_id3);
+ CHECK(ret, FAIL, "H5Gclose");
+ /* Close second open */
+ ret = H5Fclose(fid2);
+ CHECK(ret, FAIL, "H5Fclose");
+ break;
+ case H5F_CLOSE_WEAK:
+ /* Close first open */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+ /* Close second open */
+ ret = H5Fclose(fid2);
+ CHECK(ret, FAIL, "H5Fclose");
+ ret = H5Dclose(dataset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Gclose(group_id1);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Gclose(group_id2);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Gclose(group_id3);
+ CHECK(ret, FAIL, "H5Gclose");
+ break;
+ case H5F_CLOSE_DEFAULT:
+ default:
+ CHECK(fc_degree, H5F_CLOSE_DEFAULT, "H5Pget_fclose_degree");
+ break;
+ }
+
+ /* Close file access property list */
+ ret = H5Pclose(fapl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(access_id);
+ CHECK(ret, FAIL, "H5Pclose");
+#endif
+}
+
+/****************************************************************
+**
+** create_objects(): routine called by test_file_close to create
+** a dataset and a group in file.
+**
+****************************************************************/
+#if 0
+/* Helper for test_file_close(): creates one dataset through fid1 and one
+ * group (opened three times) through fid2, verifying the per-file open-object
+ * counts before and after.
+ *
+ * ret_did, ret_gid1..3: out-parameters returning the dataset ID and the three
+ * group IDs; any of them may be NULL, in which case the corresponding ID is
+ * intentionally leaked to the caller's file handle (used by the close-degree
+ * tests).
+ */
+static void
+create_objects(hid_t fid1, hid_t fid2, hid_t *ret_did, hid_t *ret_gid1, hid_t *ret_gid2, hid_t *ret_gid3)
+{
+ ssize_t oid_count;
+ herr_t ret;
+
+ /* Check reference counts of file IDs and opened object IDs.
+ * The verification is hard-coded. If in any case, this testing
+ * is changed, remember to check this part and update the macros.
+ */
+ {
+ oid_count = H5Fget_obj_count(fid1, H5F_OBJ_ALL);
+ CHECK(oid_count, FAIL, "H5Fget_obj_count");
+ VERIFY(oid_count, OBJ_ID_COUNT_2, "H5Fget_obj_count");
+
+ oid_count = H5Fget_obj_count(fid1, H5F_OBJ_DATASET | H5F_OBJ_GROUP | H5F_OBJ_DATATYPE | H5F_OBJ_ATTR);
+ CHECK(oid_count, FAIL, "H5Fget_obj_count");
+ VERIFY(oid_count, OBJ_ID_COUNT_0, "H5Fget_obj_count");
+
+ oid_count = H5Fget_obj_count(fid2, H5F_OBJ_ALL);
+ CHECK(oid_count, FAIL, "H5Fget_obj_count");
+ VERIFY(oid_count, OBJ_ID_COUNT_2, "H5Fget_obj_count");
+
+ oid_count = H5Fget_obj_count(fid2, H5F_OBJ_DATASET | H5F_OBJ_GROUP | H5F_OBJ_DATATYPE | H5F_OBJ_ATTR);
+ CHECK(oid_count, FAIL, "H5Fget_obj_count");
+ VERIFY(oid_count, OBJ_ID_COUNT_0, "H5Fget_obj_count");
+ }
+
+ /* create a dataset in the first file open */
+ {
+ hid_t dataset_id, dataspace_id; /* identifiers */
+ hsize_t dims[F2_RANK];
+ unsigned data[F2_DIM0][F2_DIM1];
+ unsigned i, j;
+
+ /* Create the data space for the dataset. */
+ dims[0] = F2_DIM0;
+ dims[1] = F2_DIM1;
+ dataspace_id = H5Screate_simple(F2_RANK, dims, NULL);
+ CHECK(dataspace_id, FAIL, "H5Screate_simple");
+
+ /* Create the dataset. */
+ dataset_id =
+ H5Dcreate2(fid1, "/dset", H5T_NATIVE_UINT, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset_id, FAIL, "H5Dcreate2");
+
+ for (i = 0; i < F2_DIM0; i++)
+ for (j = 0; j < F2_DIM1; j++)
+ data[i][j] = i * 10 + j;
+
+ /* Write data to the new dataset */
+ ret = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Hand the dataset ID to the caller (left open on purpose if NULL) */
+ if (ret_did != NULL)
+ *ret_did = dataset_id;
+
+ /* Terminate access to the data space. */
+ ret = H5Sclose(dataspace_id);
+ CHECK(ret, FAIL, "H5Sclose");
+ }
+
+ /* Create a group in the second file open */
+ {
+ hid_t gid1, gid2, gid3;
+ gid1 = H5Gcreate2(fid2, "/group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(gid1, FAIL, "H5Gcreate2");
+ if (ret_gid1 != NULL)
+ *ret_gid1 = gid1;
+
+ gid2 = H5Gopen2(fid2, "/group", H5P_DEFAULT);
+ CHECK(gid2, FAIL, "H5Gopen2");
+ if (ret_gid2 != NULL)
+ *ret_gid2 = gid2;
+
+ gid3 = H5Gopen2(fid2, "/group", H5P_DEFAULT);
+ CHECK(gid3, FAIL, "H5Gopen2");
+ if (ret_gid3 != NULL)
+ *ret_gid3 = gid3;
+ }
+
+ /* Check reference counts of file IDs and opened object IDs.
+ * The verification is hard-coded. If in any case, this testing
+ * is changed, remember to check this part and update the macros.
+ */
+ {
+ oid_count = H5Fget_obj_count(fid1, H5F_OBJ_ALL);
+ CHECK(oid_count, FAIL, "H5Fget_obj_count");
+ VERIFY(oid_count, OBJ_ID_COUNT_6, "H5Fget_obj_count");
+
+ oid_count = H5Fget_obj_count(fid1, H5F_OBJ_DATASET | H5F_OBJ_GROUP | H5F_OBJ_DATATYPE | H5F_OBJ_ATTR);
+ CHECK(oid_count, FAIL, "H5Fget_obj_count");
+ VERIFY(oid_count, OBJ_ID_COUNT_4, "H5Fget_obj_count");
+
+ oid_count = H5Fget_obj_count(fid2, H5F_OBJ_ALL);
+ CHECK(oid_count, FAIL, "H5Fget_obj_count");
+ VERIFY(oid_count, OBJ_ID_COUNT_6, "H5Fget_obj_count");
+
+ oid_count = H5Fget_obj_count(fid2, H5F_OBJ_DATASET | H5F_OBJ_GROUP | H5F_OBJ_DATATYPE | H5F_OBJ_ATTR);
+ CHECK(oid_count, FAIL, "H5Fget_obj_count");
+ VERIFY(oid_count, OBJ_ID_COUNT_4, "H5Fget_obj_count");
+ }
+}
+#endif
+
+/****************************************************************
+**
+** test_get_obj_ids(): Test the bug and the fix for Jira 8528.
+** H5Fget_obj_ids overfilled the list of
+** object IDs by one. This is an enhancement
+** for test_obj_count_and_id().
+**
+****************************************************************/
+static void
+test_get_obj_ids(void)
+{
+ hid_t fid, gid[NGROUPS], dset[NDSETS];
+ hid_t filespace;
+ hsize_t file_dims[F2_RANK] = {F2_DIM0, F2_DIM1};
+ ssize_t oid_count, ret_count;
+ hid_t *oid_list = NULL;
+ herr_t ret;
+ int i, m, n;
+ ssize_t oid_list_size = NDSETS;
+ char gname[64], dname[64];
+
+ MESSAGE(5, ("Testing retrieval of object IDs\n"));
+
+ /* Create a new file */
+ fid = H5Fcreate(FILE7, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ filespace = H5Screate_simple(F2_RANK, file_dims, NULL);
+ CHECK(filespace, FAIL, "H5Screate_simple");
+
+ /* creates NGROUPS groups under the root group */
+ for (m = 0; m < NGROUPS; m++) {
+ HDsnprintf(gname, sizeof(gname), "group%d", m);
+ gid[m] = H5Gcreate2(fid, gname, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(gid[m], FAIL, "H5Gcreate2");
+ }
+
+ /* create NDSETS datasets under the root group */
+ for (n = 0; n < NDSETS; n++) {
+ HDsnprintf(dname, sizeof(dname), "dataset%d", n);
+ dset[n] = H5Dcreate2(fid, dname, H5T_NATIVE_INT, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset[n], FAIL, "H5Dcreate2");
+ }
+
+ /* The number of opened objects should be NGROUPS + NDSETS + 1. One is opened file. */
+ oid_count = H5Fget_obj_count(fid, H5F_OBJ_ALL);
+ CHECK(oid_count, FAIL, "H5Fget_obj_count");
+ VERIFY(oid_count, (NGROUPS + NDSETS + 1), "H5Fget_obj_count");
+
+ oid_list = (hid_t *)HDcalloc((size_t)oid_list_size, sizeof(hid_t));
+ CHECK_PTR(oid_list, "HDcalloc");
+
+ /* Call the public function H5F_get_obj_ids to use H5F__get_objects. User reported having problem here.
+ * that the returned size (ret_count) from H5Fget_obj_ids is one greater than the size passed in
+ * (oid_list_size) */
+ ret_count = H5Fget_obj_ids(fid, H5F_OBJ_ALL, (size_t)oid_list_size, oid_list);
+ CHECK(ret_count, FAIL, "H5Fget_obj_ids");
+ VERIFY(ret_count, oid_list_size, "H5Fget_obj_count");
+
+ /* Close all object IDs on the list except the file ID. The first ID is supposed to be file ID according
+ * to the library design */
+ for (i = 0; i < ret_count; i++) {
+ if (fid != oid_list[i]) {
+ ret = H5Oclose(oid_list[i]);
+ CHECK(ret, FAIL, "H5Oclose");
+ }
+ }
+
+ /* The number of opened objects should be NGROUPS + 1 + 1. The first one is opened file. The second one
+ * is the dataset ID left open from the previous around of H5Fget_obj_ids */
+ oid_count = H5Fget_obj_count(fid, H5F_OBJ_ALL);
+ CHECK(oid_count, FAIL, "H5Fget_obj_count");
+ VERIFY(oid_count, NGROUPS + 2, "H5Fget_obj_count");
+
+ /* Get the IDs of the left opened objects */
+ ret_count = H5Fget_obj_ids(fid, H5F_OBJ_ALL, (size_t)oid_list_size, oid_list);
+ CHECK(ret_count, FAIL, "H5Fget_obj_ids");
+ VERIFY(ret_count, oid_list_size, "H5Fget_obj_count");
+
+ /* Close all object IDs on the list except the file ID. The first ID is still the file ID */
+ for (i = 0; i < ret_count; i++) {
+ if (fid != oid_list[i]) {
+ ret = H5Oclose(oid_list[i]);
+ CHECK(ret, FAIL, "H5Oclose");
+ }
+ }
+
+ /* Close the dataspace and the file; statuses were previously ignored */
+ ret = H5Sclose(filespace);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ HDfree(oid_list);
+
+ /* Reopen the file to check whether H5Fget_obj_count and H5Fget_obj_ids still works
+ * when the file is closed first */
+ fid = H5Fopen(FILE7, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Open NDSETS datasets under the root group */
+ for (n = 0; n < NDSETS; n++) {
+ HDsnprintf(dname, sizeof(dname), "dataset%d", n);
+ dset[n] = H5Dopen2(fid, dname, H5P_DEFAULT);
+ CHECK(dset[n], FAIL, "H5Dopen2"); /* fixed: failure message said "H5Dcreate2" */
+ }
+
+ /* Close the file first */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+#ifndef WRONG_DATATYPE_OBJ_COUNT
+ /* Get the number of all opened objects */
+ oid_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_ALL);
+ CHECK(oid_count, FAIL, "H5Fget_obj_count");
+ VERIFY(oid_count, NDSETS, "H5Fget_obj_count");
+
+ oid_list = (hid_t *)HDcalloc((size_t)oid_count, sizeof(hid_t));
+ CHECK_PTR(oid_list, "HDcalloc");
+
+ /* Get the list of all opened objects */
+ ret_count = H5Fget_obj_ids((hid_t)H5F_OBJ_ALL, H5F_OBJ_ALL, (size_t)oid_count, oid_list);
+ CHECK(ret_count, FAIL, "H5Fget_obj_ids");
+ VERIFY(ret_count, NDSETS, "H5Fget_obj_ids");
+
+ H5E_BEGIN_TRY
+ {
+ /* Close all open objects with H5Oclose */
+ for (n = 0; n < oid_count; n++)
+ H5Oclose(oid_list[n]);
+ }
+ H5E_END_TRY;
+
+ HDfree(oid_list);
+#endif
+}
+
+/****************************************************************
+**
+** test_get_file_id(): Test H5Iget_file_id()
+**
+*****************************************************************/
+static void
+test_get_file_id(void)
+{
+#if 0
+ hid_t fid, fid2, fid3;
+ hid_t datatype_id, dataset_id, dataspace_id, group_id, attr_id;
+ hid_t plist;
+ hsize_t dims[F2_RANK];
+ unsigned intent;
+ herr_t ret;
+#endif
+
+ MESSAGE(5, ("Testing H5Iget_file_id - SKIPPED for now due to no H5Iget_file_id support\n"));
+#if 0
+ /* Create a file */
+ fid = H5Fcreate(FILE4, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Check the intent */
+ ret = H5Fget_intent(fid, &intent);
+ CHECK(ret, FAIL, "H5Fget_intent");
+ VERIFY(intent, H5F_ACC_RDWR, "H5Fget_intent");
+
+ /* Test H5Iget_file_id() */
+ check_file_id(fid, fid);
+
+ /* Create a group in the file. Make a duplicated file ID from the group.
+ * And close this duplicated ID
+ */
+ group_id = H5Gcreate2(fid, GRP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group_id, FAIL, "H5Gcreate2");
+
+ /* Test H5Iget_file_id() */
+ check_file_id(fid, group_id);
+
+ /* Close the file and get file ID from the group ID */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Test H5Iget_file_id() */
+ check_file_id((hid_t)-1, group_id);
+
+ ret = H5Gclose(group_id);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Open the file again. Test H5Iget_file_id() */
+ fid = H5Fopen(FILE4, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen"); /* fixed: failure message said "H5Fcreate" */
+
+ group_id = H5Gopen2(fid, GRP_NAME, H5P_DEFAULT);
+ CHECK(group_id, FAIL, "H5Gopen2");
+
+ /* Test H5Iget_file_id() */
+ check_file_id(fid, group_id);
+
+ /* Open the file for second time. Test H5Iget_file_id() */
+ fid3 = H5Freopen(fid);
+ CHECK(fid3, FAIL, "H5Freopen");
+
+ /* Test H5Iget_file_id() */
+ check_file_id(fid3, fid3);
+
+ ret = H5Fclose(fid3);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Create a dataset in the group. Make a duplicated file ID from the
+ * dataset. And close this duplicated ID.
+ */
+ dims[0] = F2_DIM0;
+ dims[1] = F2_DIM1;
+ dataspace_id = H5Screate_simple(F2_RANK, dims, NULL);
+ CHECK(dataspace_id, FAIL, "H5Screate_simple");
+
+ dataset_id =
+ H5Dcreate2(group_id, DSET_NAME, H5T_NATIVE_INT, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset_id, FAIL, "H5Dcreate2");
+
+ /* Test H5Iget_file_id() */
+ check_file_id(fid, dataset_id);
+
+ /* Create an attribute for the dataset. Make a duplicated file ID from
+ * this attribute. And close it.
+ */
+ attr_id = H5Acreate2(dataset_id, ATTR_NAME, H5T_NATIVE_INT, dataspace_id, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr_id, FAIL, "H5Acreate2"); /* fixed: checked stale 'ret' instead of attr_id */
+
+ /* Test H5Iget_file_id() */
+ check_file_id(fid, attr_id);
+
+ /* Create a named datatype. Make a duplicated file ID from
+ * this attribute. And close it.
+ */
+ datatype_id = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(datatype_id, FAIL, "H5Tcopy"); /* fixed: checked stale 'ret' instead of datatype_id */
+
+ ret = H5Tcommit2(fid, TYPE_NAME, datatype_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /* Test H5Iget_file_id() */
+ check_file_id(fid, datatype_id);
+
+ /* Create a property list and try to get file ID from it.
+ * Supposed to fail.
+ */
+ plist = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(plist, FAIL, "H5Pcreate");
+
+ H5E_BEGIN_TRY
+ {
+ fid2 = H5Iget_file_id(plist);
+ }
+ H5E_END_TRY;
+ VERIFY(fid2, FAIL, "H5Iget_file_id");
+
+ /* Close objects */
+ ret = H5Pclose(plist);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ ret = H5Tclose(datatype_id);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ ret = H5Aclose(attr_id);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ ret = H5Sclose(dataspace_id);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Dclose(dataset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Gclose(group_id);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+#endif
+}
+
+/****************************************************************
+**
+** check_file_id(): Internal function of test_get_file_id()
+**
+*****************************************************************/
+#if 0
+static void
+check_file_id(hid_t fid, hid_t object_id)
+{
+    hid_t  dup_fid; /* File ID duplicated from the object */
+    herr_t status;  /* Generic return value */
+
+    /* Ask the library for a (duplicated) file ID corresponding to the
+     * given object, even though users are not normally expected to do so.
+     */
+    dup_fid = H5Iget_file_id(object_id);
+
+    /* When the caller supplies a valid file ID, the duplicate must match it;
+     * otherwise it merely has to be a valid (non-negative) ID.
+     */
+    if (fid >= 0)
+        VERIFY(dup_fid, fid, "H5Iget_file_id");
+    else
+        CHECK(dup_fid, FAIL, "H5Iget_file_id");
+
+    /* Release the duplicated file ID */
+    status = H5Fclose(dup_fid);
+    CHECK(status, FAIL, "H5Fclose");
+}
+#endif
+
+/****************************************************************
+**
+** test_obj_count_and_id(): test object count and ID list functions.
+**
+****************************************************************/
+#if 0
+static void
+test_obj_count_and_id(hid_t fid1, hid_t fid2, hid_t did, hid_t gid1, hid_t gid2, hid_t gid3)
+{
+    hid_t fid3, fid4;             /* IDs of the two extra files created by this routine */
+    ssize_t oid_count, ret_count; /* Reported open-object count / # of IDs retrieved */
+    herr_t ret;                   /* Generic return value */
+
+    /* Create two new files */
+    fid3 = H5Fcreate(FILE2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid3, FAIL, "H5Fcreate");
+    fid4 = H5Fcreate(FILE3, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid4, FAIL, "H5Fcreate");
+
+    /* NOTE: the OBJ_ID_COUNT_* expectations below assume the caller still
+     * holds its six IDs open (fid1, fid2, the dataset and three groups),
+     * plus the fid3/fid4 created above: 4 files + 1 dataset + 3 groups = 8.
+     */
+
+    /* test object count of all files IDs open */
+    oid_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_FILE);
+    CHECK(oid_count, FAIL, "H5Fget_obj_count");
+    VERIFY(oid_count, OBJ_ID_COUNT_4, "H5Fget_obj_count");
+
+    /* test object count of all datasets open */
+    oid_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_DATASET);
+    CHECK(oid_count, FAIL, "H5Fget_obj_count");
+    VERIFY(oid_count, OBJ_ID_COUNT_1, "H5Fget_obj_count");
+
+    /* test object count of all groups open */
+    oid_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_GROUP);
+    CHECK(oid_count, FAIL, "H5Fget_obj_count");
+    VERIFY(oid_count, OBJ_ID_COUNT_3, "H5Fget_obj_count");
+
+    /* test object count of all named datatypes open */
+    oid_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_DATATYPE);
+    CHECK(oid_count, FAIL, "H5Fget_obj_count");
+    VERIFY(oid_count, OBJ_ID_COUNT_0, "H5Fget_obj_count");
+
+    /* test object count of all attributes open */
+    oid_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_ATTR);
+    CHECK(oid_count, FAIL, "H5Fget_obj_count");
+    VERIFY(oid_count, OBJ_ID_COUNT_0, "H5Fget_obj_count");
+
+    /* test object count of all objects currently open */
+    oid_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_ALL);
+    CHECK(oid_count, FAIL, "H5Fget_obj_count");
+    VERIFY(oid_count, OBJ_ID_COUNT_8, "H5Fget_obj_count");
+
+    /* Retrieve the open IDs and verify each one is an expected, known ID */
+    if (oid_count > 0) {
+        hid_t *oid_list;
+
+        oid_list = (hid_t *)HDcalloc((size_t)oid_count, sizeof(hid_t));
+        if (oid_list != NULL) {
+            int i;
+
+            ret_count = H5Fget_obj_ids((hid_t)H5F_OBJ_ALL, H5F_OBJ_ALL, (size_t)oid_count, oid_list);
+            CHECK(ret_count, FAIL, "H5Fget_obj_ids");
+
+            for (i = 0; i < oid_count; i++) {
+                H5I_type_t id_type;
+
+                /* Each retrieved ID must match one of the IDs we know about */
+                id_type = H5Iget_type(oid_list[i]);
+                switch (id_type) {
+                    case H5I_FILE:
+                        if (oid_list[i] != fid1 && oid_list[i] != fid2 && oid_list[i] != fid3 &&
+                            oid_list[i] != fid4)
+                            ERROR("H5Fget_obj_ids");
+                        break;
+
+                    case H5I_GROUP:
+                        if (oid_list[i] != gid1 && oid_list[i] != gid2 && oid_list[i] != gid3)
+                            ERROR("H5Fget_obj_ids");
+                        break;
+
+                    case H5I_DATASET:
+                        VERIFY(oid_list[i], did, "H5Fget_obj_ids");
+                        break;
+
+                    case H5I_MAP:
+                        /* TODO: Not supported in native VOL connector yet */
+                        /* FALLTHROUGH - any map ID is treated as an error below */
+
+                    case H5I_UNINIT:
+                    case H5I_BADID:
+                    case H5I_DATATYPE:
+                    case H5I_DATASPACE:
+                    case H5I_ATTR:
+                    case H5I_VFL:
+                    case H5I_VOL:
+                    case H5I_GENPROP_CLS:
+                    case H5I_GENPROP_LST:
+                    case H5I_ERROR_CLASS:
+                    case H5I_ERROR_MSG:
+                    case H5I_ERROR_STACK:
+                    case H5I_SPACE_SEL_ITER:
+                    case H5I_EVENTSET:
+                    case H5I_NTYPES:
+                    default:
+                        ERROR("H5Fget_obj_ids");
+                } /* end switch */
+            }     /* end for */
+
+            HDfree(oid_list);
+        } /* end if */
+    }     /* end if */
+
+    /* close the two new files */
+    ret = H5Fclose(fid3);
+    CHECK(ret, FAIL, "H5Fclose");
+    ret = H5Fclose(fid4);
+    CHECK(ret, FAIL, "H5Fclose");
+}
+#endif
+
+/****************************************************************
+**
+** test_file_perm(): low-level file test routine.
+** This test verifies that a file can be opened for both
+** read-only and read-write access and things will be handled
+** appropriately.
+**
+*****************************************************************/
+static void
+test_file_perm(void)
+{
+    hid_t  fid_rw;  /* File handle opened with read-write permission */
+    hid_t  fid_ro;  /* Same file opened with read-only permission */
+    hid_t  sid;     /* Scalar dataspace */
+    hid_t  did;     /* Dataset handle */
+    herr_t status;  /* Generic return value */
+
+    /* Announce the test */
+    MESSAGE(5, ("Testing Low-Level File Permissions\n"));
+
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Make the file with read-write access */
+    fid_rw = H5Fcreate(FILE2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid_rw, FAIL, "H5Fcreate");
+
+    /* Creating a dataset through the read-write handle must succeed */
+    did = H5Dcreate2(fid_rw, F2_DSET, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dcreate2");
+
+    status = H5Dclose(did);
+    CHECK(status, FAIL, "H5Dclose");
+
+    /* Re-open the same file with read-only access */
+    fid_ro = H5Fopen(FILE2, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid_ro, FAIL, "H5Fopen");
+
+    /* Creating a dataset through the read-only handle must fail */
+    H5E_BEGIN_TRY
+    {
+        did = H5Dcreate2(fid_ro, F2_DSET, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(did, FAIL, "H5Dcreate2");
+    if (did != FAIL) {
+        /* Unexpectedly succeeded -- close the dataset so nothing leaks */
+        status = H5Dclose(did);
+        CHECK(status, FAIL, "H5Dclose");
+    } /* end if */
+
+    /* Close both file handles and the dataspace */
+    status = H5Fclose(fid_ro);
+    CHECK(status, FAIL, "H5Fclose");
+
+    status = H5Fclose(fid_rw);
+    CHECK(status, FAIL, "H5Fclose");
+
+    status = H5Sclose(sid);
+    CHECK(status, FAIL, "H5Sclose");
+
+} /* end test_file_perm() */
+
+/****************************************************************
+**
+** test_file_perm2(): low-level file test routine.
+** This test verifies that no object can be created in a
+** file that is opened for read-only.
+**
+*****************************************************************/
+static void
+test_file_perm2(void)
+{
+    hid_t  fid_rw;  /* File created with read-write permission */
+    hid_t  fid_ro;  /* Same file re-opened with read-only permission */
+    hid_t  sid;     /* Scalar dataspace */
+    hid_t  gid;     /* Group handle (creation expected to fail) */
+    hid_t  did;     /* Dataset handle (creation expected to fail) */
+    hid_t  tid;     /* Datatype handle */
+    hid_t  aid;     /* Attribute handle (creation expected to fail) */
+    herr_t status;  /* Generic return value */
+
+    /* Announce the test */
+    MESSAGE(5, ("Testing Low-Level File Permissions again\n"));
+
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create the file read-write, then close it right away */
+    fid_rw = H5Fcreate(FILE2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid_rw, FAIL, "H5Fcreate");
+
+    status = H5Fclose(fid_rw);
+    CHECK(status, FAIL, "H5Fclose");
+
+    /* Re-open it read-only; every object creation below must fail */
+    fid_ro = H5Fopen(FILE2, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid_ro, FAIL, "H5Fopen");
+
+    /* Group creation on a read-only handle must fail */
+    H5E_BEGIN_TRY
+    {
+        gid = H5Gcreate2(fid_ro, "MY_GROUP", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(gid, FAIL, "H5Gcreate2");
+
+    /* Dataset creation on a read-only handle must fail */
+    H5E_BEGIN_TRY
+    {
+        did = H5Dcreate2(fid_ro, F2_DSET, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(did, FAIL, "H5Dcreate2");
+
+    /* Attribute creation on a read-only handle must fail */
+    H5E_BEGIN_TRY
+    {
+        aid = H5Acreate2(fid_ro, "MY_ATTR", H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(aid, FAIL, "H5Acreate2");
+
+    tid = H5Tcopy(H5T_NATIVE_SHORT);
+    CHECK(tid, FAIL, "H5Tcopy");
+
+    /* Committing a named datatype on a read-only handle must fail */
+    H5E_BEGIN_TRY
+    {
+        status = H5Tcommit2(fid_ro, "MY_DTYPE", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(status, FAIL, "H5Tcommit2");
+
+    /* Clean up the IDs that were successfully created */
+    status = H5Tclose(tid);
+    CHECK(status, FAIL, "H5Tclose");
+
+    status = H5Fclose(fid_ro);
+    CHECK(status, FAIL, "H5Fclose");
+
+    status = H5Sclose(sid);
+    CHECK(status, FAIL, "H5Sclose");
+} /* end test_file_perm2() */
+
+/****************************************************************
+**
+** test_file_is_accessible(): low-level file test routine.
+** Clone of test_file_ishdf5 but uses the newer VOL-enabled
+** H5Fis_accessible() API call.
+**
+*****************************************************************/
+#define FILE_IS_ACCESSIBLE "tfile_is_accessible"
+#define FILE_IS_ACCESSIBLE_NON_HDF5 "tfile_is_accessible_non_hdf5"
+static void
+test_file_is_accessible(const char *env_h5_drvr)
+{
+    /* env_h5_drvr: VFD name under test (presumably from the HDF5_DRIVER
+     * environment variable -- confirm against the test harness). */
+    hid_t fid = H5I_INVALID_HID;     /* File opened with read-write permission */
+    hid_t fcpl_id = H5I_INVALID_HID; /* File creation property list */
+    hid_t fapl_id = H5I_INVALID_HID; /* File access property list */
+#if 0
+    int fd; /* POSIX file descriptor */
+#endif
+    char filename[FILENAME_LEN];             /* Filename to use */
+    char non_hdf5_filename[FILENAME_LEN];    /* Base name of non-hdf5 file */
+    char non_hdf5_sb_filename[FILENAME_LEN]; /* Name of non-hdf5 superblock file */
+#if 0
+    ssize_t nbytes;         /* Number of bytes written */
+    unsigned u;             /* Local index variable */
+    unsigned char buf[1024];    /* Buffer of data to write */
+#endif
+    htri_t is_hdf5; /* Whether a file is an HDF5 file */
+#if 0
+    int posix_ret;  /* Return value from POSIX calls */
+#endif
+    hbool_t driver_is_default_compatible; /* Only consumed by the disabled (#if 0) section below */
+    herr_t ret; /* Return value from HDF5 calls */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Detection of HDF5 Files\n"));
+
+    /* Get FAPL */
+    fapl_id = h5_fileaccess();
+    CHECK(fapl_id, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Bail out early if compatibility with the default VFD can't be determined */
+    if (h5_driver_is_default_vfd_compatible(fapl_id, &driver_is_default_compatible) < 0) {
+        TestErrPrintf("Can't check if VFD is compatible with default VFD");
+        return;
+    }
+
+    /* Fix up filenames */
+    h5_fixname(FILE_IS_ACCESSIBLE, fapl_id, filename, sizeof(filename));
+    h5_fixname(FILE_IS_ACCESSIBLE_NON_HDF5, fapl_id, non_hdf5_filename, sizeof(non_hdf5_filename));
+    h5_fixname_superblock(FILE_IS_ACCESSIBLE_NON_HDF5, fapl_id, non_hdf5_sb_filename,
+                          sizeof(non_hdf5_sb_filename));
+
+    /****************/
+    /* Normal usage */
+    /****************/
+
+    /* Create a file */
+    fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Verify that the file is an HDF5 file */
+    is_hdf5 = H5Fis_accessible(filename, fapl_id);
+    VERIFY(is_hdf5, TRUE, "H5Fis_accessible");
+
+    /*****************************************/
+    /* Newly created file that is still open */
+    /*****************************************/
+
+    /* On Windows, file locking is mandatory so this check ensures that
+     * H5Fis_accessible() works on files that have an exclusive lock.
+     * Previous versions of this API call created an additional file handle
+     * and attempted to read through it, which will not work when locks
+     * are enforced by the OS.
+     */
+
+    /* Create a file and hold it open */
+    fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Verify that the file is an HDF5 file */
+    is_hdf5 = H5Fis_accessible(filename, fapl_id);
+    VERIFY(is_hdf5, TRUE, "H5Fis_accessible");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /*******************************/
+    /* Non-default user block size */
+    /*******************************/
+
+    /* This test is not currently working for the family VFD.
+     * There are failures when creating files with userblocks.
+     */
+    if (0 != HDstrcmp(env_h5_drvr, "family")) {
+        /* Create a file creation property list with a non-default user block size */
+        fcpl_id = H5Pcreate(H5P_FILE_CREATE);
+        CHECK(fcpl_id, H5I_INVALID_HID, "H5Pcreate");
+
+        ret = H5Pset_userblock(fcpl_id, (hsize_t)2048);
+        CHECK(ret, FAIL, "H5Pset_userblock");
+
+        /* Create file with non-default user block */
+        fid = H5Fcreate(filename, H5F_ACC_TRUNC, fcpl_id, fapl_id);
+        CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+        /* Release file-creation property list */
+        ret = H5Pclose(fcpl_id);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        /* Close file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+
+        /* Verify that the file is an HDF5 file */
+        is_hdf5 = H5Fis_accessible(filename, fapl_id);
+        VERIFY(is_hdf5, TRUE, "H5Fis_accessible");
+    } /* end if */
+    /* NOTE(review): the non-HDF5-file checks below are disabled (#if 0);
+     * they rely on raw POSIX I/O, presumably not usable through non-native
+     * VOL connectors -- confirm before re-enabling.
+     */
+#if 0
+    if (driver_is_default_compatible) {
+        /***********************/
+        /* EMPTY non-HDF5 file */
+        /***********************/
+
+        /* Create non-HDF5 file and check it */
+        fd = HDopen(non_hdf5_sb_filename, O_RDWR | O_CREAT | O_TRUNC, H5_POSIX_CREATE_MODE_RW);
+        CHECK(fd, (-1), "HDopen");
+
+        /* Close the file */
+        posix_ret = HDclose(fd);
+        CHECK(posix_ret, (-1), "HDclose");
+
+        /* Verify that the file is NOT an HDF5 file using the base filename */
+        is_hdf5 = H5Fis_accessible(non_hdf5_filename, fapl_id);
+        VERIFY(is_hdf5, FALSE, "H5Fis_accessible (empty non-HDF5 file)");
+
+        /***************************/
+        /* Non-empty non-HDF5 file */
+        /***************************/
+
+        /* Create non-HDF5 file and check it */
+        fd = HDopen(non_hdf5_sb_filename, O_RDWR | O_CREAT | O_TRUNC, H5_POSIX_CREATE_MODE_RW);
+        CHECK(fd, (-1), "HDopen");
+
+        /* Initialize information to write */
+        for (u = 0; u < 1024; u++)
+            buf[u] = (unsigned char)u;
+
+        /* Write some information */
+        nbytes = HDwrite(fd, buf, (size_t)1024);
+        VERIFY(nbytes, 1024, "HDwrite");
+
+        /* Close the file */
+        posix_ret = HDclose(fd);
+        CHECK(posix_ret, (-1), "HDclose");
+
+        /* Verify that the file is not an HDF5 file */
+        is_hdf5 = H5Fis_accessible(non_hdf5_filename, fapl_id);
+        VERIFY(is_hdf5, FALSE, "H5Fis_accessible (non-HDF5 file)");
+    }
+
+    /* Clean up files */
+    h5_delete_test_file(filename, fapl_id);
+    h5_delete_test_file(non_hdf5_filename, fapl_id);
+#endif
+    /* Remove the test file through the library */
+    H5Fdelete(filename, fapl_id);
+
+    /* Close property list */
+    ret = H5Pclose(fapl_id);
+    CHECK(ret, FAIL, "H5Pclose");
+
+} /* end test_file_is_accessible() */
+
+/****************************************************************
+**
+** test_file_ishdf5(): low-level file test routine.
+** This test checks whether the H5Fis_hdf5() routine is working
+** correctly in various situations.
+**
+*****************************************************************/
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+static void
+test_file_ishdf5(const char *env_h5_drvr)
+{
+    hid_t fid = H5I_INVALID_HID;     /* File opened with read-write permission */
+    hid_t fcpl_id = H5I_INVALID_HID; /* File creation property list */
+    hid_t fapl_id = H5I_INVALID_HID; /* File access property list */
+    int fd;                          /* POSIX file descriptor */
+    char filename[FILENAME_LEN];     /* Filename to use */
+    char sb_filename[FILENAME_LEN];  /* Name of file w/ superblock */
+    ssize_t nbytes;                  /* Number of bytes written */
+    unsigned u;                      /* Local index variable */
+    unsigned char buf[1024];         /* Buffer of data to write */
+    htri_t is_hdf5;                  /* Whether a file is an HDF5 file */
+    int posix_ret;                   /* Return value from POSIX calls */
+    herr_t ret;                      /* Return value from HDF5 calls */
+
+    /* The deprecated H5Fis_hdf5() call is only checked with the default VFD */
+    if (!h5_using_default_driver(env_h5_drvr))
+        return;
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Detection of HDF5 Files (using deprecated H5Fis_hdf5() call)\n"));
+
+    /* Get FAPL */
+    fapl_id = h5_fileaccess();
+    CHECK(fapl_id, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Fix up filenames
+     * For VFDs that create multiple files, we also need the name
+     * of the file with the superblock. With single-file VFDs, this
+     * will be equal to the one from h5_fixname().
+     */
+    h5_fixname(FILE_IS_ACCESSIBLE, fapl_id, filename, sizeof(filename));
+    /* Bug fix: size the superblock name with its OWN buffer (was
+     * sizeof(filename); harmless only while both are FILENAME_LEN) */
+    h5_fixname_superblock(FILE_IS_ACCESSIBLE, fapl_id, sb_filename, sizeof(sb_filename));
+
+    /****************/
+    /* Normal usage */
+    /****************/
+
+    /* Create a file */
+    fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Verify that the file is an HDF5 file */
+    is_hdf5 = H5Fis_hdf5(sb_filename);
+    VERIFY(is_hdf5, TRUE, "H5Fis_hdf5");
+
+    /*******************************/
+    /* Non-default user block size */
+    /*******************************/
+
+    /* Create a file creation property list with a non-default user block size */
+    fcpl_id = H5Pcreate(H5P_FILE_CREATE);
+    CHECK(fcpl_id, H5I_INVALID_HID, "H5Pcreate");
+
+    ret = H5Pset_userblock(fcpl_id, (hsize_t)2048);
+    CHECK(ret, FAIL, "H5Pset_userblock");
+
+    /* Create file with non-default user block */
+    fid = H5Fcreate(filename, H5F_ACC_TRUNC, fcpl_id, fapl_id);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Release file creation property list */
+    ret = H5Pclose(fcpl_id);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Verify that the file is an HDF5 file (userblock must not confuse detection) */
+    is_hdf5 = H5Fis_hdf5(sb_filename);
+    VERIFY(is_hdf5, TRUE, "H5Fis_hdf5");
+
+    /***************************/
+    /* Non-empty non-HDF5 file */
+    /***************************/
+
+    /* Create non-HDF5 file. Use the calculated superblock
+     * filename to avoid the format strings that will make
+     * open(2) sad.
+     */
+    fd = HDopen(sb_filename, O_RDWR | O_CREAT | O_TRUNC, H5_POSIX_CREATE_MODE_RW);
+    CHECK(fd, (-1), "HDopen");
+
+    /* Initialize information to write */
+    for (u = 0; u < 1024; u++)
+        buf[u] = (unsigned char)u;
+
+    /* Write some information */
+    nbytes = HDwrite(fd, buf, (size_t)1024);
+    VERIFY(nbytes, 1024, "HDwrite");
+
+    /* Close the file */
+    posix_ret = HDclose(fd);
+    CHECK(posix_ret, (-1), "HDclose");
+
+    /* Verify that the file is not an HDF5 file */
+    is_hdf5 = H5Fis_hdf5(sb_filename);
+    VERIFY(is_hdf5, FALSE, "H5Fis_hdf5");
+
+    /* Clean up files */
+#if 0
+    h5_delete_test_file(filename, fapl_id);
+#endif
+    H5Fdelete(filename, fapl_id);
+
+    /* Close property list */
+    ret = H5Pclose(fapl_id);
+    CHECK(ret, FAIL, "H5Pclose");
+
+} /* end test_file_ishdf5() */
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+#endif
+
+/****************************************************************
+**
+** test_file_delete(): tests H5Fdelete for all VFDs
+**
+*****************************************************************/
+#define FILE_DELETE "test_file_delete.h5"
+#define FILE_DELETE_NOT_HDF5 "test_file_delete_not_hdf5"
+static void
+test_file_delete(hid_t fapl_id)
+{
+    hid_t fid = H5I_INVALID_HID; /* File to be deleted */
+    char filename[FILENAME_LEN]; /* Filename to use */
+    htri_t is_hdf5;              /* Whether a file is an HDF5 file */
+#if 0
+    int fd;                     /* POSIX file descriptor */
+    int iret;
+#endif
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Deletion of HDF5 Files\n"));
+
+    /*************/
+    /* HDF5 FILE */
+    /*************/
+
+    /* Get fapl-dependent filename */
+    h5_fixname(FILE_DELETE, fapl_id, filename, sizeof(filename));
+
+    /* Create a file */
+    fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    VERIFY(ret, SUCCEED, "H5Fclose");
+
+    /* Verify that the file is an HDF5 file */
+    is_hdf5 = H5Fis_accessible(filename, fapl_id);
+    VERIFY(is_hdf5, TRUE, "H5Fis_accessible");
+
+    /* Delete the file */
+    ret = H5Fdelete(filename, fapl_id);
+    VERIFY(ret, SUCCEED, "H5Fdelete");
+
+    /* Verify that the file is NO LONGER an HDF5 file */
+    /* This should fail since there is no file */
+    H5E_BEGIN_TRY
+    {
+        is_hdf5 = H5Fis_accessible(filename, fapl_id);
+    }
+    H5E_END_TRY;
+    VERIFY(is_hdf5, FAIL, "H5Fis_accessible");
+
+    /* NOTE(review): the non-HDF5-file portion below is disabled (#if 0);
+     * it depends on raw POSIX I/O, presumably not available through all
+     * VOL connectors -- confirm before re-enabling.
+     */
+#if 0
+    /* Just in case deletion fails - silent on errors */
+    h5_delete_test_file(FILE_DELETE, fapl_id);
+
+    /*****************/
+    /* NON-HDF5 FILE */
+    /*****************/
+
+    /* Get fapl-dependent filename */
+    h5_fixname(FILE_DELETE_NOT_HDF5, fapl_id, filename, sizeof(filename));
+
+    /* Create a non-HDF5 file */
+    fd = HDopen(filename, O_RDWR | O_CREAT | O_TRUNC, H5_POSIX_CREATE_MODE_RW);
+    CHECK_I(fd, "HDopen");
+
+    /* Close the file */
+    ret = HDclose(fd);
+    VERIFY(ret, 0, "HDclose");
+
+    /* Verify that the file is not an HDF5 file */
+    /* Note that you can get a FAIL result when h5_fixname()
+     * perturbs the filename as a file with that exact name
+     * may not have been created since we created it with
+     * open(2) and not the library.
+     */
+    H5E_BEGIN_TRY
+    {
+        is_hdf5 = H5Fis_accessible(filename, fapl_id);
+    }
+    H5E_END_TRY;
+    CHECK(is_hdf5, TRUE, "H5Fis_accessible");
+
+    /* Try to delete it (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Fdelete(filename, fapl_id);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Fdelete");
+
+    /* Delete the file */
+    iret = H5Fdelete(filename, H5P_DEFAULT);
+    VERIFY(iret, 0, "H5Fdelete");
+#endif
+} /* end test_file_delete() */
+
+/****************************************************************
+**
+** test_file_open_dot(): low-level file test routine.
+** This test checks whether opening objects with "." for a name
+** works correctly in various situations.
+**
+*****************************************************************/
+static void
+test_file_open_dot(void)
+{
+    hid_t fid;       /* File ID */
+    hid_t gid, gid2; /* Group IDs */
+    hid_t did;       /* Dataset ID */
+    hid_t sid;       /* Dataspace ID */
+    hid_t tid, tid2; /* Datatype IDs */
+    herr_t ret;      /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing opening objects with \".\" for a name\n"));
+
+    /* Create a new HDF5 file to work with */
+    fid = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create a group in the HDF5 file */
+    gid = H5Gcreate2(fid, GRP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gcreate2");
+
+    /* Create a dataspace for creating datasets */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* "." names the location itself, so creating a NEW object with that name
+     * must fail; only opening "." as a group (the location itself) should
+     * succeed -- verified at the end of this routine.
+     */
+
+    /* Create a dataset with no name using the file ID */
+    H5E_BEGIN_TRY
+    {
+        did = H5Dcreate2(fid, ".", H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(did, FAIL, "H5Dcreate2");
+
+    /* Create a dataset with no name using the group ID */
+    H5E_BEGIN_TRY
+    {
+        did = H5Dcreate2(gid, ".", H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(did, FAIL, "H5Dcreate2");
+
+    /* Open a dataset with no name using the file ID */
+    H5E_BEGIN_TRY
+    {
+        did = H5Dopen2(fid, ".", H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(did, FAIL, "H5Dopen2");
+
+    /* Open a dataset with no name using the group ID */
+    H5E_BEGIN_TRY
+    {
+        did = H5Dopen2(gid, ".", H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(did, FAIL, "H5Dopen2");
+
+    /* Make a copy of a datatype to use for creating a named datatype */
+    tid = H5Tcopy(H5T_NATIVE_INT);
+    CHECK(tid, FAIL, "H5Tcopy");
+
+    /* Create a named datatype with no name using the file ID */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Tcommit2(fid, ".", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Tcommit2");
+
+    /* Create a named datatype with no name using the group ID */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Tcommit2(gid, ".", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Tcommit2");
+
+    /* Open a named datatype with no name using the file ID */
+    H5E_BEGIN_TRY
+    {
+        tid2 = H5Topen2(fid, ".", H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(tid2, FAIL, "H5Topen2");
+
+    /* Open a named datatype with no name using the group ID */
+    H5E_BEGIN_TRY
+    {
+        tid2 = H5Topen2(gid, ".", H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(tid2, FAIL, "H5Topen2");
+
+    /* Create a group with no name using the file ID */
+    H5E_BEGIN_TRY
+    {
+        gid2 = H5Gcreate2(fid, ".", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(gid2, FAIL, "H5Gcreate2");
+
+    /* Create a group with no name using the group ID */
+    H5E_BEGIN_TRY
+    {
+        gid2 = H5Gcreate2(gid, ".", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(gid2, FAIL, "H5Gcreate2");
+
+    /* Open a group with no name using the file ID (should open the root group) */
+    gid2 = H5Gopen2(fid, ".", H5P_DEFAULT);
+    CHECK(gid2, FAIL, "H5Gopen2");
+
+    ret = H5Gclose(gid2);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Open a group with no name using the group ID (should open the group again) */
+    gid2 = H5Gopen2(gid, ".", H5P_DEFAULT);
+    CHECK(gid2, FAIL, "H5Gopen2");
+
+    ret = H5Gclose(gid2);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Close everything */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_file_open_dot() */
+
+/****************************************************************
+**
+** test_file_open_overlap(): low-level file test routine.
+** This test checks whether opening files in an overlapping way
+** (as opposed to a nested manner) works correctly.
+**
+*****************************************************************/
+static void
+test_file_open_overlap(void)
+{
+    hid_t fid1, fid2; /* Two IDs for the same underlying file */
+    hid_t did1, did2; /* Datasets created through each file ID */
+    hid_t gid;        /* Group created through the first file ID */
+    hid_t sid;        /* Scalar dataspace */
+    ssize_t nobjs;  /* # of open objects */
+    unsigned intent;  /* File access mode reported by the library */
+#if 0
+    unsigned long fileno1, fileno2; /* File number */
+#endif
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing opening overlapping file opens\n"));
+
+    /* Create file */
+    fid1 = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Open file also */
+    fid2 = H5Fopen(FILE1, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(fid2, FAIL, "H5Fopen");
+
+    /* Check the intent */
+    ret = H5Fget_intent(fid1, &intent);
+    CHECK(ret, FAIL, "H5Fget_intent");
+    VERIFY(intent, H5F_ACC_RDWR, "H5Fget_intent");
+#if 0
+    /* Check the file numbers */
+    fileno1 = 0;
+    ret = H5Fget_fileno(fid1, &fileno1);
+    CHECK(ret, FAIL, "H5Fget_fileno");
+    fileno2 = 0;
+    ret = H5Fget_fileno(fid2, &fileno2);
+    CHECK(ret, FAIL, "H5Fget_fileno");
+    VERIFY(fileno1, fileno2, "H5Fget_fileno");
+
+    /* Check that a file number pointer of NULL is ignored */
+    ret = H5Fget_fileno(fid1, NULL);
+    CHECK(ret, FAIL, "H5Fget_fileno");
+#endif
+
+    /* Create a group in file */
+    gid = H5Gcreate2(fid1, GROUP1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gcreate2");
+
+    /* Create dataspace for dataset */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create dataset in group w/first file ID */
+    did1 = H5Dcreate2(gid, DSET1, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did1, FAIL, "H5Dcreate2");
+#ifndef WRONG_DATATYPE_OBJ_COUNT
+    /* Guard: presumably defined for connectors whose object counts differ
+     * from the native expectation -- confirm against the build config */
+    /* Check number of objects opened in first file */
+    nobjs = H5Fget_obj_count(fid1, H5F_OBJ_LOCAL | H5F_OBJ_ALL);
+    VERIFY(nobjs, 3, "H5Fget_obj_count"); /* 3 == file, dataset & group */
+#endif
+    /* Close dataset */
+    ret = H5Dclose(did1);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close group */
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Close first file ID */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Create dataset with second file ID */
+    did2 = H5Dcreate2(fid2, DSET2, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did2, FAIL, "H5Dcreate2");
+
+    /* Check number of objects opened in first file */
+    nobjs = H5Fget_obj_count(fid2, H5F_OBJ_ALL);
+    VERIFY(nobjs, 2, "H5Fget_obj_count"); /* 2 == file & dataset */
+
+    /* Close dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close second dataset */
+    ret = H5Dclose(did2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close second file */
+    ret = H5Fclose(fid2);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_file_open_overlap() */
+
+/****************************************************************
+**
+** test_file_getname(): low-level file test routine.
+** This test checks whether H5Fget_name works correctly.
+**
+*****************************************************************/
+static void
+test_file_getname(void)
+{
+    /* Compound datatype */
+    typedef struct s1_t {
+        unsigned int a;
+        float b;
+    } s1_t;
+
+    hid_t file_id;    /* File to query names against */
+    hid_t group_id;   /* Group in the root group */
+    hid_t dataset_id; /* Dataset in the file */
+    hid_t space_id;   /* Dataspace (not a file-resident object) */
+    hid_t type_id;    /* Committed compound datatype */
+    hid_t attr_id;    /* Attribute on the dataset */
+    hsize_t dims[TESTA_RANK] = {TESTA_NX, TESTA_NY};
+    char name[TESTA_NAME_BUF_SIZE]; /* Buffer receiving the file name */
+    ssize_t name_len;               /* Length reported by H5Fget_name() */
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing H5Fget_name() functionality\n"));
+
+    /* Create a new file_id using default properties. */
+    file_id = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file_id, FAIL, "H5Fcreate");
+
+    /* Get and verify file name */
+    name_len = H5Fget_name(file_id, name, (size_t)TESTA_NAME_BUF_SIZE);
+    CHECK(name_len, FAIL, "H5Fget_name");
+    VERIFY_STR(name, FILE1, "H5Fget_name");
+
+    /* Create a group in the root group */
+    group_id = H5Gcreate2(file_id, TESTA_GROUPNAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(group_id, FAIL, "H5Gcreate2");
+
+    /* Get and verify file name */
+    name_len = H5Fget_name(group_id, name, (size_t)TESTA_NAME_BUF_SIZE);
+    CHECK(name_len, FAIL, "H5Fget_name");
+    VERIFY_STR(name, FILE1, "H5Fget_name");
+
+    /* Create the data space */
+    space_id = H5Screate_simple(TESTA_RANK, dims, NULL);
+    CHECK(space_id, FAIL, "H5Screate_simple");
+
+    /* Try get file name from data space. Supposed to fail because
+     * it's illegal operation. */
+    H5E_BEGIN_TRY
+    {
+        name_len = H5Fget_name(space_id, name, (size_t)TESTA_NAME_BUF_SIZE);
+    }
+    H5E_END_TRY;
+    VERIFY(name_len, FAIL, "H5Fget_name");
+
+    /* Create a new dataset */
+    dataset_id =
+        H5Dcreate2(file_id, TESTA_DSETNAME, H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset_id, FAIL, "H5Dcreate2");
+
+    /* Get and verify file name */
+    name_len = H5Fget_name(dataset_id, name, (size_t)TESTA_NAME_BUF_SIZE);
+    CHECK(name_len, FAIL, "H5Fget_name");
+    VERIFY_STR(name, FILE1, "H5Fget_name");
+
+    /* Create an attribute for the dataset */
+    attr_id = H5Acreate2(dataset_id, TESTA_ATTRNAME, H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr_id, FAIL, "H5Acreate2");
+
+    /* Get and verify file name */
+    name_len = H5Fget_name(attr_id, name, (size_t)TESTA_NAME_BUF_SIZE);
+    CHECK(name_len, FAIL, "H5Fget_name");
+    VERIFY_STR(name, FILE1, "H5Fget_name");
+
+    /* Create a compound datatype */
+    type_id = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+    CHECK(type_id, FAIL, "H5Tcreate");
+
+    /* Insert fields */
+    ret = H5Tinsert(type_id, "a", HOFFSET(s1_t, a), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    ret = H5Tinsert(type_id, "b", HOFFSET(s1_t, b), H5T_NATIVE_FLOAT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Save it on file */
+    ret = H5Tcommit2(file_id, TESTA_DTYPENAME, type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+
+    /* Get and verify file name (works for committed datatypes too) */
+    name_len = H5Fget_name(type_id, name, (size_t)TESTA_NAME_BUF_SIZE);
+    CHECK(name_len, FAIL, "H5Fget_name");
+    VERIFY_STR(name, FILE1, "H5Fget_name");
+
+    /* Close things down */
+    ret = H5Tclose(type_id);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Aclose(attr_id);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    ret = H5Dclose(dataset_id);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Sclose(space_id);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Gclose(group_id);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(file_id);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_file_getname() */
+
+/****************************************************************
+**
+** test_file_double_root_open(): low-level file test routine.
+** This test checks whether opening the root group from two
+** different files works correctly.
+**
+*****************************************************************/
+static void
+test_file_double_root_open(void)
+{
+    hid_t  fid_a, fid_b;   /* Two IDs referring to the same file */
+    hid_t  root_a, root_b; /* Root group opened through each file ID */
+    herr_t status;         /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing double root group open\n"));
+
+    /* Open the same file twice: once freshly created, once read-only */
+    fid_a = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid_a, FAIL, "H5Fcreate");
+    fid_b = H5Fopen(FILE1, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid_b, FAIL, "H5Fopen");
+
+    /* Open the root group through both file IDs */
+    root_a = H5Gopen2(fid_a, "/", H5P_DEFAULT);
+    CHECK(root_a, FAIL, "H5Gopen2");
+    root_b = H5Gopen2(fid_b, "/", H5P_DEFAULT);
+    CHECK(root_b, FAIL, "H5Gopen2");
+
+    /* Note "asymmetric" close order */
+    status = H5Gclose(root_a);
+    CHECK(status, FAIL, "H5Gclose");
+    status = H5Gclose(root_b);
+    CHECK(status, FAIL, "H5Gclose");
+
+    status = H5Fclose(fid_a);
+    CHECK(status, FAIL, "H5Fclose");
+    status = H5Fclose(fid_b);
+    CHECK(status, FAIL, "H5Fclose");
+} /* end test_file_double_root_open() */
+
+/****************************************************************
+**
+** test_file_double_group_open(): low-level file test routine.
+** This test checks whether opening the same group from two
+** different files works correctly.
+**
+*****************************************************************/
+static void
+test_file_double_group_open(void)
+{
+    hid_t  fid_a, fid_b; /* Two IDs referring to the same file */
+    hid_t  grp_a, grp_b; /* Group opened through each file ID */
+    herr_t status;       /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing double non-root group open\n"));
+
+    /* Open the same file twice: once freshly created, once read-only */
+    fid_a = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid_a, FAIL, "H5Fcreate");
+    fid_b = H5Fopen(FILE1, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid_b, FAIL, "H5Fopen");
+
+    /* Create the group through the first ID, then open it through the second */
+    grp_a = H5Gcreate2(fid_a, GRP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(grp_a, FAIL, "H5Gcreate2");
+    grp_b = H5Gopen2(fid_b, GRP_NAME, H5P_DEFAULT);
+    CHECK(grp_b, FAIL, "H5Gopen2");
+
+    /* Note "asymmetric" close order */
+    status = H5Gclose(grp_a);
+    CHECK(status, FAIL, "H5Gclose");
+    status = H5Gclose(grp_b);
+    CHECK(status, FAIL, "H5Gclose");
+
+    status = H5Fclose(fid_a);
+    CHECK(status, FAIL, "H5Fclose");
+    status = H5Fclose(fid_b);
+    CHECK(status, FAIL, "H5Fclose");
+} /* end test_file_double_group_open() */
+
+/****************************************************************
+**
+** test_file_double_dataset_open(): low-level file test routine.
+** This test checks whether opening the same dataset from two
+** different files works correctly.
+**
+*****************************************************************/
+static void
+test_file_double_dataset_open(void)
+{
+    hid_t  fid_a, fid_b;   /* Two IDs referring to the same file */
+    hid_t  dset_a, dset_b; /* Dataset opened through each file ID */
+    hid_t  scalar_sid;     /* Scalar dataspace ID */
+    herr_t status;         /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing double dataset open\n"));
+
+    /* Open the same file twice: once freshly created, once read-only */
+    fid_a = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid_a, FAIL, "H5Fcreate");
+    fid_b = H5Fopen(FILE1, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid_b, FAIL, "H5Fopen");
+
+    /* Create dataspace for dataset */
+    scalar_sid = H5Screate(H5S_SCALAR);
+    CHECK(scalar_sid, FAIL, "H5Screate");
+
+    /* Create the dataset through the first ID, then open it through the second */
+    dset_a =
+        H5Dcreate2(fid_a, DSET_NAME, H5T_NATIVE_INT, scalar_sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dset_a, FAIL, "H5Dcreate2");
+    dset_b = H5Dopen2(fid_b, DSET_NAME, H5P_DEFAULT);
+    CHECK(dset_b, FAIL, "H5Dopen2");
+
+    /* Close "supporting" dataspace */
+    status = H5Sclose(scalar_sid);
+    CHECK(status, FAIL, "H5Sclose");
+
+    /* Note "asymmetric" close order */
+    status = H5Dclose(dset_a);
+    CHECK(status, FAIL, "H5Dclose");
+    status = H5Dclose(dset_b);
+    CHECK(status, FAIL, "H5Dclose");
+
+    status = H5Fclose(fid_a);
+    CHECK(status, FAIL, "H5Fclose");
+    status = H5Fclose(fid_b);
+    CHECK(status, FAIL, "H5Fclose");
+} /* end test_file_double_dataset_open() */
+
+/****************************************************************
+**
+** test_file_double_file_dataset_open():
+** This test checks multi-opens of files & datasets:
+** It simulates the multi-thread test program from DLS
+** which exposes the file pointer segmentation fault failure.
+** NOTE: The order on when the files and datasets are open/close
+** is important.
+**
+*****************************************************************/
+static void
+test_file_double_file_dataset_open(hbool_t new_format)
+{
+    hid_t   fapl = -1;                        /* File access property list */
+    hid_t   dcpl = -1;                        /* Dataset creation property list */
+    hid_t   fid1 = -1, fid2 = -1;             /* File IDs */
+    hid_t   did1 = -1, did2 = -1;             /* Dataset IDs */
+    hid_t   sid1 = -1, sid2 = -1;             /* Dataspace IDs */
+    hid_t   tid1 = -1, tid2 = -1;             /* Datatype IDs */
+    hsize_t dims[1] = {5}, dims2[2] = {1, 4}; /* Dimension sizes */
+    hsize_t e_ext_dims[1] = {7};              /* Expanded dimension sizes */
+    hsize_t s_ext_dims[1] = {3};              /* Shrunk dimension sizes */
+    hsize_t max_dims0[1] = {8};               /* Maximum dimension sizes */
+    hsize_t max_dims1[1] = {H5S_UNLIMITED};   /* Maximum dimension sizes for extensible array index */
+    hsize_t max_dims2[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* Maximum dimension sizes for v2 B-tree index */
+    hsize_t chunks[1] = {2}, chunks2[2] = {4, 5};          /* Chunk dimension sizes */
+#if 0
+    hsize_t size; /* File size */
+#endif
+    char        filename[FILENAME_LEN]; /* Filename to use */
+    const char *data[] = {"String 1", "String 2", "String 3", "String 4", "String 5"}; /* Input Data */
+    const char *e_data[] = {"String 1", "String 2", "String 3", "String 4",
+                            "String 5", "String 6", "String 7"}; /* Input Data */
+    char  *buffer[5];              /* Output buffer */
+    int    wbuf[4] = {1, 2, 3, 4}; /* Input data */
+    herr_t ret;                    /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing double file and dataset open/close\n"));
+
+    /* Setting up test file */
+    fapl = h5_fileaccess();
+    CHECK(fapl, FAIL, "H5Pcreate");
+    if (new_format) {
+        ret = H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
+        CHECK(ret, FAIL, "H5Pset_libver_bounds");
+    } /* end if */
+    h5_fixname(FILE1, fapl, filename, sizeof filename);
+
+    /* Create the test file */
+    fid1 = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create a chunked dataset with fixed array indexing */
+    sid1 = H5Screate_simple(1, dims, max_dims0);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+    tid1 = H5Tcopy(H5T_C_S1);
+    CHECK(tid1, FAIL, "H5Tcopy");
+    ret = H5Tset_size(tid1, H5T_VARIABLE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, FAIL, "H5Pcreate");
+    ret = H5Pset_chunk(dcpl, 1, chunks);
+    CHECK(ret, FAIL, "H5Pset_chunk");
+
+    did1 = H5Dcreate2(fid1, "dset_fa", tid1, sid1, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(did1, FAIL, "H5Dcreate2");
+
+    /* Closing */
+    ret = H5Dclose(did1);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose"); /* fixed: label previously said "H5Dclose" */
+
+    /* Create a chunked dataset with extensible array indexing */
+    sid1 = H5Screate_simple(1, dims, max_dims1);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+    tid1 = H5Tcopy(H5T_C_S1);
+    CHECK(tid1, FAIL, "H5Tcopy");
+    ret = H5Tset_size(tid1, H5T_VARIABLE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, FAIL, "H5Pcreate");
+    ret = H5Pset_chunk(dcpl, 1, chunks);
+    CHECK(ret, FAIL, "H5Pset_chunk");
+
+    did1 = H5Dcreate2(fid1, "dset_ea", tid1, sid1, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(did1, FAIL, "H5Dcreate2");
+
+    /* Write to the dataset */
+    ret = H5Dwrite(did1, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Closing */
+    /* (Leave sid1 open for later use) */
+    ret = H5Dclose(did1);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose"); /* fixed: label previously said "H5Dclose" */
+
+    /* Create a chunked dataset with v2 btree indexing */
+    sid2 = H5Screate_simple(2, dims2, max_dims2);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, FAIL, "H5Pcreate");
+    ret = H5Pset_chunk(dcpl, 2, chunks2);
+    CHECK(ret, FAIL, "H5Pset_chunk");
+
+    did2 = H5Dcreate2(fid1, "dset_bt2", H5T_NATIVE_INT, sid2, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(did2, FAIL, "H5Dcreate2");
+
+    /* Write to the dataset */
+    ret = H5Dwrite(did2, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Closing */
+    ret = H5Dclose(did2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /*
+     * Scenario 1
+     */
+
+    /* First file open */
+    fid1 = H5Fopen(filename, H5F_ACC_RDWR, fapl);
+    CHECK(fid1, FAIL, "H5Fopen");
+
+    /* First file's dataset open */
+    did1 = H5Dopen2(fid1, "/dset_fa", H5P_DEFAULT);
+    CHECK(did1, FAIL, "H5Dopen2");
+
+    /* (H5Tcopy on a dataset ID returns a copy of the dataset's datatype) */
+    tid1 = H5Tcopy(did1);
+    CHECK(tid1, FAIL, "H5Tcopy");
+
+    /* First file's dataset write */
+    ret = H5Dwrite(did1, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Second file open */
+    fid2 = H5Fopen(filename, H5F_ACC_RDWR, fapl);
+    CHECK(fid2, FAIL, "H5Fopen");
+
+    /* Second file's dataset open */
+    did2 = H5Dopen2(fid2, "/dset_fa", H5P_DEFAULT);
+    CHECK(did2, FAIL, "H5Dopen2");
+
+    tid2 = H5Tcopy(did2);
+    CHECK(tid2, FAIL, "H5Tcopy");
+
+    /* First file's dataset close */
+    ret = H5Dclose(did1);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* First file close */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Second file's dataset write */
+    ret = H5Dwrite(did2, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Second file's dataset close */
+    ret = H5Dclose(did2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Second file close */
+    ret = H5Fclose(fid2);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Closing */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /*
+     * Scenario 2
+     */
+
+    /* First file open */
+    fid1 = H5Fopen(filename, H5F_ACC_RDONLY, fapl);
+    CHECK(fid1, FAIL, "H5Fopen");
+
+    /* Second file open */
+    fid2 = H5Fopen(filename, H5F_ACC_RDONLY, fapl);
+    CHECK(fid2, FAIL, "H5Fopen");
+
+    /* Second file's dataset open */
+    did2 = H5Dopen2(fid2, "/dset_ea", H5P_DEFAULT);
+    CHECK(did2, FAIL, "H5Dopen2");
+
+    tid2 = H5Tcopy(did2);
+    CHECK(tid2, FAIL, "H5Tcopy");
+
+    /* First file's dataset open */
+    did1 = H5Dopen2(fid1, "/dset_ea", H5P_DEFAULT);
+    CHECK(did1, FAIL, "H5Dopen2");
+
+    tid1 = H5Tcopy(did1);
+    CHECK(tid1, FAIL, "H5Tcopy");
+
+    /* Second file's dataset read */
+    HDmemset(buffer, 0, sizeof(char *) * 5);
+    ret = H5Dread(did2, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, buffer);
+    CHECK(ret, FAIL, "H5Dread");
+    ret = H5Treclaim(tid2, sid1, H5P_DEFAULT, buffer);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Second file's dataset close */
+    ret = H5Dclose(did2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Second file close */
+    ret = H5Fclose(fid2);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* First file's dataset read */
+    HDmemset(buffer, 0, sizeof(char *) * 5);
+    ret = H5Dread(did1, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, buffer);
+    CHECK(ret, FAIL, "H5Dread");
+    /* Reclaim with the same datatype the read used (was tid2; tid1 and tid2
+     * are copies of the same VL string type, but tid1 is the intended ID) */
+    ret = H5Treclaim(tid1, sid1, H5P_DEFAULT, buffer);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* First file's dataset close */
+    ret = H5Dclose(did1);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* First file close */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Closing */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /*
+     * Scenario 3
+     */
+
+    /* First file open */
+    fid1 = H5Fopen(filename, H5F_ACC_RDONLY, fapl);
+    CHECK(fid1, FAIL, "H5Fopen");
+
+    /* First file's dataset open */
+    did1 = H5Dopen2(fid1, "/dset_bt2", H5P_DEFAULT);
+    CHECK(did1, FAIL, "H5Dopen2");
+#if 0
+    /* First file's get storage size */
+    size = H5Dget_storage_size(did1);
+    CHECK(size, 0, "H5Dget_storage_size");
+#endif
+    /* Second file open */
+    fid2 = H5Fopen(filename, H5F_ACC_RDONLY, fapl);
+    CHECK(fid2, FAIL, "H5Fopen");
+
+    /* Second file's dataset open */
+    did2 = H5Dopen2(fid2, "/dset_bt2", H5P_DEFAULT);
+    CHECK(did2, FAIL, "H5Dopen2");
+
+    /* First file's dataset close */
+    ret = H5Dclose(did1);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* First file close */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+#if 0
+    /* Second file's get storage size */
+    size = H5Dget_storage_size(did2);
+    CHECK(size, 0, "H5Dget_storage_size");
+#endif
+    /* Second file's dataset close */
+    ret = H5Dclose(did2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Second file close */
+    ret = H5Fclose(fid2);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /*
+     * Scenario 4
+     * --trigger H5AC_protect: Assertion `f->shared' failed
+     * from second call to
+     * H5Dset_extent->...H5D__earray_idx_remove->H5EA_get...H5EA__iblock_protect...H5AC_protect
+     */
+    /* First file open */
+    fid1 = H5Fopen(filename, H5F_ACC_RDWR, fapl);
+    CHECK(fid1, FAIL, "H5Fopen");
+
+    /* First file's dataset open */
+    did1 = H5Dopen2(fid1, "/dset_ea", H5P_DEFAULT);
+    CHECK(did1, FAIL, "H5Dopen2");
+
+    tid1 = H5Tcopy(did1);
+    CHECK(tid1, FAIL, "H5Tcopy");
+
+    /* Extend the dataset */
+    ret = H5Dset_extent(did1, e_ext_dims);
+    CHECK(ret, FAIL, "H5Dset_extent");
+
+    /* Write to the dataset */
+    ret = H5Dwrite(did1, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, e_data);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Second file open */
+    fid2 = H5Fopen(filename, H5F_ACC_RDWR, fapl);
+    CHECK(fid2, FAIL, "H5Fopen");
+
+    /* Second file's dataset open */
+    did2 = H5Dopen2(fid2, "/dset_ea", H5P_DEFAULT);
+    CHECK(did2, FAIL, "H5Dopen2");
+
+    /* First file's dataset close */
+    ret = H5Dclose(did1);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* First file close */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Shrink the dataset */
+    ret = H5Dset_extent(did2, s_ext_dims);
+    CHECK(ret, FAIL, "H5Dset_extent");
+
+    /* Second file's dataset close */
+    ret = H5Dclose(did2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Second file close */
+    ret = H5Fclose(fid2);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Close the data type */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close FAPL */
+    ret = H5Pclose(fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+} /* end test_file_double_file_dataset_open() */
+
+/****************************************************************
+**
+** test_file_double_datatype_open(): low-level file test routine.
+** This test checks whether opening the same named datatype from two
+** different files works correctly.
+**
+*****************************************************************/
+static void
+test_file_double_datatype_open(void)
+{
+    hid_t  file1_id, file2_id;
+    hid_t  type1_id, type2_id;
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing double datatype open\n"));
+
+    /* Open the same file twice: once freshly created, once read-only */
+    file1_id = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file1_id, FAIL, "H5Fcreate");
+    file2_id = H5Fopen(FILE1, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(file2_id, FAIL, "H5Fopen");
+
+    /* Commit a named datatype through the first file ID, open it through the second */
+    type1_id = H5Tcopy(H5T_NATIVE_INT);
+    CHECK(type1_id, FAIL, "H5Tcopy");
+    ret = H5Tcommit2(file1_id, TYPE_NAME, type1_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+    type2_id = H5Topen2(file2_id, TYPE_NAME, H5P_DEFAULT);
+    CHECK(type2_id, FAIL, "H5Topen2");
+
+    /* Note "asymmetric" close order */
+    ret = H5Tclose(type1_id);
+    CHECK(ret, FAIL, "H5Tclose");
+    ret = H5Tclose(type2_id);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Fclose(file1_id);
+    CHECK(ret, FAIL, "H5Fclose");
+    ret = H5Fclose(file2_id);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_file_double_datatype_open() */
+
+/****************************************************************
+**
+** test_userblock_file_size(): low-level file test routine.
+** This test checks that the presence of a userblock
+** affects the file size in the expected manner, and that
+** the filesize is not changed by reopening the file. It
+** creates two files which are identical except that one
+** contains a userblock, and verifies that their file sizes
+** differ exactly by the userblock size.
+**
+*****************************************************************/
+#if 0
+static void
+test_userblock_file_size(const char *env_h5_drvr)
+{
+    hid_t   file1_id, file2_id;
+    hid_t   group1_id, group2_id;
+    hid_t   dset1_id, dset2_id;
+    hid_t   space_id;
+    hid_t   fcpl2_id;
+    hsize_t dims[2] = {3, 4};
+#if 0
+    hsize_t filesize1, filesize2, filesize;
+    unsigned long fileno1, fileno2; /* File number */
+#endif
+    herr_t ret; /* Generic return value */
+
+    /* Don't run with multi/split, family or direct drivers */
+    if (!HDstrcmp(env_h5_drvr, "multi") || !HDstrcmp(env_h5_drvr, "split") ||
+        !HDstrcmp(env_h5_drvr, "family") || !HDstrcmp(env_h5_drvr, "direct"))
+        return;
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing file size with user block\n"));
+
+    /* Create property list with userblock size set */
+    fcpl2_id = H5Pcreate(H5P_FILE_CREATE);
+    CHECK(fcpl2_id, FAIL, "H5Pcreate");
+    ret = H5Pset_userblock(fcpl2_id, USERBLOCK_SIZE);
+    CHECK(ret, FAIL, "H5Pset_userblock");
+
+    /* Create files. Only file2 will have a userblock. */
+    file1_id = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file1_id, FAIL, "H5Fcreate");
+    file2_id = H5Fcreate(FILE2, H5F_ACC_TRUNC, fcpl2_id, H5P_DEFAULT);
+    CHECK(file2_id, FAIL, "H5Fcreate");
+#if 0
+    /* Check the file numbers */
+    fileno1 = 0;
+    ret = H5Fget_fileno(file1_id, &fileno1);
+    CHECK(ret, FAIL, "H5Fget_fileno");
+    fileno2 = 0;
+    ret = H5Fget_fileno(file2_id, &fileno2);
+    CHECK(ret, FAIL, "H5Fget_fileno");
+    CHECK(fileno1, fileno2, "H5Fget_fileno");
+#endif
+    /* Create groups (identical contents in both files) */
+    group1_id = H5Gcreate2(file1_id, GROUP1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(group1_id, FAIL, "H5Gcreate2");
+    group2_id = H5Gcreate2(file2_id, GROUP1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(group2_id, FAIL, "H5Gcreate2");
+
+    /* Create dataspace */
+    space_id = H5Screate_simple(2, dims, NULL);
+    CHECK(space_id, FAIL, "H5Screate_simple");
+
+    /* Create datasets */
+    dset1_id = H5Dcreate2(file1_id, DSET2, H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dset1_id, FAIL, "H5Dcreate2");
+    dset2_id = H5Dcreate2(file2_id, DSET2, H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dset2_id, FAIL, "H5Dcreate2");
+
+    /* Close IDs */
+    ret = H5Dclose(dset1_id);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(dset2_id);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Sclose(space_id);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Gclose(group1_id);
+    CHECK(ret, FAIL, "H5Gclose");
+    ret = H5Gclose(group2_id);
+    CHECK(ret, FAIL, "H5Gclose");
+    ret = H5Pclose(fcpl2_id);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close files */
+    ret = H5Fclose(file1_id);
+    CHECK(ret, FAIL, "H5Fclose");
+    ret = H5Fclose(file2_id);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Reopen files */
+    file1_id = H5Fopen(FILE1, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(file1_id, FAIL, "H5Fopen");
+    file2_id = H5Fopen(FILE2, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(file2_id, FAIL, "H5Fopen");
+#if 0
+    /* Check file sizes */
+    ret = H5Fget_filesize(file1_id, &filesize1);
+    CHECK(ret, FAIL, "H5Fget_filesize");
+    ret = H5Fget_filesize(file2_id, &filesize2);
+    CHECK(ret, FAIL, "H5Fget_filesize");
+
+    /* Verify that the file sizes differ exactly by the userblock size */
+    VERIFY_TYPE((unsigned long long)filesize2, (unsigned long long)(filesize1 + USERBLOCK_SIZE),
+                unsigned long long, "%llu", "H5Fget_filesize");
+#endif
+    /* Close files */
+    ret = H5Fclose(file1_id);
+    CHECK(ret, FAIL, "H5Fclose");
+    ret = H5Fclose(file2_id);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Reopen files a second time to verify size stability */
+    file1_id = H5Fopen(FILE1, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(file1_id, FAIL, "H5Fopen");
+    file2_id = H5Fopen(FILE2, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(file2_id, FAIL, "H5Fopen");
+#if 0
+    /* Verify file sizes did not change */
+    ret = H5Fget_filesize(file1_id, &filesize);
+    CHECK(ret, FAIL, "H5Fget_filesize");
+    VERIFY(filesize, filesize1, "H5Fget_filesize");
+    ret = H5Fget_filesize(file2_id, &filesize);
+    CHECK(ret, FAIL, "H5Fget_filesize");
+    VERIFY(filesize, filesize2, "H5Fget_filesize");
+#endif
+    /* Close files */
+    ret = H5Fclose(file1_id);
+    CHECK(ret, FAIL, "H5Fclose");
+    ret = H5Fclose(file2_id);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_userblock_file_size() */
+#endif
+
+/****************************************************************
+**
+** test_cached_stab_info(): low-level file test routine.
+** This test checks that new files are created with cached
+** symbol table information in the superblock (when using
+** the old format). This is necessary to ensure backwards
+** compatibility with versions from 1.3.0 to 1.6.3.
+**
+*****************************************************************/
+#if 0
+static void
+test_cached_stab_info(void)
+{
+    hid_t  file_id;
+    hid_t  group_id;
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing cached symbol table information\n"));
+
+    /* Create file */
+    file_id = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file_id, FAIL, "H5Fcreate");
+
+    /* Create group (forces a symbol table to exist) */
+    group_id = H5Gcreate2(file_id, GROUP1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(group_id, FAIL, "H5Gcreate2");
+
+    /* Close file and group */
+    ret = H5Gclose(group_id);
+    CHECK(ret, FAIL, "H5Gclose");
+    ret = H5Fclose(file_id);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Reopen file, so the cached info must come from the superblock on disk */
+    file_id = H5Fopen(FILE1, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(file_id, FAIL, "H5Fopen");
+#if 0
+    /* Verify the cached symbol table information */
+    ret = H5F__check_cached_stab_test(file_id);
+    CHECK(ret, FAIL, "H5F__check_cached_stab_test");
+#endif
+    /* Close file */
+    ret = H5Fclose(file_id);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_cached_stab_info() */
+#endif
+
+#if 0
+/*
+ * To calculate the checksum for a file.
+ * This is a helper routine for test_rw_noupdate().
+ *
+ * Returns 0 if no new test errors were recorded, -1 otherwise.
+ * The checksum of the whole file's contents is returned via *chksum.
+ */
+static int
+cal_chksum(const char *file, uint32_t *chksum)
+{
+    int       curr_num_errs = nerrors; /* Retrieve the current # of errors */
+    int       fdes          = -1;      /* File descriptor */
+    void     *file_data     = NULL;    /* Copy of file data */
+    ssize_t   bytes_read;              /* # of bytes read */
+    h5_stat_t sb;                      /* Stat buffer for file */
+    herr_t    ret;                     /* Generic return value */
+
+    /* Open the file */
+    fdes = HDopen(file, O_RDONLY);
+    CHECK(fdes, FAIL, "HDopen");
+
+    /* Retrieve the file's size */
+    ret = HDfstat(fdes, &sb);
+    CHECK(ret, FAIL, "HDfstat"); /* fixed: previously re-checked 'fdes' instead of the fstat result */
+
+    /* Allocate space for the file data */
+    file_data = HDmalloc((size_t)sb.st_size);
+    CHECK_PTR(file_data, "HDmalloc");
+
+    if (file_data) {
+        /* Read file's data into memory */
+        bytes_read = HDread(fdes, file_data, (size_t)sb.st_size);
+        CHECK(bytes_read == sb.st_size, FALSE, "HDread"); /* fixed: label previously said "HDmalloc" */
+
+        /* Calculate checksum over the entire file contents.
+         * (fixed: 'sizeof(file_data)' is the size of the pointer, not the buffer,
+         * so only the first few bytes were being checksummed) */
+        *chksum = H5_checksum_lookup3(file_data, (size_t)sb.st_size, 0);
+
+        /* Free memory */
+        HDfree(file_data);
+    }
+
+    /* Close the file */
+    ret = HDclose(fdes);
+    CHECK(ret, FAIL, "HDclose");
+
+    return ((nerrors == curr_num_errs) ? 0 : -1);
+} /* cal_chksum() */
+#endif
+
+/****************************************************************
+**
+** test_rw_noupdate(): low-level file test routine.
+** This test checks to ensure that opening and closing a file
+** with read/write permissions does not write anything to the
+** file if the file does not change.
+** Due to the implementation of file locking (status_flags in
+** the superblock is used), this test is changed to use checksum
+** instead of timestamp to verify the file is not changed.
+**
+** Programmer: Vailin Choi; July 2013
+**
+*****************************************************************/
+#if 0
+static void
+test_rw_noupdate(void)
+{
+    herr_t   ret;              /* Generic return value */
+    hid_t    fid;              /* File ID */
+    uint32_t chksum1, chksum2; /* Checksum value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing to verify that nothing is written if nothing is changed.\n"));
+
+    /* Create and Close a HDF5 File */
+    fid = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Calculate checksum for the file (baseline) */
+    ret = cal_chksum(FILE1, &chksum1);
+    CHECK(ret, FAIL, "cal_chksum");
+
+    /* Open and close File With Read/Write Permission */
+    fid = H5Fopen(FILE1, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Calculate checksum for the file after the open/close cycle */
+    ret = cal_chksum(FILE1, &chksum2);
+    CHECK(ret, FAIL, "cal_chksum");
+
+    /* The two checksums are the same, i.e. the file is not changed */
+    VERIFY(chksum1, chksum2, "Checksum");
+
+} /* end test_rw_noupdate() */
+#endif
+
+/****************************************************************
+**
+** test_userblock_alignment_helper1(): helper routine for
+** test_userblock_alignment() test, to handle common testing
+**
+** Programmer: Quincey Koziol
+** September 10, 2009
+**
+*****************************************************************/
+#if 0
+static int
+test_userblock_alignment_helper1(hid_t fcpl, hid_t fapl)
+{
+    hid_t  fid;                     /* File ID */
+    int    curr_num_errs = nerrors; /* Retrieve the current # of errors
+                                     * (fixed: 'nerrors' is used as a variable in the
+                                     * return below and elsewhere in this file; it was
+                                     * inconsistently invoked as 'nerrors()' here) */
+    herr_t ret;                     /* Generic return value */
+
+    /* Create a file with FAPL & FCPL */
+    fid = H5Fcreate(FILE1, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Only proceed further if file ID is OK */
+    if (fid > 0) {
+        hid_t gid;     /* Group ID */
+        hid_t sid;     /* Dataspace ID */
+        hid_t did;     /* Dataset ID */
+        int   val = 2; /* Dataset value */
+
+        /* Create a group */
+        gid = H5Gcreate2(fid, "group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(gid, FAIL, "H5Gcreate2");
+
+        /* Create a dataset */
+        sid = H5Screate(H5S_SCALAR);
+        CHECK(sid, FAIL, "H5Screate");
+        did = H5Dcreate2(gid, "dataset", H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(did, FAIL, "H5Dcreate2");
+
+        /* Close dataspace */
+        ret = H5Sclose(sid);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        /* Write value to dataset */
+        ret = H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &val);
+        CHECK(ret, FAIL, "H5Dwrite");
+
+        /* Close dataset */
+        ret = H5Dclose(did);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* Close group */
+        ret = H5Gclose(gid);
+        CHECK(ret, FAIL, "H5Gclose");
+
+        /* Close file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+    } /* end if */
+
+    return ((nerrors == curr_num_errs) ? 0 : -1);
+} /* end test_userblock_alignment_helper1() */
+
+/****************************************************************
+**
+** test_userblock_alignment_helper2(): helper routine for
+** test_userblock_alignment() test, to handle common testing
+**
+** Programmer: Quincey Koziol
+** September 10, 2009
+**
+*****************************************************************/
+static int
+test_userblock_alignment_helper2(hid_t fapl, hbool_t open_rw)
+{
+    hid_t  fid;                     /* File ID */
+    int    curr_num_errs = nerrors; /* Retrieve the current # of errors
+                                     * (fixed: 'nerrors' is used as a variable in the
+                                     * return below and elsewhere in this file; it was
+                                     * inconsistently invoked as 'nerrors()' here) */
+    herr_t ret;                     /* Generic return value */
+
+    /* Re-open file */
+    fid = H5Fopen(FILE1, (open_rw ? H5F_ACC_RDWR : H5F_ACC_RDONLY), fapl);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Only proceed further if file ID is OK */
+    if (fid > 0) {
+        hid_t gid;      /* Group ID */
+        hid_t did;      /* Dataset ID */
+        int   val = -1; /* Dataset value */
+
+        /* Open group */
+        gid = H5Gopen2(fid, "group1", H5P_DEFAULT);
+        CHECK(gid, FAIL, "H5Gopen2");
+
+        /* Open dataset */
+        did = H5Dopen2(gid, "dataset", H5P_DEFAULT);
+        CHECK(did, FAIL, "H5Dopen2");
+
+        /* Read value from dataset */
+        ret = H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &val);
+        CHECK(ret, FAIL, "H5Dread");
+        VERIFY(val, 2, "H5Dread");
+
+        /* Close dataset */
+        ret = H5Dclose(did);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* Only create new objects if file is open R/W */
+        if (open_rw) {
+            hid_t gid2; /* Group ID */
+
+            /* Create a new group */
+            gid2 = H5Gcreate2(gid, "group2", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+            CHECK(gid2, FAIL, "H5Gcreate2"); /* fixed: previously checked 'gid' instead of 'gid2' */
+
+            /* Close new group */
+            ret = H5Gclose(gid2);
+            CHECK(ret, FAIL, "H5Gclose");
+        } /* end if */
+
+        /* Close group */
+        ret = H5Gclose(gid);
+        CHECK(ret, FAIL, "H5Gclose");
+
+        /* Close file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+    } /* end if */
+
+    return ((nerrors == curr_num_errs) ? 0 : -1);
+} /* end test_userblock_alignment_helper2() */
+
+/****************************************************************
+**
+** test_userblock_alignment(): low-level file test routine.
+** This test checks to ensure that files with both a userblock and a
+** object [allocation] alignment size set interact properly.
+**
+** Programmer: Quincey Koziol
+** September 8, 2009
+**
+*****************************************************************/
+static void
+test_userblock_alignment(const char *env_h5_drvr)
+{
+ hid_t fid; /* File ID */
+ hid_t fcpl; /* File creation property list ID */
+ hid_t fapl; /* File access property list ID */
+ herr_t ret; /* Generic return value */
+
+ /* Only run with sec2 driver */
+ if (!h5_using_default_driver(env_h5_drvr))
+ return;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing that non-zero userblocks and object alignment interact correctly.\n"));
+
+ /* Case 1:
+ * Userblock size = 0, alignment != 0
+ * Outcome:
+ * Should succeed
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)0);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+
+ /* Create file access property list with alignment */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)3);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Call helper routines to perform file manipulations */
+ ret = test_userblock_alignment_helper1(fcpl, fapl);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper1");
+ ret = test_userblock_alignment_helper2(fapl, TRUE);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper2");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Case 2:
+ * Userblock size = 512, alignment = 16
+ * (userblock is integral mult. of alignment)
+ * Outcome:
+ * Should succeed
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+
+ /* Create file access property list with alignment */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)16);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Call helper routines to perform file manipulations */
+ ret = test_userblock_alignment_helper1(fcpl, fapl);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper1");
+ ret = test_userblock_alignment_helper2(fapl, TRUE);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper2");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Case 3:
+ * Userblock size = 512, alignment = 512
+ * (userblock is equal to alignment)
+ * Outcome:
+ * Should succeed
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+
+ /* Create file access property list with alignment */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Call helper routines to perform file manipulations */
+ ret = test_userblock_alignment_helper1(fcpl, fapl);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper1");
+ ret = test_userblock_alignment_helper2(fapl, TRUE);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper2");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Case 4:
+ * Userblock size = 512, alignment = 3
+ * (userblock & alignment each individually valid, but userblock is
+ * non-integral multiple of alignment)
+ * Outcome:
+ * Should fail at file creation
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+
+ /* Create file access property list with alignment */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)3);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Create a file with FAPL & FCPL */
+ H5E_BEGIN_TRY
+ {
+ fid = H5Fcreate(FILE1, H5F_ACC_TRUNC, fcpl, fapl);
+ }
+ H5E_END_TRY;
+ VERIFY(fid, FAIL, "H5Fcreate");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Case 5:
+ * Userblock size = 512, alignment = 1024
+ * (userblock & alignment each individually valid, but userblock is
+ * less than alignment)
+ * Outcome:
+ * Should fail at file creation
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+
+ /* Create file access property list with alignment */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)1024);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Create a file with FAPL & FCPL */
+ H5E_BEGIN_TRY
+ {
+ fid = H5Fcreate(FILE1, H5F_ACC_TRUNC, fcpl, fapl);
+ }
+ H5E_END_TRY;
+ VERIFY(fid, FAIL, "H5Fcreate");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Case 6:
+ * File created with:
+ * Userblock size = 512, alignment = 512
+ * File re-opened for read-only & read-write access with:
+ * Userblock size = 512, alignment = 1024
+ * Outcome:
+ * Should succeed
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+
+ /* Create file access property list with alignment */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Call helper routines to perform file manipulations */
+ ret = test_userblock_alignment_helper1(fcpl, fapl);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper1");
+
+ /* Change alignment in FAPL */
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)1024);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Call helper routines to perform file manipulations */
+ ret = test_userblock_alignment_helper2(fapl, FALSE);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper2");
+ ret = test_userblock_alignment_helper2(fapl, TRUE);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper2");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+} /* end test_userblock_alignment() */
+
+/****************************************************************
+**
+** test_userblock_alignment_paged(): low-level file test routine.
+** This test checks to ensure that files with both a userblock and
+** alignment interact properly:
+** -- alignment via H5Pset_alignment
+** -- alignment via paged aggregation
+**
+** Programmer: Vailin Choi; March 2013
+**
+*****************************************************************/
+static void
+test_userblock_alignment_paged(const char *env_h5_drvr)
+{
+ hid_t fid; /* File ID */
+ hid_t fcpl; /* File creation property list ID */
+ hid_t fapl; /* File access property list ID */
+ herr_t ret; /* Generic return value */
+
+ /* Only run with sec2 driver */
+ if (!h5_using_default_driver(env_h5_drvr))
+ return;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing interaction between userblock and alignment (via paged aggregation and "
+ "H5Pset_alignment)\n"));
+
+ /*
+ * Case 1:
+ * Userblock size = 0
+ * Alignment in use = 4096
+ * Strategy = H5F_FILE_SPACE_PAGE; fsp_size = alignment = 4096
+ * Outcome:
+ * Should succeed:
+ * userblock is 0 and alignment != 0
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)0);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+
+ /* Create file access property list */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+
+ /* Set the "use the latest version of the format" bounds */
+ ret = H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_PAGE, FALSE, (hsize_t)1);
+ CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+
+ /* Call helper routines to perform file manipulations */
+ ret = test_userblock_alignment_helper1(fcpl, fapl);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper1");
+ ret = test_userblock_alignment_helper2(fapl, TRUE);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper2");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /*
+ * Case 2a:
+ * Userblock size = 1024
+ * Alignment in use = 512
+ * Strategy = H5F_FILE_SPACE_PAGE; fsp_size = alignment = 512
+ * H5Pset_alignment() is 3
+ * Outcome:
+ * Should succeed:
+ * userblock (1024) is integral mult. of alignment (512)
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)1024);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+ /* Verify the strategy/page-size setters succeed (previously unchecked) */
+ ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_PAGE, FALSE, (hsize_t)1);
+ CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+ ret = H5Pset_file_space_page_size(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_file_space_page_size");
+
+ /* Create file access property list */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)3);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Call helper routines to perform file manipulations */
+ ret = test_userblock_alignment_helper1(fcpl, fapl);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper1");
+ ret = test_userblock_alignment_helper2(fapl, TRUE);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper2");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /*
+ * Case 2b:
+ * Userblock size = 1024
+ * Alignment in use = 3
+ * Strategy = H5F_FILE_SPACE_AGGR; fsp_size = 512
+ * (via default file creation property)
+ * H5Pset_alignment() is 3
+ * Outcome:
+ * Should fail at file creation:
+ * userblock (1024) is non-integral mult. of alignment (3)
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)1024);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+ ret = H5Pset_file_space_page_size(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_file_space_page_size");
+
+ /* Create file access property list */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)3);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Create a file with FAPL & FCPL */
+ H5E_BEGIN_TRY
+ {
+ fid = H5Fcreate(FILE1, H5F_ACC_TRUNC, fcpl, fapl);
+ }
+ H5E_END_TRY;
+ VERIFY(fid, FAIL, "H5Fcreate");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /*
+ * Case 3a:
+ * Userblock size = 512
+ * Alignment in use = 512
+ * Strategy is H5F_FILE_SPACE_PAGE; fsp_size = alignment = 512
+ * H5Pset_alignment() is 3
+ * Outcome:
+ * Should succeed:
+ * userblock (512) is equal to alignment (512)
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+ ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_PAGE, TRUE, (hsize_t)1);
+ CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+ ret = H5Pset_file_space_page_size(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_file_space_page_size");
+
+ /* Create file access property list with alignment */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)3);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Call helper routines to perform file manipulations */
+ ret = test_userblock_alignment_helper1(fcpl, fapl);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper1");
+ ret = test_userblock_alignment_helper2(fapl, TRUE);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper2");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /*
+ * Case 3b:
+ * Userblock size = 512
+ * Alignment in use = 3
+ * Strategy is H5F_FILE_SPACE_NONE; fsp_size = 512
+ * H5Pset_alignment() is 3
+ * Outcome:
+ * Should fail at file creation:
+ * userblock (512) is non-integral mult. of alignment (3)
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+ ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_NONE, FALSE, (hsize_t)1);
+ CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+ ret = H5Pset_file_space_page_size(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_file_space_page_size");
+
+ /* Create file access property list with alignment */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)3);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Create a file with FAPL & FCPL */
+ H5E_BEGIN_TRY
+ {
+ fid = H5Fcreate(FILE1, H5F_ACC_TRUNC, fcpl, fapl);
+ }
+ H5E_END_TRY;
+ VERIFY(fid, FAIL, "H5Fcreate");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /*
+ * Case 4a:
+ * Userblock size = 1024
+ * Alignment in use = 1023
+ * Strategy is H5F_FILE_SPACE_PAGE; fsp_size = alignment = 1023
+ * H5Pset_alignment() is 16
+ * Outcome:
+ * Should fail at file creation:
+ * userblock (1024) is non-integral multiple of alignment (1023)
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)1024);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+ ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_PAGE, TRUE, (hsize_t)1);
+ CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+ ret = H5Pset_file_space_page_size(fcpl, (hsize_t)1023);
+ CHECK(ret, FAIL, "H5Pset_file_space_page_size");
+
+ /* Create file access property list with alignment */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)16);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Create a file with FAPL & FCPL */
+ H5E_BEGIN_TRY
+ {
+ fid = H5Fcreate(FILE1, H5F_ACC_TRUNC, fcpl, fapl);
+ }
+ H5E_END_TRY;
+ VERIFY(fid, FAIL, "H5Fcreate");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /*
+ * Case 4b:
+ * Userblock size = 1024
+ * Alignment in use = 16
+ * Strategy is H5F_FILE_SPACE_FSM_AGGR; fsp_size = 1023
+ * H5Pset_alignment() is 16
+ * Outcome:
+ * Should succeed:
+ * userblock (1024) is integral multiple of alignment (16)
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)1024);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+ ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_FSM_AGGR, FALSE, (hsize_t)1);
+ CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+ ret = H5Pset_file_space_page_size(fcpl, (hsize_t)1023);
+ CHECK(ret, FAIL, "H5Pset_file_space_page_size");
+
+ /* Create file access property list with alignment */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)16);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Call helper routines to perform file manipulations */
+ ret = test_userblock_alignment_helper1(fcpl, fapl);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper1");
+ ret = test_userblock_alignment_helper2(fapl, TRUE);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper2");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /*
+ * Case 5a:
+ * Userblock size = 512
+ * Alignment in use = 1024
+ * Strategy is H5F_FILE_SPACE_PAGE; fsp_size = alignment = 1024
+ * H5Pset_alignment() is 16
+ * Outcome:
+ * Should fail at file creation:
+ * userblock (512) is less than alignment (1024)
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+ ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_PAGE, FALSE, (hsize_t)1);
+ CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+ ret = H5Pset_file_space_page_size(fcpl, (hsize_t)1024);
+ CHECK(ret, FAIL, "H5Pset_file_space_page_size");
+
+ /* Create file access property list with alignment */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)16);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Create a file with FAPL & FCPL */
+ H5E_BEGIN_TRY
+ {
+ fid = H5Fcreate(FILE1, H5F_ACC_TRUNC, fcpl, fapl);
+ }
+ H5E_END_TRY;
+ VERIFY(fid, FAIL, "H5Fcreate");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /*
+ * Case 5b:
+ * Userblock size = 512
+ * Alignment in use = 16
+ * Strategy is H5F_FILE_SPACE_NONE; fsp_size = 1024
+ * H5Pset_alignment() is 16
+ * Outcome:
+ * Should succeed:
+ * userblock (512) is integral multiple of alignment (16)
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+ ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_NONE, FALSE, (hsize_t)1);
+ CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+ ret = H5Pset_file_space_page_size(fcpl, (hsize_t)1024);
+ CHECK(ret, FAIL, "H5Pset_file_space_page_size");
+
+ /* Create file access property list with alignment */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)16);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Call helper routines to perform file manipulations */
+ ret = test_userblock_alignment_helper1(fcpl, fapl);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper1");
+ ret = test_userblock_alignment_helper2(fapl, TRUE);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper2");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /*
+ * Case 6:
+ * Userblock size = 512
+ * Alignment in use = 512
+ * Strategy is H5F_FILE_SPACE_PAGE; fsp_size = alignment = 512
+ * H5Pset_alignment() is 3
+ * Reopen the file; H5Pset_alignment() is 1024
+ * Outcome:
+ * Should succeed:
+ * Userblock (512) is the same as alignment (512);
+ * The H5Pset_alignment() calls have no effect
+ */
+ /* Create file creation property list with user block */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_userblock(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+ ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_PAGE, FALSE, (hsize_t)1);
+ CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+ ret = H5Pset_file_space_page_size(fcpl, (hsize_t)512);
+ CHECK(ret, FAIL, "H5Pset_file_space_page_size");
+
+ /* Create file access property list with alignment */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)3);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Call helper routines to perform file manipulations */
+ ret = test_userblock_alignment_helper1(fcpl, fapl);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper1");
+
+ /* Change alignment in FAPL */
+ ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)1024);
+ CHECK(ret, FAIL, "H5Pset_alignment");
+
+ /* Call helper routines to perform file manipulations */
+ ret = test_userblock_alignment_helper2(fapl, FALSE);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper2");
+ ret = test_userblock_alignment_helper2(fapl, TRUE);
+ CHECK(ret, FAIL, "test_userblock_alignment_helper2");
+
+ /* Release property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+} /* end test_userblock_alignment_paged() */
+#endif
+
+/****************************************************************
+**
+** test_filespace_info():
+** Verify the following public routines retrieve and set file space
+** information correctly:
+** (1) H5Pget/set_file_space_strategy():
+** Retrieve and set file space strategy, persisting free-space,
+** and free-space section threshold as specified
+** (2) H5Pget/set_file_space_page_size():
+** Retrieve and set the page size for paged aggregation
+**
+****************************************************************/
+#if 0
+static void
+test_filespace_info(const char *env_h5_drvr)
+{
+ hid_t fid; /* File IDs */
+ hid_t fapl, new_fapl; /* File access property lists */
+ hid_t fcpl, fcpl1, fcpl2; /* File creation property lists */
+ H5F_fspace_strategy_t strategy; /* File space strategy */
+ hbool_t persist; /* Persist free-space or not */
+ hsize_t threshold; /* Free-space section threshold */
+ unsigned new_format; /* New or old format */
+ H5F_fspace_strategy_t fs_strategy; /* File space strategy--iteration variable */
+ unsigned fs_persist; /* Persist free-space or not--iteration variable */
+ hsize_t fs_threshold; /* Free-space section threshold--iteration variable */
+ hsize_t fsp_size; /* File space page size */
+ char filename[FILENAME_LEN]; /* Filename to use */
+ hbool_t contig_addr_vfd; /* Whether VFD used has a contiguous address space */
+ herr_t ret; /* Return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing file creation public routines: H5Pget/set_file_space_strategy & "
+ "H5Pget/set_file_space_page_size\n"));
+
+ contig_addr_vfd = (hbool_t)(HDstrcmp(env_h5_drvr, "split") != 0 && HDstrcmp(env_h5_drvr, "multi") != 0);
+
+ fapl = h5_fileaccess();
+ h5_fixname(FILESPACE_NAME[0], fapl, filename, sizeof filename);
+
+ /* Get a copy of the file access property list */
+ new_fapl = H5Pcopy(fapl);
+ CHECK(new_fapl, FAIL, "H5Pcopy");
+
+ /* Set the "use the latest version of the format" bounds */
+ ret = H5Pset_libver_bounds(new_fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ /*
+ * Case (1)
+ * Check file space information from a default file creation property list.
+ * Values expected:
+ * strategy--H5F_FSPACE_STRATEGY_FSM_AGGR
+ * persist--FALSE
+ * threshold--1
+ * file space page size--4096
+ */
+ /* Create file creation property list template */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+
+ /* Retrieve file space information */
+ ret = H5Pget_file_space_strategy(fcpl, &strategy, &persist, &threshold);
+ CHECK(ret, FAIL, "H5Pget_file_space_strategy");
+
+ /* Verify file space information */
+ VERIFY(strategy, H5F_FSPACE_STRATEGY_FSM_AGGR, "H5Pget_file_space_strategy");
+ VERIFY(persist, FALSE, "H5Pget_file_space_strategy");
+ VERIFY(threshold, 1, "H5Pget_file_space_strategy");
+
+ /* Retrieve file space page size */
+ ret = H5Pget_file_space_page_size(fcpl, &fsp_size);
+ CHECK(ret, FAIL, "H5Pget_file_space_page_size");
+ VERIFY(fsp_size, FSP_SIZE_DEF, "H5Pget_file_space_page_size");
+
+ /* Close property list */
+ H5Pclose(fcpl);
+
+ /*
+ * Case (2)
+ * File space page size has a minimum size of 512.
+ * Setting value less than 512 will return an error;
+ * --setting file space page size to 0
+ * --setting file space page size to 511
+ *
+ * File space page size has a maximum size of 1 gigabyte.
+ * Setting value greater than 1 gigabyte will return an error.
+ */
+ /* Create file creation property list template */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+
+ /* Setting to 0: should fail */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pset_file_space_page_size(fcpl, 0);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pset_file_space_page_size");
+
+ /* Setting to 511: should fail */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pset_file_space_page_size(fcpl, 511);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pset_file_space_page_size");
+
+ /* Setting to 1GB+1: should fail */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pset_file_space_page_size(fcpl, FSP_SIZE1G + 1);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pset_file_space_page_size");
+
+ /* Setting to 512: should succeed */
+ ret = H5Pset_file_space_page_size(fcpl, FSP_SIZE512);
+ CHECK(ret, FAIL, "H5Pset_file_space_page_size");
+ ret = H5Pget_file_space_page_size(fcpl, &fsp_size);
+ CHECK(ret, FAIL, "H5Pget_file_space_page_size");
+ VERIFY(fsp_size, FSP_SIZE512, "H5Pget_file_space_page_size");
+
+ /* Setting to 1GB: should succeed */
+ ret = H5Pset_file_space_page_size(fcpl, FSP_SIZE1G);
+ CHECK(ret, FAIL, "H5Pset_file_space_page_size");
+ ret = H5Pget_file_space_page_size(fcpl, &fsp_size);
+ CHECK(ret, FAIL, "H5Pget_file_space_page_size");
+ VERIFY(fsp_size, FSP_SIZE1G, "H5Pget_file_space_page_size");
+
+ /* Close property list */
+ H5Pclose(fcpl);
+
+ /*
+ * Case (3)
+ * Check file space information when creating a file with default properties.
+ * Values expected:
+ * strategy--H5F_FSPACE_STRATEGY_FSM_AGGR
+ * persist--FALSE
+ * threshold--1
+ * file space page size--4096
+ */
+ /* Create a file with default file creation and access property lists */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Get the file's creation property list */
+ fcpl1 = H5Fget_create_plist(fid);
+ CHECK(fcpl1, FAIL, "H5Fget_create_plist");
+
+ /* Retrieve file space information */
+ ret = H5Pget_file_space_strategy(fcpl1, &strategy, &persist, &threshold);
+ CHECK(ret, FAIL, "H5Pget_file_space_strategy");
+
+ /* Verify file space information */
+ VERIFY(strategy, H5F_FSPACE_STRATEGY_FSM_AGGR, "H5Pget_file_space_strategy");
+ VERIFY(persist, FALSE, "H5Pget_file_space_strategy");
+ VERIFY(threshold, 1, "H5Pget_file_space_strategy");
+
+ /* Retrieve file space page size */
+ ret = H5Pget_file_space_page_size(fcpl1, &fsp_size);
+ CHECK(ret, FAIL, "H5Pget_file_space_page_size");
+ VERIFY(fsp_size, FSP_SIZE_DEF, "H5Pget_file_space_page_size");
+
+ /* Close property lists */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+ ret = H5Pclose(fcpl1);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /*
+ * Case (4)
+ * Check file space information when creating a file with the
+ * latest library format and default properties.
+ * Values expected:
+ * strategy--H5F_FSPACE_STRATEGY_FSM_AGGR
+ * persist--FALSE
+ * threshold--1
+ * file space page size--4096
+ */
+ /* Create a file with the latest library format */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, new_fapl);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Get the file's creation property */
+ fcpl1 = H5Fget_create_plist(fid);
+ CHECK(fcpl1, FAIL, "H5Fget_create_plist");
+
+ /* Retrieve file space information */
+ ret = H5Pget_file_space_strategy(fcpl1, &strategy, &persist, &threshold);
+ CHECK(ret, FAIL, "H5Pget_file_space_strategy");
+
+ /* Verify file space information */
+ VERIFY(strategy, H5F_FSPACE_STRATEGY_FSM_AGGR, "H5Pget_file_space_strategy");
+ VERIFY(persist, FALSE, "H5Pget_file_space_strategy");
+ VERIFY(threshold, 1, "H5Pget_file_space_strategy");
+
+ /* Retrieve file space page size */
+ ret = H5Pget_file_space_page_size(fcpl1, &fsp_size);
+ CHECK(ret, FAIL, "H5Pget_file_space_page_size");
+ VERIFY(fsp_size, FSP_SIZE_DEF, "H5Pget_file_space_page_size");
+
+ /* Close property lists */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+ ret = H5Pclose(fcpl1);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /*
+ * Case (5)
+ * Check file space information with the following combinations:
+ * Create file with --
+ * New or old format
+ * Persist or not persist free-space
+ * Different sizes for free-space section threshold (0 to 10)
+ * The four file space strategies:
+ * H5F_FSPACE_STRATEGY_FSM_AGGR, H5F_FSPACE_STRATEGY_PAGE,
+ * H5F_FSPACE_STRATEGY_AGGR, H5F_FSPACE_STRATEGY_NONE
+ * File space page size: set to 512
+ *
+ */
+ for (new_format = FALSE; new_format <= TRUE; new_format++) {
+ hid_t my_fapl;
+
+ /* Set the FAPL for the type of format */
+ if (new_format) {
+ MESSAGE(5, ("Testing with new group format\n"));
+ my_fapl = new_fapl;
+ } /* end if */
+ else {
+ MESSAGE(5, ("Testing with old group format\n"));
+ my_fapl = fapl;
+ } /* end else */
+
+ /* Test with TRUE or FALSE for persisting free-space */
+ for (fs_persist = FALSE; fs_persist <= TRUE; fs_persist++) {
+
+ /* Test with free-space section threshold size: 0 to 10 */
+ for (fs_threshold = 0; fs_threshold <= TEST_THRESHOLD10; fs_threshold++) {
+
+ /* Test with 4 file space strategies */
+ for (fs_strategy = H5F_FSPACE_STRATEGY_FSM_AGGR; fs_strategy < H5F_FSPACE_STRATEGY_NTYPES;
+ fs_strategy++) {
+
+ if (!contig_addr_vfd && (fs_strategy == H5F_FSPACE_STRATEGY_PAGE || fs_persist))
+ continue;
+
+ /* Create file creation property list template */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+
+ /* Set file space information */
+ ret = H5Pset_file_space_strategy(fcpl, fs_strategy, (hbool_t)fs_persist, fs_threshold);
+ CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+
+ ret = H5Pset_file_space_page_size(fcpl, FSP_SIZE512);
+ CHECK(ret, FAIL, "H5Pset_file_space_page_size");
+
+ /* Retrieve file space information */
+ ret = H5Pget_file_space_strategy(fcpl, &strategy, &persist, &threshold);
+ CHECK(ret, FAIL, "H5Pget_file_space_strategy");
+
+ /* Verify file space information */
+ VERIFY(strategy, fs_strategy, "H5Pget_file_space_strategy");
+
+ /* persist/threshold are only meaningful for FSM_AGGR & PAGE
+ * strategies; the others report the library defaults */
+ if (fs_strategy < H5F_FSPACE_STRATEGY_AGGR) {
+ VERIFY(persist, (hbool_t)fs_persist, "H5Pget_file_space_strategy");
+ VERIFY(threshold, fs_threshold, "H5Pget_file_space_strategy");
+ }
+ else {
+ VERIFY(persist, FALSE, "H5Pget_file_space_strategy");
+ VERIFY(threshold, 1, "H5Pget_file_space_strategy");
+ }
+
+ /* Retrieve and verify file space page size */
+ ret = H5Pget_file_space_page_size(fcpl, &fsp_size);
+ CHECK(ret, FAIL, "H5Pget_file_space_page_size");
+ VERIFY(fsp_size, FSP_SIZE512, "H5Pget_file_space_page_size");
+
+ /* Create the file with the specified file space info */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, fcpl, my_fapl);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Get the file's creation property */
+ fcpl1 = H5Fget_create_plist(fid);
+ CHECK(fcpl1, FAIL, "H5Fget_create_plist");
+
+ /* Retrieve file space information */
+ ret = H5Pget_file_space_strategy(fcpl1, &strategy, &persist, &threshold);
+ CHECK(ret, FAIL, "H5Pget_file_space_strategy");
+
+ /* Verify file space information */
+ VERIFY(strategy, fs_strategy, "H5Pget_file_space_strategy");
+
+ if (fs_strategy < H5F_FSPACE_STRATEGY_AGGR) {
+ VERIFY(persist, fs_persist, "H5Pget_file_space_strategy");
+ VERIFY(threshold, fs_threshold, "H5Pget_file_space_strategy");
+ }
+ else {
+ VERIFY(persist, FALSE, "H5Pget_file_space_strategy");
+ VERIFY(threshold, 1, "H5Pget_file_space_strategy");
+ }
+
+ /* Retrieve and verify file space page size */
+ ret = H5Pget_file_space_page_size(fcpl1, &fsp_size);
+ CHECK(ret, FAIL, "H5Pget_file_space_page_size");
+ VERIFY(fsp_size, FSP_SIZE512, "H5Pget_file_space_page_size");
+
+ /* Close the file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open the file */
+ fid = H5Fopen(filename, H5F_ACC_RDWR, my_fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Get the file's creation property */
+ fcpl2 = H5Fget_create_plist(fid);
+ CHECK(fcpl2, FAIL, "H5Fget_create_plist");
+
+ /* Retrieve file space information */
+ ret = H5Pget_file_space_strategy(fcpl2, &strategy, &persist, &threshold);
+ CHECK(ret, FAIL, "H5Pget_file_space_strategy");
+
+ /* Verify file space information */
+ VERIFY(strategy, fs_strategy, "H5Pget_file_space_strategy");
+ if (fs_strategy < H5F_FSPACE_STRATEGY_AGGR) {
+ VERIFY(persist, fs_persist, "H5Pget_file_space_strategy");
+ VERIFY(threshold, fs_threshold, "H5Pget_file_space_strategy");
+ }
+ else {
+ VERIFY(persist, FALSE, "H5Pget_file_space_strategy");
+ VERIFY(threshold, 1, "H5Pget_file_space_strategy");
+ }
+
+ /* Retrieve and verify file space page size */
+ ret = H5Pget_file_space_page_size(fcpl2, &fsp_size);
+ CHECK(ret, FAIL, "H5Pget_file_space_page_size");
+ VERIFY(fsp_size, FSP_SIZE512, "H5Pget_file_space_page_size");
+
+ /* Close the file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Release file creation property lists */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fcpl1);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fcpl2);
+ CHECK(ret, FAIL, "H5Pclose");
+ } /* end for file space strategy type */
+ } /* end for free-space section threshold */
+ } /* end for fs_persist */
+
+ /* close fapl_ and remove the file */
+#if 0
+ h5_clean_files(FILESPACE_NAME, my_fapl);
+#endif
+
+ H5E_BEGIN_TRY
+ {
+ H5Fdelete(FILESPACE_NAME[0], my_fapl);
+ }
+ H5E_END_TRY;
+ } /* end for new_format */
+
+ /* Release the file access property lists (previously leaked) */
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(new_fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+} /* test_filespace_info() */
+#endif
+
+/****************************************************************
+**
+** set_multi_split():
+** Internal routine to set up page-aligned address space for multi/split driver
+** when testing paged aggregation.
+** This is used by test_file_freespace() and test_sects_freespace().
+**
+*****************************************************************/
+#if 0
+static int
+set_multi_split(hid_t fapl, hsize_t pagesize, hbool_t split)
+{
+ H5FD_mem_t memb_map[H5FD_MEM_NTYPES];
+ hid_t memb_fapl_arr[H5FD_MEM_NTYPES];
+ char *memb_name[H5FD_MEM_NTYPES];
+ haddr_t memb_addr[H5FD_MEM_NTYPES];
+ hbool_t relax;
+ H5FD_mem_t mt;
+
+ /* NOTE(review): this assert requires split == TRUE, which makes the
+ * else branch below unreachable in debug builds; in NDEBUG builds
+ * HDassert is a no-op, so split == FALSE would take the else branch.
+ * Confirm whether the non-split (multi driver) path is still intended. */
+ HDassert(split);
+
+ /* Zero the name array so every slot is safe to HDfree() below */
+ HDmemset(memb_name, 0, sizeof memb_name);
+
+ /* Get current split settings */
+ if (H5Pget_fapl_multi(fapl, memb_map, memb_fapl_arr, memb_name, memb_addr, &relax) < 0)
+ TEST_ERROR;
+
+ if (split) {
+ /* Set memb_addr aligned: round the superblock and raw-data base
+ * addresses up to the next multiple of pagesize */
+ memb_addr[H5FD_MEM_SUPER] = ((memb_addr[H5FD_MEM_SUPER] + pagesize - 1) / pagesize) * pagesize;
+ memb_addr[H5FD_MEM_DRAW] = ((memb_addr[H5FD_MEM_DRAW] + pagesize - 1) / pagesize) * pagesize;
+ }
+ else {
+ /* Set memb_addr aligned: round every memory type's base address up */
+ for (mt = H5FD_MEM_DEFAULT; mt < H5FD_MEM_NTYPES; mt++)
+ memb_addr[mt] = ((memb_addr[mt] + pagesize - 1) / pagesize) * pagesize;
+ } /* end else */
+
+ /* Set multi driver with new FAPLs */
+ if (H5Pset_fapl_multi(fapl, memb_map, memb_fapl_arr, (const char *const *)memb_name, memb_addr, relax) <
+ 0)
+ TEST_ERROR;
+
+ /* Free memb_name strings allocated by H5Pget_fapl_multi */
+ for (mt = H5FD_MEM_DEFAULT; mt < H5FD_MEM_NTYPES; mt++)
+ HDfree(memb_name[mt]);
+
+ return 0;
+
+error:
+ return (-1);
+
+} /* set_multi_split() */
+#endif
+
+/****************************************************************
+**
+** test_file_freespace():
+** This routine checks the free space available in a file as
+** returned by the public routine H5Fget_freespace().
+**
+**
+*****************************************************************/
+#if 0
+/* Check the free space reported by H5Fget_freespace() for both the old and
+ * the latest file format.  The byte-count verifications are currently
+ * disabled (#if 0) in this API-test build; the routine still exercises
+ * dataset create/delete and cleans up after itself.
+ * env_h5_drvr - name of the VFD under test; multi/split are skipped here. */
+static void
+test_file_freespace(const char *env_h5_drvr)
+{
+    hid_t file; /* File opened with read-write permission */
+#if 0
+    h5_stat_size_t empty_filesize; /* Size of file when empty */
+    h5_stat_size_t mod_filesize; /* Size of file after being modified */
+    hssize_t free_space; /* Amount of free space in file */
+#endif
+    hid_t    fcpl;           /* File creation property list */
+    hid_t    fapl, new_fapl; /* File access property list IDs */
+    hid_t    dspace;         /* Dataspace ID */
+    hid_t    dset;           /* Dataset ID */
+    hid_t    dcpl;           /* Dataset creation property list */
+    int      k;              /* Local index variable */
+    unsigned u;              /* Local index variable */
+    char     filename[FILENAME_LEN]; /* Filename to use */
+    char     name[32];               /* Dataset name */
+    unsigned new_format;             /* To use old or new format */
+    hbool_t  split_vfd, multi_vfd;   /* Indicate multi/split driver */
+    hsize_t  expected_freespace;     /* Freespace expected */
+    hsize_t  expected_fs_del;        /* Freespace expected after delete */
+    herr_t   ret;                    /* Return value */
+
+    split_vfd = !HDstrcmp(env_h5_drvr, "split");
+    multi_vfd = !HDstrcmp(env_h5_drvr, "multi");
+
+    /* Multi/split drivers are not exercised by this build of the test */
+    if (!split_vfd && !multi_vfd) {
+        fapl = h5_fileaccess();
+        h5_fixname(FILESPACE_NAME[0], fapl, filename, sizeof filename);
+
+        new_fapl = H5Pcopy(fapl);
+        CHECK(new_fapl, FAIL, "H5Pcopy");
+
+        /* Set the "use the latest version of the format" bounds */
+        ret = H5Pset_libver_bounds(new_fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
+        CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+        fcpl = H5Pcreate(H5P_FILE_CREATE);
+        CHECK(fcpl, FAIL, "H5Pcreate");
+
+        /* Test with old & new format */
+        for (new_format = FALSE; new_format <= TRUE; new_format++) {
+            hid_t my_fapl;
+
+            /* Set the FAPL for the type of format */
+            if (new_format) {
+                MESSAGE(5, ("Testing with new group format\n"));
+
+                my_fapl = new_fapl;
+
+                if (multi_vfd || split_vfd) {
+                    ret = set_multi_split(new_fapl, FSP_SIZE_DEF, split_vfd);
+                    CHECK(ret, FAIL, "set_multi_split");
+                }
+
+                ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_PAGE, FALSE, (hsize_t)1);
+                CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+
+                /* Expected free-space byte counts (only used by the disabled
+                 * H5Fget_freespace() verifications below) */
+                expected_freespace = 4534;
+                if (split_vfd)
+                    expected_freespace = 427;
+                if (multi_vfd)
+                    expected_freespace = 248;
+                expected_fs_del = 0;
+            } /* end if */
+            else {
+                MESSAGE(5, ("Testing with old group format\n"));
+                /* Default: non-paged aggregation, non-persistent free-space */
+                my_fapl            = fapl;
+                expected_freespace = 2464;
+                if (split_vfd)
+                    expected_freespace = 264;
+                if (multi_vfd)
+                    expected_freespace = 0;
+                expected_fs_del = 4096;
+
+            } /* end else */
+
+            /* Create an "empty" file */
+            file = H5Fcreate(filename, H5F_ACC_TRUNC, fcpl, my_fapl);
+            CHECK(file, FAIL, "H5Fcreate");
+
+            ret = H5Fclose(file);
+            CHECK_I(ret, "H5Fclose");
+#if 0
+            /* Get the "empty" file size */
+            empty_filesize = h5_get_file_size(filename, H5P_DEFAULT);
+#endif
+            /* Re-open the file (with read-write permission) */
+            file = H5Fopen(filename, H5F_ACC_RDWR, my_fapl);
+            CHECK_I(file, "H5Fopen");
+#if 0
+            /* Check that the free space is 0 */
+            free_space = H5Fget_freespace(file);
+            CHECK(free_space, FAIL, "H5Fget_freespace");
+            VERIFY(free_space, 0, "H5Fget_freespace");
+#endif
+            /* Create dataspace for datasets */
+            dspace = H5Screate(H5S_SCALAR);
+            CHECK(dspace, FAIL, "H5Screate");
+
+            /* Create a dataset creation property list */
+            dcpl = H5Pcreate(H5P_DATASET_CREATE);
+            CHECK(dcpl, FAIL, "H5Pcreate");
+
+            /* Set the space allocation time to early */
+            ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY);
+            CHECK(ret, FAIL, "H5Pset_alloc_time");
+
+            /* Create datasets in file */
+            for (u = 0; u < 10; u++) {
+                HDsnprintf(name, sizeof(name), "Dataset %u", u);
+                dset = H5Dcreate2(file, name, H5T_STD_U32LE, dspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+                CHECK(dset, FAIL, "H5Dcreate2");
+
+                ret = H5Dclose(dset);
+                CHECK(ret, FAIL, "H5Dclose");
+            } /* end for */
+
+            /* Close dataspace */
+            ret = H5Sclose(dspace);
+            CHECK(ret, FAIL, "H5Sclose");
+
+            /* Close dataset creation property list */
+            ret = H5Pclose(dcpl);
+            CHECK(ret, FAIL, "H5Pclose");
+#if 0
+            /* Check that there is the right amount of free space in the file */
+            free_space = H5Fget_freespace(file);
+            CHECK(free_space, FAIL, "H5Fget_freespace");
+            VERIFY(free_space, expected_freespace, "H5Fget_freespace");
+#endif
+            /* Delete datasets in file */
+            for (k = 9; k >= 0; k--) {
+                HDsnprintf(name, sizeof(name), "Dataset %u", (unsigned)k);
+                ret = H5Ldelete(file, name, H5P_DEFAULT);
+                CHECK(ret, FAIL, "H5Ldelete");
+            } /* end for */
+#if 0
+            /* Check that there is the right amount of free space in the file */
+            free_space = H5Fget_freespace(file);
+            CHECK(free_space, FAIL, "H5Fget_freespace");
+            VERIFY(free_space, expected_fs_del, "H5Fget_freespace");
+#endif
+            /* Close file */
+            ret = H5Fclose(file);
+            CHECK(ret, FAIL, "H5Fclose");
+#if 0
+            /* Get the file size after modifications*/
+            mod_filesize = h5_get_file_size(filename, H5P_DEFAULT);
+
+            /* Check that the file reverted to empty size */
+            VERIFY(mod_filesize, empty_filesize, "H5Fget_freespace");
+
+            h5_clean_files(FILESPACE_NAME, my_fapl);
+#endif
+            H5Fdelete(FILESPACE_NAME[0], my_fapl);
+        } /* end for */
+
+        /* Release property lists.  These were previously leaked: the
+         * h5_clean_files() call that used to close the FAPL is disabled. */
+        ret = H5Pclose(fcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+        ret = H5Pclose(new_fapl);
+        CHECK(ret, FAIL, "H5Pclose");
+        ret = H5Pclose(fapl);
+        CHECK(ret, FAIL, "H5Pclose");
+    }
+
+} /* end test_file_freespace() */
+
+/****************************************************************
+**
+** test_sects_freespace():
+** This routine checks free-space section information for the
+** file as returned by the public routine H5Fget_free_sections().
+**
+*****************************************************************/
+/* Check the free-space section information reported by
+ * H5Fget_free_sections() for metadata, raw data, and all types combined.
+ * env_h5_drvr - name of the VFD under test; multi/split are skipped here.
+ * new_format  - TRUE to use paged aggregation + latest format, FALSE for
+ *               FSM/aggregation with the default format. */
+static void
+test_sects_freespace(const char *env_h5_drvr, hbool_t new_format)
+{
+    char  filename[FILENAME_LEN]; /* Filename to use */
+    hid_t file;                   /* File ID */
+    hid_t fcpl;                   /* File creation property list template */
+    hid_t fapl;                   /* File access property list template */
+#if 0
+    hssize_t free_space; /* Amount of free-space in the file */
+#endif
+    hid_t           dspace;             /* Dataspace ID */
+    hid_t           dset;               /* Dataset ID */
+    hid_t           dcpl;               /* Dataset creation property list */
+    char            name[32];           /* Dataset name */
+    hssize_t        nsects = 0;         /* # of free-space sections */
+    hssize_t        nall;               /* # of free-space sections for all types of data */
+    hssize_t        nmeta = 0, nraw = 0; /* # of free-space sections for meta/raw/generic data */
+    H5F_sect_info_t sect_info[15];      /* Array to hold free-space information */
+    H5F_sect_info_t all_sect_info[15];  /* Array to hold free-space information for all types of data */
+    H5F_sect_info_t meta_sect_info[15]; /* Array to hold free-space information for metadata */
+    H5F_sect_info_t raw_sect_info[15];  /* Array to hold free-space information for raw data */
+    hsize_t         total   = 0;        /* sum of the free-space section sizes */
+    hsize_t         tmp_tot = 0;        /* Sum of the free-space section sizes */
+    hsize_t         last_size;          /* Size of last free-space section */
+    hsize_t         dims[1];            /* Dimension sizes */
+    unsigned        u;                  /* Local index variable */
+    H5FD_mem_t      type;
+    hbool_t         split_vfd = FALSE, multi_vfd = FALSE;
+    herr_t          ret; /* Return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing H5Fget_free_sections()--free-space section info in the file\n"));
+
+    split_vfd = !HDstrcmp(env_h5_drvr, "split");
+    multi_vfd = !HDstrcmp(env_h5_drvr, "multi");
+
+    if (!split_vfd && !multi_vfd) {
+
+        fapl = h5_fileaccess();
+        h5_fixname(FILESPACE_NAME[0], fapl, filename, sizeof filename);
+
+        /* Create file-creation template */
+        fcpl = H5Pcreate(H5P_FILE_CREATE);
+        CHECK(fcpl, FAIL, "H5Pcreate");
+
+        if (new_format) {
+            ret = H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
+            CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+            /* Set to paged aggregation and persistent free-space */
+            ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_PAGE, TRUE, (hsize_t)1);
+            CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+
+            /* Set up paged aligned address space for multi/split driver */
+            if (multi_vfd || split_vfd) {
+                ret = set_multi_split(fapl, FSP_SIZE_DEF, split_vfd);
+                CHECK(ret, FAIL, "set_multi_split");
+            }
+        }
+        else {
+            ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_FSM_AGGR, TRUE, (hsize_t)1);
+            CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+        }
+
+        /* Create the file */
+        file = H5Fcreate(filename, H5F_ACC_TRUNC, fcpl, fapl);
+        CHECK(file, FAIL, "H5Fcreate");
+
+        /* Create a dataset creation property list */
+        dcpl = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(dcpl, FAIL, "H5Pcreate");
+
+        /* Set the space allocation time to early */
+        ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY);
+        CHECK(ret, FAIL, "H5Pset_alloc_time");
+
+        /* Create 1 large dataset */
+        dims[0] = 1200;
+        dspace  = H5Screate_simple(1, dims, NULL);
+        dset    = H5Dcreate2(file, "Dataset_large", H5T_STD_U32LE, dspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        CHECK(dset, FAIL, "H5Dcreate2");
+
+        /* Close dataset */
+        ret = H5Dclose(dset);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* Close dataspace */
+        ret = H5Sclose(dspace);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        /* Create dataspace for datasets */
+        dspace = H5Screate(H5S_SCALAR);
+        CHECK(dspace, FAIL, "H5Screate");
+
+        /* Create datasets in file */
+        for (u = 0; u < 10; u++) {
+            HDsnprintf(name, sizeof(name), "Dataset %u", u);
+            dset = H5Dcreate2(file, name, H5T_STD_U32LE, dspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+            CHECK(dset, FAIL, "H5Dcreate2");
+
+            ret = H5Dclose(dset);
+            CHECK(ret, FAIL, "H5Dclose");
+        } /* end for */
+
+        /* Close dataspace */
+        ret = H5Sclose(dspace);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        /* Close dataset creation property list */
+        ret = H5Pclose(dcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        /* Delete odd-numbered datasets in file, creating free-space sections */
+        for (u = 0; u < 10; u++) {
+            HDsnprintf(name, sizeof(name), "Dataset %u", u);
+            if (u % 2) {
+                ret = H5Ldelete(file, name, H5P_DEFAULT);
+                CHECK(ret, FAIL, "H5Ldelete");
+            } /* end if */
+        }     /* end for */
+
+        /* Close file */
+        ret = H5Fclose(file);
+        CHECK(ret, FAIL, "H5Fclose");
+
+        /* Re-open the file with read-only permission */
+        file = H5Fopen(filename, H5F_ACC_RDONLY, fapl);
+        CHECK_I(file, "H5Fopen");
+#if 0
+        /* Get the amount of free space in the file */
+        free_space = H5Fget_freespace(file);
+        CHECK(free_space, FAIL, "H5Fget_freespace");
+#endif
+        /* Get the total # of free-space sections in the file
+         * (a NULL sect_info with nsects == 0 just queries the count) */
+        nall = H5Fget_free_sections(file, H5FD_MEM_DEFAULT, (size_t)0, NULL);
+        CHECK(nall, FAIL, "H5Fget_free_sections");
+
+        /* Should return failure when nsects is 0 with a nonnull sect_info */
+        nsects = H5Fget_free_sections(file, H5FD_MEM_DEFAULT, (size_t)0, all_sect_info);
+        VERIFY(nsects, FAIL, "H5Fget_free_sections");
+
+        /* Retrieve and verify free space info for all the sections */
+        HDmemset(all_sect_info, 0, sizeof(all_sect_info));
+        nsects = H5Fget_free_sections(file, H5FD_MEM_DEFAULT, (size_t)nall, all_sect_info);
+        VERIFY(nsects, nall, "H5Fget_free_sections");
+
+        /* Verify the amount of free-space is correct */
+        for (u = 0; u < nall; u++)
+            total += all_sect_info[u].size;
+#if 0
+        VERIFY(free_space, total, "H5Fget_free_sections");
+#endif
+        /* Save the last section's size */
+        last_size = all_sect_info[nall - 1].size;
+
+        /* Retrieve and verify free space info for -1 sections; the return
+         * value is still the total section count, not the number copied */
+        HDmemset(sect_info, 0, sizeof(sect_info));
+        nsects = H5Fget_free_sections(file, H5FD_MEM_DEFAULT, (size_t)(nall - 1), sect_info);
+        VERIFY(nsects, nall, "H5Fget_free_sections");
+
+        /* Verify the amount of free-space is correct */
+        total = 0;
+        for (u = 0; u < (nall - 1); u++) {
+            VERIFY(sect_info[u].addr, all_sect_info[u].addr, "H5Fget_free_sections");
+            VERIFY(sect_info[u].size, all_sect_info[u].size, "H5Fget_free_sections");
+            total += sect_info[u].size;
+        }
+#if 0
+        VERIFY(((hsize_t)free_space - last_size), total, "H5Fget_free_sections");
+#endif
+        /* Retrieve and verify free-space info for +1 sections */
+        HDmemset(sect_info, 0, sizeof(sect_info));
+        nsects = H5Fget_free_sections(file, H5FD_MEM_DEFAULT, (size_t)(nall + 1), sect_info);
+        VERIFY(nsects, nall, "H5Fget_free_sections");
+
+        /* Verify amount of free-space is correct */
+        total = 0;
+        for (u = 0; u < nall; u++) {
+            VERIFY(sect_info[u].addr, all_sect_info[u].addr, "H5Fget_free_sections");
+            VERIFY(sect_info[u].size, all_sect_info[u].size, "H5Fget_free_sections");
+            total += sect_info[u].size;
+        }
+        /* The extra slot requested beyond nall must remain untouched */
+        VERIFY(sect_info[nall].addr, 0, "H5Fget_free_sections");
+        VERIFY(sect_info[nall].size, 0, "H5Fget_free_sections");
+#if 0
+        VERIFY(free_space, total, "H5Fget_free_sections");
+#endif
+
+        HDmemset(meta_sect_info, 0, sizeof(meta_sect_info));
+        if (multi_vfd) {
+            hssize_t ntmp;
+
+            /* With the multi driver, metadata is spread across several member
+             * types; accumulate sections from each (skipping raw data/gheap) */
+            for (type = H5FD_MEM_SUPER; type < H5FD_MEM_NTYPES; type++) {
+                if (type == H5FD_MEM_DRAW || type == H5FD_MEM_GHEAP)
+                    continue;
+                /* Get the # of free-space sections in the file for metadata */
+                ntmp = H5Fget_free_sections(file, type, (size_t)0, NULL);
+                CHECK(ntmp, FAIL, "H5Fget_free_sections");
+
+                if (ntmp > 0) {
+                    nsects = H5Fget_free_sections(file, type, (size_t)ntmp, &meta_sect_info[nmeta]);
+                    VERIFY(nsects, ntmp, "H5Fget_free_sections");
+                    nmeta += ntmp;
+                }
+            }
+        }
+        else {
+            /* Get the # of free-space sections in the file for metadata */
+            nmeta = H5Fget_free_sections(file, H5FD_MEM_SUPER, (size_t)0, NULL);
+            CHECK(nmeta, FAIL, "H5Fget_free_sections");
+
+            /* Retrieve and verify free-space sections for metadata */
+            nsects = H5Fget_free_sections(file, H5FD_MEM_SUPER, (size_t)nmeta, meta_sect_info);
+            VERIFY(nsects, nmeta, "H5Fget_free_sections");
+        }
+
+        /* Get the # of free-space sections in the file for raw data */
+        nraw = H5Fget_free_sections(file, H5FD_MEM_DRAW, (size_t)0, NULL);
+        CHECK(nraw, FAIL, "H5Fget_free_sections");
+
+        /* Retrieve and verify free-space sections for raw data */
+        HDmemset(raw_sect_info, 0, sizeof(raw_sect_info));
+        nsects = H5Fget_free_sections(file, H5FD_MEM_DRAW, (size_t)nraw, raw_sect_info);
+        VERIFY(nsects, nraw, "H5Fget_free_sections");
+
+        /* Sum all the free-space sections */
+        for (u = 0; u < nmeta; u++)
+            tmp_tot += meta_sect_info[u].size;
+
+        for (u = 0; u < nraw; u++)
+            tmp_tot += raw_sect_info[u].size;
+
+        /* Verify free-space info: per-type counts/sizes must add up to the totals */
+        VERIFY(nmeta + nraw, nall, "H5Fget_free_sections");
+        VERIFY(tmp_tot, total, "H5Fget_free_sections");
+
+        /* Closing */
+        ret = H5Fclose(file);
+        CHECK(ret, FAIL, "H5Fclose");
+        /* Check the close status (the original checked `fcpl` here by mistake) */
+        ret = H5Pclose(fcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+#if 0
+        h5_clean_files(FILESPACE_NAME, fapl);
+#endif
+        H5Fdelete(FILESPACE_NAME[0], fapl);
+
+        /* Release the FAPL (previously leaked; h5_clean_files would normally
+         * close it) */
+        ret = H5Pclose(fapl);
+        CHECK(ret, FAIL, "H5Pclose");
+    }
+
+} /* end test_sects_freespace() */
+#endif
+
+/****************************************************************
+**
+** test_filespace_compatible():
+** Verify that the trunk with the latest file space management
+** can open, read and modify 1.6 HDF5 file and 1.8 HDF5 file.
+** Also verify the correct file space handling information
+** and the amount of free space.
+**
+****************************************************************/
+#if 0
+static void
+test_filespace_compatible(void)
+{
+    int fd_old = (-1), fd_new = (-1); /* File descriptors for copying data */
+    hid_t fid = -1; /* File id */
+    hid_t did = -1; /* Dataset id */
+    hid_t fcpl; /* File creation property list template */
+    int check[100]; /* Temporary buffer for verifying dataset data */
+    int rdbuf[100]; /* Temporary buffer for reading in dataset data */
+    uint8_t buf[READ_OLD_BUFSIZE]; /* temporary buffer for reading */
+    ssize_t nread; /* Number of bytes read in */
+    unsigned i, j; /* Local index variable */
+    hssize_t free_space; /* Amount of free-space in the file */
+    hbool_t persist; /* Persist free-space or not */
+    hsize_t threshold; /* Free-space section threshold */
+    H5F_fspace_strategy_t strategy; /* File space handling strategy */
+    herr_t ret; /* Return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("File space compatibility testing for 1.6 and 1.8 files\n"));
+
+    for (j = 0; j < NELMTS(OLD_FILENAME); j++) {
+        const char *filename = H5_get_srcdir_filename(OLD_FILENAME[j]); /* Corrected test file name */
+
+        /* Open and copy the test file into a temporary file */
+        /* (The distributed file may be read-only; copy it byte-for-byte to
+         * FILE5 so it can be modified.  NOTE(review): assumes FILE5 is
+         * writable in the current directory.) */
+        fd_old = HDopen(filename, O_RDONLY);
+        CHECK(fd_old, FAIL, "HDopen");
+        fd_new = HDopen(FILE5, O_RDWR | O_CREAT | O_TRUNC, H5_POSIX_CREATE_MODE_RW);
+        CHECK(fd_new, FAIL, "HDopen");
+
+        /* Copy data */
+        while ((nread = HDread(fd_old, buf, (size_t)READ_OLD_BUFSIZE)) > 0) {
+            ssize_t write_err = HDwrite(fd_new, buf, (size_t)nread);
+            CHECK(write_err, -1, "HDwrite");
+        } /* end while */
+
+        /* Close the files */
+        ret = HDclose(fd_old);
+        CHECK(ret, FAIL, "HDclose");
+        ret = HDclose(fd_new);
+        CHECK(ret, FAIL, "HDclose");
+
+        /* Open the temporary test file */
+        fid = H5Fopen(FILE5, H5F_ACC_RDWR, H5P_DEFAULT);
+        CHECK(fid, FAIL, "H5Fopen");
+
+        /* There should not be any free space in the file */
+        free_space = H5Fget_freespace(fid);
+        CHECK(free_space, FAIL, "H5Fget_freespace");
+        VERIFY(free_space, (hssize_t)0, "H5Fget_freespace");
+
+        /* Get the file's file creation property list */
+        fcpl = H5Fget_create_plist(fid);
+        CHECK(fcpl, FAIL, "H5Fget_create_plist");
+
+        /* Retrieve the file space info */
+        ret = H5Pget_file_space_strategy(fcpl, &strategy, &persist, &threshold);
+        CHECK(ret, FAIL, "H5Pget_file_space_strategy");
+
+        /* File space handling strategy should be H5F_FSPACE_STRATEGY_FSM_AGGR */
+        /* Persisting free-space should be FALSE */
+        /* Free-space section threshold should be 1 */
+        /* (i.e. pre-1.10 files must map to the library-default settings) */
+        VERIFY(strategy, H5F_FSPACE_STRATEGY_FSM_AGGR, "H5Pget_file_space_strategy");
+        VERIFY(persist, FALSE, "H5Pget_file_space_strategy");
+        VERIFY(threshold, 1, "H5Pget_file_space_strategy");
+
+        /* Generate raw data */
+        for (i = 0; i < 100; i++)
+            check[i] = (int)i;
+
+        /* Open and read the dataset */
+        did = H5Dopen2(fid, DSETNAME, H5P_DEFAULT);
+        CHECK(did, FAIL, "H5Dopen");
+        ret = H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rdbuf);
+        CHECK(ret, FAIL, "H5Dread");
+
+        /* Verify the data read is correct */
+        for (i = 0; i < 100; i++)
+            VERIFY(rdbuf[i], check[i], "test_compatible");
+
+        /* Close the dataset */
+        ret = H5Dclose(did);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* Remove the dataset */
+        ret = H5Ldelete(fid, DSETNAME, H5P_DEFAULT);
+        CHECK(ret, FAIL, "H5Ldelete");
+
+        /* Close the plist */
+        ret = H5Pclose(fcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        /* Close the file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+
+        /* Re-Open the file */
+        fid = H5Fopen(FILE5, H5F_ACC_RDONLY, H5P_DEFAULT);
+        CHECK(fid, FAIL, "H5Fopen");
+
+        /* The dataset should not be there */
+        did = H5Dopen2(fid, DSETNAME, H5P_DEFAULT);
+        VERIFY(did, FAIL, "H5Dopen");
+
+        /* There should not be any free space in the file */
+        /* (the space released by H5Ldelete must have been recycled/truncated) */
+        free_space = H5Fget_freespace(fid);
+        CHECK(free_space, FAIL, "H5Fget_freespace");
+        VERIFY(free_space, (hssize_t)0, "H5Fget_freespace");
+
+        /* Close the file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+    } /* end for */
+} /* test_filespace_compatible */
+#endif
+
+/****************************************************************
+**
+** test_filespace_1_10_0_compatible():
+** Verify that the latest file space management can open, read and
+** modify 1.10.0 HDF5 files :
+** h5fc_ext1_i.h5: H5F_FILE_SPACE_ALL, default threshold; has superblock extension but no fsinfo message
+** h5fc_ext1_f.h5: H5F_FILE_SPACE_ALL_PERSIST, default threshold; has superblock extension with fsinfo
+*message
+** h5fc_ext2_if.h5: H5F_FILE_SPACE_ALL, non-default threshold; has superblock extension with fsinfo
+*message
+** h5fc_ext2_sf.h5: H5F_FILE_SPACE_VFD, default threshold; has superblock extension with fsinfo message
+** h5fc_ext3_isf.h5: H5F_FILE_SPACE_AGGR_VFD, default threshold; has superblock extension with fsinfo
+*message
+** h5fc_ext_none.h5: H5F_FILE_SPACE_ALL, default threshold; without superblock extension
+** The above files are copied from release 1.10.0 tools/h5format_convert/testfiles.
+**
+****************************************************************/
+#if 0
+/* Dataset exercised in each 1.10.0 test file; indexed the same way as
+ * OLD_1_10_0_FILENAME in test_filespace_1_10_0_compatible() below. */
+static const char *const FSPACE_1_10_0_DSETS[] = {
+    "/DSET_EA",             /* h5fc_ext1_i.h5 */
+    "/DSET_NDATA_BT2",      /* h5fc_ext1_f.h5 */
+    "/DSET_NONE",           /* h5fc_ext2_if.h5 */
+    "/GROUP/DSET_NDATA_EA", /* h5fc_ext2_sf.h5 */
+    "/GROUP/DSET_NDATA_FA", /* h5fc_ext3_isf.h5 */
+    "/GROUP/DSET_NDATA_NONE" /* h5fc_ext_none.h5 */
+};
+
+/* Open dataset `dname` in `fid` and overwrite its 24 elements with `val` */
+static void
+fspace_1_10_0_write_dset(hid_t fid, const char *dname, int val)
+{
+    hid_t    did;
+    int      wbuf[24]; /* Buffer for dataset data */
+    unsigned i;
+    herr_t   ret;
+
+    did = H5Dopen2(fid, dname, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dopen");
+
+    for (i = 0; i < 24; i++)
+        wbuf[i] = val;
+
+    ret = H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+} /* fspace_1_10_0_write_dset() */
+
+/* Open dataset `dname` in `fid` and verify all 24 elements equal `val` */
+static void
+fspace_1_10_0_verify_dset(hid_t fid, const char *dname, int val)
+{
+    hid_t    did;
+    int      rdbuf[24]; /* Buffer for dataset data */
+    unsigned i;
+    herr_t   ret;
+
+    did = H5Dopen2(fid, dname, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dopen");
+
+    ret = H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    for (i = 0; i < 24; i++)
+        VERIFY(rdbuf[i], val, "test_compatible");
+
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+} /* fspace_1_10_0_verify_dset() */
+
+/* Verify that the latest file space management can open, read and modify the
+ * 1.10.0 files listed in OLD_1_10_0_FILENAME: for each file, check the
+ * retrieved file-space strategy settings, write a known pattern to its test
+ * dataset, then re-open read-only and verify the pattern. */
+static void
+test_filespace_1_10_0_compatible(void)
+{
+    hid_t                 fid = -1;  /* File id */
+    hid_t                 fcpl;      /* File creation property list */
+    hbool_t               persist;   /* Persist free-space or not */
+    hsize_t               threshold; /* Free-space section threshold */
+    H5F_fspace_strategy_t strategy;  /* File space handling strategy */
+    int                   status;    /* Status from copying the existing file */
+    unsigned              j;         /* Local index variable */
+    herr_t                ret;       /* Return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("File space compatibility testing for 1.10.0 files\n"));
+
+    for (j = 0; j < NELMTS(OLD_1_10_0_FILENAME); j++) {
+        /* Make a copy of the test file */
+        status = h5_make_local_copy(OLD_1_10_0_FILENAME[j], FILE5);
+        CHECK(status, FAIL, "h5_make_local_copy");
+
+        /* Open the temporary test file */
+        fid = H5Fopen(FILE5, H5F_ACC_RDWR, H5P_DEFAULT);
+        CHECK(fid, FAIL, "H5Fopen");
+
+        /* Get the file's file creation property list */
+        fcpl = H5Fget_create_plist(fid);
+        CHECK(fcpl, FAIL, "H5Fget_create_plist");
+
+        /* Retrieve the file space info */
+        ret = H5Pget_file_space_strategy(fcpl, &strategy, &persist, &threshold);
+        CHECK(ret, FAIL, "H5Pget_file_space_strategy");
+
+        /* Verify the retrieved file space settings per file.  Checks against
+         * the library-default persist/threshold values remain disabled, as in
+         * the original #if 0 blocks. */
+        switch (j) {
+            case 0:
+                /* No enabled strategy checks for this file */
+                break;
+
+            case 1:
+                VERIFY(strategy, H5F_FSPACE_STRATEGY_FSM_AGGR, "H5Pget_file_space_strategy");
+                VERIFY(persist, TRUE, "H5Pget_file_space_strategy");
+                break;
+
+            case 2:
+                VERIFY(strategy, H5F_FSPACE_STRATEGY_FSM_AGGR, "H5Pget_file_space_strategy");
+                VERIFY(threshold, 2, "H5Pget_file_space_strategy");
+                break;
+
+            case 3:
+                VERIFY(strategy, H5F_FSPACE_STRATEGY_NONE, "H5Pget_file_space_strategy");
+                break;
+
+            case 4:
+                VERIFY(strategy, H5F_FSPACE_STRATEGY_AGGR, "H5Pget_file_space_strategy");
+                break;
+
+            case 5:
+                VERIFY(strategy, H5F_FSPACE_STRATEGY_FSM_AGGR, "H5Pget_file_space_strategy");
+                break;
+
+            default:
+                break;
+        }
+
+        /* Write a known pattern (j + 1) to the file's test dataset */
+        if (j < NELMTS(FSPACE_1_10_0_DSETS))
+            fspace_1_10_0_write_dset(fid, FSPACE_1_10_0_DSETS[j], (int)j + 1);
+
+        /* Close the plist */
+        ret = H5Pclose(fcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        /* Close the file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+
+        /* Re-Open the file read-only and verify the data just written */
+        fid = H5Fopen(FILE5, H5F_ACC_RDONLY, H5P_DEFAULT);
+        CHECK(fid, FAIL, "H5Fopen");
+
+        if (j < NELMTS(FSPACE_1_10_0_DSETS))
+            fspace_1_10_0_verify_dset(fid, FSPACE_1_10_0_DSETS[j], (int)j + 1);
+
+        /* Close the file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+    } /* end for */
+
+} /* test_filespace_1_10_0_compatible */
+#endif
+
+/****************************************************************
+**
+** test_filespace_round_compatible():
+** Verify that the trunk can open, read and modify these files--
+** 1) They are initially created (via gen_filespace.c) in the trunk
+** with combinations of file space strategies, default/non-default
+** threshold, and file spacing paging enabled/disabled.
+** The library creates the file space info message with
+** "mark if unknown" in these files.
+** 2) They are copied to the 1.8 branch, and are opened/read/modified
+** there via test_filespace_compatible() in test/tfile.c.
+** The 1.8 library marks the file space info message as "unknown"
+** in these files.
+** 3) They are then copied back from the 1.8 branch to the trunk for
+** compatibility testing via this routine.
+** 4) Upon encountering the file space info message which is marked
+** as "unknown", the library will use the default file space management
+** from then on: non-persistent free-space managers, default threshold,
+** and non-paging file space.
+**
+****************************************************************/
+#if 0
+/* Round-trip compatibility: for each file in FSPACE_FILENAMES (created on the
+ * trunk, modified by the 1.8 branch, copied back), verify the library falls
+ * back to the default file space settings and reports no free space. */
+static void
+test_filespace_round_compatible(void)
+{
+    hid_t                 fid  = -1;  /* File id */
+    hid_t                 fcpl = -1;  /* File creation property list ID */
+    unsigned              j;          /* Local index variable */
+    H5F_fspace_strategy_t strategy;   /* File space strategy */
+    hbool_t               persist;    /* Persist free-space or not */
+    hsize_t               threshold;  /* Free-space section threshold */
+    hssize_t              free_space; /* Amount of free space in the file */
+    int                   status;     /* Status from copying the existing file */
+    herr_t                ret;        /* Return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("File space compatibility testing for files from trunk to 1_8 to trunk\n"));
+
+    for (j = 0; j < NELMTS(FSPACE_FILENAMES); j++) {
+        /* Make a copy of the test file */
+        status = h5_make_local_copy(FSPACE_FILENAMES[j], FILE5);
+        CHECK(status, FAIL, "h5_make_local_copy");
+
+        /* Open the temporary test file */
+        fid = H5Fopen(FILE5, H5F_ACC_RDWR, H5P_DEFAULT);
+        CHECK(fid, FAIL, "H5Fopen");
+
+        /* Get the file's creation property list */
+        fcpl = H5Fget_create_plist(fid);
+        CHECK(fcpl, FAIL, "H5Fget_create_plist");
+
+        /* The "unknown"-marked fsinfo message must map to the defaults:
+         * FSM_AGGR strategy, non-persistent free-space, threshold 1 */
+        ret = H5Pget_file_space_strategy(fcpl, &strategy, &persist, &threshold);
+        CHECK(ret, FAIL, "H5Pget_file_space_strategy");
+        VERIFY(strategy, H5F_FSPACE_STRATEGY_FSM_AGGR, "H5Pget_file_space_strategy");
+        VERIFY(persist, FALSE, "H5Pget_file_space_strategy");
+        VERIFY(threshold, 1, "H5Pget_file_space_strategy");
+
+        /* There should not be any free space in the file */
+        free_space = H5Fget_freespace(fid);
+        CHECK(free_space, FAIL, "H5Fget_freespace");
+        VERIFY(free_space, (hssize_t)0, "H5Fget_freespace");
+
+        /* Closing.  Check each close individually: the original overwrote
+         * the H5Fclose() status with the H5Pclose() status before checking. */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+        ret = H5Pclose(fcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+    } /* end for */
+
+} /* test_filespace_round_compatible */
+
+/****************************************************************
+**
+** test_libver_bounds_real():
+** Verify that a file created and modified with the
+** specified libver bounds has the specified object header
+** versions for the right objects.
+**
+****************************************************************/
+static void
+test_libver_bounds_real(H5F_libver_t libver_create, unsigned oh_vers_create, H5F_libver_t libver_mod,
+ unsigned oh_vers_mod)
+{
+ hid_t file, group; /* Handles */
+ hid_t fapl; /* File access property list */
+ H5O_native_info_t ninfo; /* Object info */
+ herr_t ret; /* Return value */
+
+ /*
+ * Create a new file using the creation properties.
+ */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+
+ ret = H5Pset_libver_bounds(fapl, libver_create, H5F_LIBVER_LATEST);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ file = H5Fcreate("tfile5.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(file, FAIL, "H5Fcreate");
+
+ /*
+ * Make sure the root group has the correct object header version
+ */
+ ret = H5Oget_native_info_by_name(file, "/", &ninfo, H5O_NATIVE_INFO_HDR, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_native_info_by_name");
+ VERIFY(ninfo.hdr.version, oh_vers_create, "H5Oget_native_info_by_name");
+
+ /*
+ * Reopen the file and make sure the root group still has the correct version
+ */
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ ret = H5Pset_libver_bounds(fapl, libver_mod, H5F_LIBVER_LATEST);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ file = H5Fopen("tfile5.h5", H5F_ACC_RDWR, fapl);
+ CHECK(file, FAIL, "H5Fopen");
+
+ ret = H5Oget_native_info_by_name(file, "/", &ninfo, H5O_NATIVE_INFO_HDR, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_native_info_by_name");
+ VERIFY(ninfo.hdr.version, oh_vers_create, "H5Oget_native_info_by_name");
+
+ /*
+ * Create a group named "G1" in the file, and make sure it has the correct
+ * object header version
+ */
+ group = H5Gcreate2(file, "/G1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group, FAIL, "H5Gcreate");
+
+ //! [H5Oget_native_info_snip]
+
+ ret = H5Oget_native_info(group, &ninfo, H5O_NATIVE_INFO_HDR);
+
+ //! [H5Oget_native_info_snip]
+
+ CHECK(ret, FAIL, "H5Oget_native)info");
+ VERIFY(ninfo.hdr.version, oh_vers_mod, "H5Oget_native_info");
+
+ ret = H5Gclose(group);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /*
+ * Create a group named "/G1/G3" in the file, and make sure it has the
+ * correct object header version
+ */
+ group = H5Gcreate2(file, "/G1/G3", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group, FAIL, "H5Gcreate");
+
+ ret = H5Oget_native_info(group, &ninfo, H5O_NATIVE_INFO_HDR);
+ CHECK(ret, FAIL, "H5Oget_native_info");
+ VERIFY(ninfo.hdr.version, oh_vers_mod, "H5Oget_native_info");
+
+ ret = H5Gclose(group);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ //! [H5Oget_native_info_by_name_snip]
+
+ /*
+ * Make sure the root group still has the correct object header version
+ */
+ ret = H5Oget_native_info_by_name(file, "/", &ninfo, H5O_NATIVE_INFO_HDR, H5P_DEFAULT);
+
+ //! [H5Oget_native_info_by_name_snip]
+
+ CHECK(ret, FAIL, "H5Oget_native_info_by_name");
+ VERIFY(ninfo.hdr.version, oh_vers_create, "H5Oget_native_info_by_name");
+
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+} /* end test_libver_bounds_real() */
+#endif
+
+/*-------------------------------------------------------------------------
+ * Function: test_libver_bounds_open
+ *
+ * Purpose: Tests opening latest file with various low/high bounds.
+ *
+ * Return: Success: 0
+ * Failure: number of errors
+ *
+ *-------------------------------------------------------------------------
+ */
+#if 0
+#define VERBFNAME "tverbounds_dspace.h5"
+#define VERBDSNAME "dataset 1"
+#define SPACE1_DIM1 3
+static void
+test_libver_bounds_open(void)
+{
+ hid_t file = -1; /* File ID */
+ hid_t space = -1; /* Dataspace ID */
+ hid_t dset = -1; /* Dataset ID */
+ hid_t fapl = -1; /* File access property list ID */
+ hid_t new_fapl = -1; /* File access property list ID for reopened file */
+ hid_t dcpl = -1; /* Dataset creation property list ID */
+ hsize_t dim[1] = {SPACE1_DIM1}; /* Dataset dimensions */
+ H5F_libver_t low, high; /* File format bounds */
+ hsize_t chunk_dim[1] = {SPACE1_DIM1}; /* Chunk dimensions */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Opening File in Various Version Bounds\n"));
+
+ /* Create a file access property list */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+
+ /* Create dataspace */
+ space = H5Screate_simple(1, dim, NULL);
+ CHECK(space, FAIL, "H5Screate_simple");
+
+ /* Create a dataset creation property list */
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, FAIL, "H5Pcreate");
+
+ /* Create and set chunk plist */
+ ret = H5Pset_chunk(dcpl, 1, chunk_dim);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+ ret = H5Pset_deflate(dcpl, 9);
+ CHECK(ret, FAIL, "H5Pset_deflate");
+ ret = H5Pset_chunk_opts(dcpl, H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS);
+ CHECK(ret, FAIL, "H5Pset_chunk_opts");
+
+ /* Create a file with (LATEST, LATEST) bounds, create a layout version 4
+ dataset, then close the file */
+
+ /* Set version bounds to (LATEST, LATEST) */
+ low = H5F_LIBVER_LATEST;
+ high = H5F_LIBVER_LATEST;
+ ret = H5Pset_libver_bounds(fapl, low, high);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ /* Create the file */
+ file = H5Fcreate(VERBFNAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(file, FAIL, "H5Fcreate");
+
+ /* Create dataset */
+ dset = H5Dcreate2(file, VERBDSNAME, H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dcreate2");
+
+ /* Close dataset and file */
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Attempt to open latest file with (earliest, v18), should fail */
+ ret = H5Pset_libver_bounds(fapl, H5F_LIBVER_EARLIEST, H5F_LIBVER_V18);
+ H5E_BEGIN_TRY
+ {
+ file = H5Fopen(VERBFNAME, H5F_ACC_RDONLY, fapl);
+ }
+ H5E_END_TRY;
+ VERIFY(file, FAIL, "Attempted to open latest file with earliest version");
+
+ /* Attempt to open latest file with (v18, v18), should fail */
+ ret = H5Pset_libver_bounds(fapl, H5F_LIBVER_V18, H5F_LIBVER_V18);
+ H5E_BEGIN_TRY
+ {
+ file = H5Fopen(VERBFNAME, H5F_ACC_RDONLY, fapl);
+ }
+ H5E_END_TRY;
+ VERIFY(file, FAIL, "Attempted to open latest file with v18 bounds");
+
+ /* Opening VERBFNAME in these combination should succeed.
+ For each low bound, verify that it is upgraded properly */
+ high = H5F_LIBVER_LATEST;
+ for (low = H5F_LIBVER_EARLIEST; low < H5F_LIBVER_NBOUNDS; low++) {
+ H5F_libver_t new_low = H5F_LIBVER_EARLIEST;
+
+ /* Set version bounds for opening file */
+ ret = H5Pset_libver_bounds(fapl, low, high);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ /* Open the file */
+ file = H5Fopen(VERBFNAME, H5F_ACC_RDONLY, fapl);
+ CHECK(file, FAIL, "H5Fopen");
+
+ /* Get the new file access property */
+ new_fapl = H5Fget_access_plist(file);
+ CHECK(new_fapl, FAIL, "H5Fget_access_plist");
+
+ /* Get new low bound and verify that it has been upgraded properly */
+ ret = H5Pget_libver_bounds(new_fapl, &new_low, NULL);
+ CHECK(ret, FAIL, "H5Pget_libver_bounds");
+ VERIFY(new_low >= H5F_LIBVER_V110, TRUE, "Low bound should be upgraded to at least H5F_LIBVER_V110");
+
+ ret = H5Pclose(new_fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+ } /* for low */
+
+ /* Close dataspace and property lists */
+ ret = H5Sclose(space);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+} /* end test_libver_bounds_open() */
+#endif
+
+/*-------------------------------------------------------------------------
+ * Function: test_libver_bounds_copy
+ *
+ * Purpose: Test to verify HDFFV-10800 is fixed:
+ * This test is copied from the user test program: copy10.c.
+ * (See attached programs in the jira issue.)
+ *
+ * The source file used in the test is generated by the user test
+ * program "fill18.c" with the 1.8 library. The file is created
+ * with the latest format and the dataset created in the file
+ * has version 3 fill value message (latest).
+ *
+ * The test creates the destination file with (v18, v18) version bounds.
+ * H5Ocopy() should succeed in copying the dataset in the source file
+ * to the destination file.
+ *
+ * Return: Success: 0
+ * Failure: number of errors
+ *
+ *-------------------------------------------------------------------------
+ */
+#if 0
+static void
+test_libver_bounds_copy(void)
+{
+ hid_t src_fid = -1; /* File ID */
+ hid_t dst_fid = -1; /* File ID */
+ hid_t fapl = -1; /* File access property list ID */
+ const char *src_fname; /* Source file name */
+ herr_t ret; /* Generic return value */
+ hbool_t driver_is_default_compatible;
+
+ /* Output message about the test being performed */
+ MESSAGE(5, ("Testing H5Ocopy a dataset in a 1.8 library file to a 1.10 library file\n"));
+
+ ret = h5_driver_is_default_vfd_compatible(H5P_DEFAULT, &driver_is_default_compatible);
+ CHECK_I(ret, "h5_driver_is_default_vfd_compatible");
+
+ if (!driver_is_default_compatible) {
+ HDprintf("-- SKIPPED --\n");
+ return;
+ }
+
+ /* Get the test file name */
+ src_fname = H5_get_srcdir_filename(SRC_FILE);
+
+ /* Open the source test file */
+ src_fid = H5Fopen(src_fname, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(src_fid, FAIL, "H5Fopen");
+
+ /* Create file access property list */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+
+ /* Set library version bounds to (v18, v18) */
+ ret = H5Pset_libver_bounds(fapl, H5F_LIBVER_V18, H5F_LIBVER_V18);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ /* Create the destination file with the fapl */
+ dst_fid = H5Fcreate(DST_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(dst_fid, FAIL, "H5Pcreate");
+
+ /* Close the fapl */
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Copy the dataset in the source file to the destination file */
+ ret = H5Ocopy(src_fid, DSET_DS1, dst_fid, DSET_DS1, H5P_DEFAULT, H5P_DEFAULT);
+ VERIFY(ret, SUCCEED, "H5Ocopy");
+
+ /* Close the source file */
+ ret = H5Fclose(src_fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Close the destination file */
+ ret = H5Fclose(dst_fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Remove the destination file */
+ H5Fdelete(DST_FILE, H5P_DEFAULT);
+
+} /* end test_libver_bounds_copy() */
+#endif
+
+/****************************************************************
+**
+** test_libver_bounds():
+** Verify that a file created and modified with various
+** libver bounds is handled correctly. (Further testing
+** welcome)
+**
+****************************************************************/
+#if 0
static void
test_libver_bounds(void)
{
    /* Output message about test being performed */
    MESSAGE(5, ("Testing setting library version bounds\n"));

    /* Run the tests: exercise both directions -- create with the earliest
     * bound then modify with the latest, and create with the latest bound
     * then modify with the earliest.  The expected object header versions
     * (1 or 2) are passed in for each phase. */
    test_libver_bounds_real(H5F_LIBVER_EARLIEST, 1, H5F_LIBVER_LATEST, 2);
    test_libver_bounds_real(H5F_LIBVER_LATEST, 2, H5F_LIBVER_EARLIEST, 2);
    test_libver_bounds_open();
#if 0
    /* HDFFV-10800 copy test is compiled out here; see test_libver_bounds_copy() */
    test_libver_bounds_copy();
#endif
} /* end test_libver_bounds() */
+#endif
+
+/**************************************************************************************
+**
+** test_libver_bounds_low_high():
+** Tests to verify that format versions are correct with the following five
+** pairs of low/high version bounds set in fapl via H5Pset_libver_bounds():
+** (1) (earliest, v18)
+** (2) (earliest, v110)
+** (3) (v18, v18)
+** (4) (v18, v110)
+** (5) (v110, v110)
+**
+** For each pair of setting in fapl, verify format versions with the following
+** six tests:
+** (1) test_libver_bounds_super(fapl): superblock versions
+** (2) test_libver_bounds_obj(fapl): object header versions
+** (3) test_libver_bounds_dataset(fapl): message versions associated with dataset
+** (4) test_libver_bounds_dataspace(fapl): dataspace message versions
+** (5) test_libver_bounds_datatype(fapl): datatype message versions
+** (6) test_libver_bounds_attributes(fapl): attribute message versions
+**
+**************************************************************************************/
+#if 0
+static void
+test_libver_bounds_low_high(const char *env_h5_drvr)
+{
+ hid_t fapl = H5I_INVALID_HID; /* File access property list */
+ H5F_libver_t low, high; /* Low and high bounds */
+ herr_t ret; /* The return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing setting (low, high) format version bounds\n"));
+
+ /* Create a file access property list */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, H5I_INVALID_HID, "H5Pcreate");
+
+ /* Loop through all the combinations of low/high version bounds */
+ for (low = H5F_LIBVER_EARLIEST; low < H5F_LIBVER_NBOUNDS; low++)
+ for (high = H5F_LIBVER_EARLIEST; high < H5F_LIBVER_NBOUNDS; high++) {
+
+ H5E_BEGIN_TRY
+ {
+ /* Set the low/high version bounds */
+ ret = H5Pset_libver_bounds(fapl, low, high);
+ }
+ H5E_END_TRY;
+
+ /* Should fail: invalid combinations */
+ if (high == H5F_LIBVER_EARLIEST) {
+ VERIFY(ret, FAIL, "H5Pset_libver_bounds");
+ continue;
+ }
+
+ /* Should fail: invalid combinations */
+ if (high < low) {
+ VERIFY(ret, FAIL, "H5Pset_libver_bounds");
+ continue;
+ }
+
+ /* All other combinations are valid and should succeed */
+ VERIFY(ret, SUCCEED, "H5Pset_libver_bounds");
+
+ /* Tests to verify version bounds */
+ test_libver_bounds_super(fapl, env_h5_drvr);
+ test_libver_bounds_obj(fapl);
+ test_libver_bounds_dataset(fapl);
+ test_libver_bounds_dataspace(fapl);
+ test_libver_bounds_datatype(fapl);
+ test_libver_bounds_attributes(fapl);
+ }
+
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+} /* end test_libver_bounds_low_high() */
+#endif
+
+/***********************************************************************
+**
+** test_libver_bounds_super():
+** Verify superblock version with the following two tests:
+** (1) test_libver_bounds_super_create():
+** --when creating a file with the input fapl and the fcpl
+** that has the following feature enabled:
+** (A) default fcpl
**               (B) fcpl with v1-btree K value enabled
+** (C) fcpl with shared messages enabled
+** (D) fcpl with persistent free-space manager enabled
+**
+** (2) test_libver_bounds_super_open():
+** --when opening a file which is created with the input fapl
+** and the fcpl setting as #A to #D above.
+**
+** These two tests are run with or without SWMR file access.
+**
+*************************************************************************/
+#if 0
/* Drive the superblock-version checks: for each of four fcpl variants
 * (#A default, #B v1-btree K, #C shared messages, #D persistent free-space
 * manager) run the create and open sub-tests, with SWMR access when the
 * current VFD supports it. */
static void
test_libver_bounds_super(hid_t fapl, const char *env_h5_drvr)
{
    hid_t  fcpl = H5I_INVALID_HID; /* File creation property list */
    herr_t ret;                    /* The return value */

    /* Create a default fcpl: #A */
    /* This will result in superblock version 0 */
    fcpl = H5Pcreate(H5P_FILE_CREATE);
    CHECK(fcpl, H5I_INVALID_HID, "H5Pcreate");

    /* Verify superblock version when creating a file with input fapl,
       fcpl #A and with/without SWMR access */
    if (H5FD__supports_swmr_test(env_h5_drvr))
        test_libver_bounds_super_create(fapl, fcpl, TRUE, FALSE);
    test_libver_bounds_super_create(fapl, fcpl, FALSE, FALSE);

    /* Verify superblock version when opening a file which is created
       with input fapl, fcpl #A and with/without SWMR access */
    if (H5FD__supports_swmr_test(env_h5_drvr))
        test_libver_bounds_super_open(fapl, fcpl, TRUE, FALSE);
    test_libver_bounds_super_open(fapl, fcpl, FALSE, FALSE);

    /* Close the fcpl */
    ret = H5Pclose(fcpl);
    CHECK(ret, FAIL, "H5Pclose");

    /* Create a fcpl with v1-btree K value enabled: #B */
    /* This will result in superblock version 1 */
    fcpl = H5Pcreate(H5P_FILE_CREATE);
    CHECK(fcpl, H5I_INVALID_HID, "H5Pcreate");
    ret = H5Pset_istore_k(fcpl, 64);
    CHECK(ret, FAIL, "H5Pset_istore_k");

    /* Verify superblock version when creating a file with input fapl,
       fcpl #B and with/without SWMR access */
    if (H5FD__supports_swmr_test(env_h5_drvr))
        test_libver_bounds_super_create(fapl, fcpl, TRUE, FALSE);
    test_libver_bounds_super_create(fapl, fcpl, FALSE, FALSE);

    /* Verify superblock version when opening a file which is created
       with input fapl, fcpl #B and with/without SWMR access */
    if (H5FD__supports_swmr_test(env_h5_drvr))
        test_libver_bounds_super_open(fapl, fcpl, TRUE, FALSE);
    test_libver_bounds_super_open(fapl, fcpl, FALSE, FALSE);

    /* Close the fcpl */
    ret = H5Pclose(fcpl);
    CHECK(ret, FAIL, "H5Pclose");

    /* Create a fcpl with shared messages enabled: #C */
    /* This will result in superblock version 2 */
    fcpl = H5Pcreate(H5P_FILE_CREATE);
    CHECK(fcpl, H5I_INVALID_HID, "H5Pcreate");
    ret = H5Pset_shared_mesg_nindexes(fcpl, 1);
    CHECK(ret, FAIL, "H5Pset_shared_mesg_nindexes");
    ret = H5Pset_shared_mesg_index(fcpl, 0, H5O_SHMESG_ATTR_FLAG, 2);
    CHECK(ret, FAIL, "H5Pset_shared_mesg_index");

    /* Verify superblock version when creating a file with input fapl,
       fcpl #C and with/without SWMR access */
    if (H5FD__supports_swmr_test(env_h5_drvr))
        test_libver_bounds_super_create(fapl, fcpl, TRUE, FALSE);
    test_libver_bounds_super_create(fapl, fcpl, FALSE, FALSE);

    /* Verify superblock version when opening a file which is created
       with input fapl, fcpl #C and with/without SWMR access */
    if (H5FD__supports_swmr_test(env_h5_drvr))
        test_libver_bounds_super_open(fapl, fcpl, TRUE, FALSE);
    test_libver_bounds_super_open(fapl, fcpl, FALSE, FALSE);

    /* Close the fcpl */
    ret = H5Pclose(fcpl);
    CHECK(ret, FAIL, "H5Pclose");

    /* #D requires the default VFD: persistent free-space managers are only
       exercised there (non_def_fsm is passed as TRUE below) */
    if (h5_using_default_driver(env_h5_drvr)) {
        /* Create a fcpl with persistent free-space manager enabled: #D */
        /* This will result in superblock version 2 */
        fcpl = H5Pcreate(H5P_FILE_CREATE);
        CHECK(fcpl, H5I_INVALID_HID, "H5Pcreate");
        ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_FSM_AGGR, 1, (hsize_t)1);
        CHECK(ret, FAIL, "H5Pset_file_space");

        /* Verify superblock version when creating a file with input fapl,
           fcpl #D and with/without SWMR access */
        if (H5FD__supports_swmr_test(env_h5_drvr))
            test_libver_bounds_super_create(fapl, fcpl, TRUE, TRUE);
        test_libver_bounds_super_create(fapl, fcpl, FALSE, TRUE);

        /* Verify superblock version when opening a file which is created
           with input fapl, fcpl #D and with/without SWMR access */
        if (H5FD__supports_swmr_test(env_h5_drvr))
            test_libver_bounds_super_open(fapl, fcpl, TRUE, TRUE);
        test_libver_bounds_super_open(fapl, fcpl, FALSE, TRUE);

        /* Close the fcpl */
        ret = H5Pclose(fcpl);
        CHECK(ret, FAIL, "H5Pclose");
    }

} /* end test_libver_bounds_super() */
+
+/**************************************************************************************************
+**
+** test_libver_bounds_super_create():
+** Verify the following when the file is created with the input fapl, fcpl,
+** and with/without SWMR access:
+** (a) the superblock version #
+** (b) the file's low bound setting
+** (c) fail or succeed in creating the file
+**
+** For file creation, the bounds setting in fapl, the feature enabled in fcpl,
+** and with/without SWMR file access will determine the results for #a to #c.
+**
+** The first row for the following two tables is the 5 pairs of low/high bounds setting
+** in the input fapl. The next three rows list the expected results for #a to #c.
+** "-->" indicates "upgrade to"
+**
+** The last table lists the expected results in creating the file when non-default
+** free-space info (fsinfo) is enabled in fcpl.
+**
+** Creating a file with write access
+** --------------------------------------------------------------------------------
+** | (earliest, v18) | (earliest, v110) | (v18, v18) | (v18, v110) | (v110, v110) |
+** |______________________________________________________________________________|
+** Superblock version | vers 0, 1, 2 | vers 0, 1, 2 | vers 2 | vers 2 | vers 3 |
+** |------------------------------------------------------------------------------|
+** File's low bound | no change |
+** |------------------------------------------------------------------------------|
+** File creation | succeed |
+** |______________________________________________________________________________|
+**
+** Creating a file with SWMR-write access
+** --------------------------------------------------------------------------------
+** | (earliest, v18) | (earliest, v110) | (v18, v18) | (v18, v110) | (v110, v110) |
+** |______________________________________________________________________________|
+** Superblock version | -- | vers 3 | -- | vers 3 | vers 3 |
+** |------------------------------------------------------------------------------|
+** File's low bound | -- | ->v110 | -- | ->v110 | no change |
+** |------------------------------------------------------------------------------|
+** File creation | fail | succeed | fail | succeed | succeed |
+** |______________________________________________________________________________|
+**
+** Creating a file with write/SWMR-write access + non-default fsinfo
+** --------------------------------------------------------------------------------
+** | (earliest, v18) | (earliest, v110) | (v18, v18) | (v18, v110) | (v110, v110) |
+** |______________________________________________________________________________|
+** File creation | fail | succeed | fail | succeed | succeed |
+** |______________________________________________________________________________|
+**
+******************************************************************************************************/
/* Create FILE8 with the given fapl/fcpl, optionally with SWMR-write access,
 * and verify create success/failure against the bounds tables documented
 * above.  non_def_fsm indicates the fcpl enables a non-default free-space
 * manager, which requires a high bound of at least v110.  The superblock
 * version checks that need library internals are compiled out (#if 0) in
 * this API-test build. */
static void
test_libver_bounds_super_create(hid_t fapl, hid_t fcpl, htri_t is_swmr, htri_t non_def_fsm)
{
    hid_t fid = H5I_INVALID_HID; /* File ID */
#if 0
    H5F_t *f = NULL; /* Internal file pointer */
#endif
    H5F_libver_t low, high; /* Low and high bounds */
#if 0
    hbool_t ok; /* The result is ok or not */
#endif
    herr_t ret; /* The return value */

    /* Try to create the file; expected failures are suppressed by the
       error-stack guard */
    H5E_BEGIN_TRY
    {
        fid = H5Fcreate(FILE8, H5F_ACC_TRUNC | (is_swmr ? H5F_ACC_SWMR_WRITE : 0), fcpl, fapl);
    }
    H5E_END_TRY;

#if 0
    /* Get the internal file pointer if the create succeeds */
    if (fid >= 0) {
        f = (H5F_t *)H5VL_object(fid);
        CHECK_PTR(f, "H5VL_object");
    }
#endif
    /* Retrieve the low/high bounds */
    ret = H5Pget_libver_bounds(fapl, &low, &high);
    CHECK(ret, FAIL, "H5Pget_libver_bounds");

    /* Non-default free-space manager with a high bound below v110: create
       must fail */
    if (non_def_fsm && high < H5F_LIBVER_V110)
        VERIFY(fid, H5I_INVALID_HID, "H5Fcreate");

    else if (is_swmr) {                /* SWMR is enabled */
        if (high >= H5F_LIBVER_V110) { /* Should succeed */
            VERIFY(fid >= 0, TRUE, "H5Fcreate");
#if 0
            VERIFY(HDF5_SUPERBLOCK_VERSION_3, f->shared->sblock->super_vers, "HDF5_superblock_ver_bounds");
            VERIFY(f->shared->low_bound >= H5F_LIBVER_V110, TRUE, "HDF5_superblock_ver_bounds");
#endif
        }
        else /* Should fail: SWMR-write needs a high bound of at least v110 */
            VERIFY(fid >= 0, FALSE, "H5Fcreate");
    }
    else { /* Should succeed */
        VERIFY(fid >= 0, TRUE, "H5Fcreate");
#if 0
        VERIFY(low, f->shared->low_bound, "HDF5_superblock_ver_bounds");

        switch (low) {
            case H5F_LIBVER_EARLIEST:
                ok = (f->shared->sblock->super_vers == HDF5_SUPERBLOCK_VERSION_DEF ||
                      f->shared->sblock->super_vers == HDF5_SUPERBLOCK_VERSION_1 ||
                      f->shared->sblock->super_vers == HDF5_SUPERBLOCK_VERSION_2);
                VERIFY(ok, TRUE, "HDF5_superblock_ver_bounds");
                break;

            case H5F_LIBVER_V18:
                ok = (f->shared->sblock->super_vers == HDF5_SUPERBLOCK_VERSION_2);
                VERIFY(ok, TRUE, "HDF5_superblock_ver_bounds");
                break;

            case H5F_LIBVER_V110:
            case H5F_LIBVER_V112:
            case H5F_LIBVER_V114:
            case H5F_LIBVER_V116:
                ok = (f->shared->sblock->super_vers == HDF5_SUPERBLOCK_VERSION_3);
                VERIFY(ok, TRUE, "HDF5_superblock_ver_bounds");
                break;

            case H5F_LIBVER_ERROR:
            case H5F_LIBVER_NBOUNDS:
            default:
                ERROR("H5Pget_libver_bounds");

        } /* end switch */
#endif
    } /* end else */

    if (fid >= 0) { /* Close the file */
        ret = H5Fclose(fid);
        CHECK(ret, FAIL, "H5Fclose");
    }

} /* end test_libver_bounds_super_create() */
+
+/**************************************************************************************************
+**
+** test_libver_bounds_super_open():
+** Verify the following when opening a file which is created with the input fapl, fcpl,
+** and with/without SWMR access:
+** (a) the file's low bound setting
+** (b) fail or succeed in opening the file
+**
+** (1) Create a file with the input fapl, fcpl and with/without SWMR access
+** (2) Close the file
+** (3) Reopen the file with a new fapl that is set to the 5 pairs of low/high bounds
+** in a for loop. For each pair of setting in the new fapl:
+** --Verify the expected results for #a and #b above.
+** --Close the file.
+**
+** For file open, the file's superblock version, the low/high bounds setting in fapl,
+** and with/without SWMR file access will determine the results for #a and #b.
+**
+** The first row for the following tables (#A - #B) is the 5 pairs of low/high bounds setting
+** in the input fapl. The next two rows list the expected results for #a and #b.
+** "-->" indicates "upgrade to"
+**
+** The last table (#C) lists the expected results in opening the file when non-default
+** free-space info (fsinfo) is enabled in fcpl.
+**
+** (A) Opening a file with write access
+**
+** Superblock version 0, 1
+** --------------------------------------------------------------------------------
+** | (earliest, v18) | (earliest, v110) | (v18, v18) | (v18, v110) | (v110, v110) |
+** |______________________________________________________________________________|
+** File's low bound | no change |
+** |------------------------------------------------------------------------------|
+** File open | succeed |
+** |______________________________________________________________________________|
+**
+**
+** Superblock version 2
+** --------------------------------------------------------------------------------
+** | (earliest, v18) | (earliest, v110) | (v18, v18) | (v18, v110) | (v110, v110) |
+** |______________________________________________________________________________|
+** File's low bound | -->v18 | no change |
+** |------------------------------------------------------------------------------|
+** File open | succeed |
+** |______________________________________________________________________________|
+**
+** Superblock version 3
+** --------------------------------------------------------------------------------
+** | (earliest, v18) | (earliest, v110) | (v18, v18) | (v18, v110) | (v110, v110) |
+** |______________________________________________________________________________|
+** File's low bound | -- | -->v110 | -- | -->v110 | no change |
+** |------------------------------------------------------------------------------|
+** File open | fail | succeed | fail | succeed | succeed |
+** |______________________________________________________________________________|
+**
+**
+**
+** (B) Opening a file with SWMR-write access
+**
+** Superblock version 0, 1, 2
+** -------------------------------------------------------------------------------
** | (earliest, v18) | (earliest, v110) | (v18, v18) | (v18, v110) | (v110, v110) |
+** |_____________________________________________________________________________|
+** File's low bound | ----
+** |-----------------------------------------------------------------------------|
+** File open | fail
+** |_____________________________________________________________________________|
+**
+**
+** Superblock version 3
+** -------------------------------------------------------------------------------
** | (earliest, v18) | (earliest, v110) | (v18, v18) | (v18, v110) | (v110, v110) |
+** |_____________________________________________________________________________|
+** File's low bound | -- | -->v110 | -- | -->v110 | no change |
+** |-----------------------------------------------------------------------------|
+** File open | fail | succeed | fail | succeed | succeed |
+** |_____________________________________________________________________________|
+**
+**
+** (C) Opening a file with write/SWMR-write access + non-default fsinfo
+** -------------------------------------------------------------------------------
** | (earliest, v18) | (earliest, v110) | (v18, v18) | (v18, v110) | (v110, v110) |
+** |_____________________________________________________________________________|
+** File open | fail | succeed | fail | succeed | succeed |
+** |_____________________________________________________________________________|
+**
+**
+******************************************************************************************************/
/* Create FILE8 with the given fapl/fcpl, close it, then reopen it under
 * every (low, high) bounds pair -- optionally with SWMR-write access --
 * verifying open success/failure against the tables documented above.
 * non_def_fsm marks an fcpl with a non-default free-space manager, which
 * requires high >= v110 for both the create and each reopen.  The
 * superblock-version-dependent checks that need library internals are
 * compiled out (#if 0) in this API-test build. */
static void
test_libver_bounds_super_open(hid_t fapl, hid_t fcpl, htri_t is_swmr, htri_t non_def_fsm)
{
    hid_t fid = H5I_INVALID_HID; /* File ID */
#if 0
    H5F_t *f = NULL; /* Internal file pointer */
#endif
    hid_t new_fapl = H5I_INVALID_HID; /* File access property list */
#if 0
    unsigned super_vers; /* Superblock version */
#endif
    H5F_libver_t low, high; /* Low and high bounds */
    herr_t       ret;       /* Return value */

    /* Create the file with the input fcpl and fapl; a create failure is
       expected for the non_def_fsm/low-high-bound case below */
    H5E_BEGIN_TRY
    {
        fid = H5Fcreate(FILE8, H5F_ACC_TRUNC, fcpl, fapl);
    }
    H5E_END_TRY;

    /* Retrieve the low/high bounds */
    ret = H5Pget_libver_bounds(fapl, &low, &high);
    CHECK(ret, FAIL, "H5Pget_libver_bounds");

    if (non_def_fsm && high < H5F_LIBVER_V110) {
        /* Non-default free-space manager needs high >= v110: create fails
           and there is nothing to reopen */
        VERIFY(fid, H5I_INVALID_HID, "H5Fcreate");
    }
    else {
        VERIFY(fid >= 0, TRUE, "H5Fcreate");
#if 0
        /* Get the internal file pointer */
        f = (H5F_t *)H5VL_object(fid);
        CHECK_PTR(f, "H5VL_object");

        /* The file's superblock version */
        super_vers = f->shared->sblock->super_vers;
#endif
        /* Close the file */
        ret = H5Fclose(fid);
        CHECK(ret, FAIL, "H5Fclose");

        /* Create a default file access property list */
        new_fapl = H5Pcreate(H5P_FILE_ACCESS);
        CHECK(new_fapl, FAIL, "H5Pcreate");

        /* Loop through all the combinations of low/high bounds in new_fapl,
           reopening the file under each valid pair */
        for (low = H5F_LIBVER_EARLIEST; low < H5F_LIBVER_NBOUNDS; low++) {
            for (high = H5F_LIBVER_EARLIEST; high < H5F_LIBVER_NBOUNDS; high++) {
                H5E_BEGIN_TRY
                {
                    ret = H5Pset_libver_bounds(new_fapl, low, high);
                }
                H5E_END_TRY;

                /* Invalid combinations: skip without opening */
                if (ret < 0)
                    continue;

                /* Open the file with or without SWMR access */
                H5E_BEGIN_TRY
                {
                    fid = H5Fopen(FILE8, H5F_ACC_RDWR | (is_swmr ? H5F_ACC_SWMR_WRITE : 0), new_fapl);
                }
                H5E_END_TRY;

                /* The reopen is also bound by the non-default free-space
                   manager requirement */
                if (non_def_fsm && high < H5F_LIBVER_V110) {
                    VERIFY(fid, H5I_INVALID_HID, "H5Fopen");
                    continue;
                }
#if 0
                /* Get the internal file pointer if the open succeeds */
                if (fid >= 0) {
                    f = (H5F_t *)H5VL_object(fid);
                    CHECK_PTR(f, "H5VL_object");
                }

                /* Verify the file open succeeds or fails */
                switch (super_vers) {
                    case 3:
                        if (high >= H5F_LIBVER_V110) {
                            /* Should succeed */
                            VERIFY(fid >= 0, TRUE, "H5Fopen");
                            VERIFY(f->shared->low_bound >= H5F_LIBVER_V110, TRUE,
                                   "HDF5_superblock_ver_bounds");

                            /* Close the file */
                            ret = H5Fclose(fid);
                            CHECK(ret, FAIL, "H5Fclose");
                        }
                        else /* Should fail */
                            VERIFY(fid >= 0, FALSE, "H5Fopen");
                        break;

                    case 2:
                        if (is_swmr) /* Should fail */
                            VERIFY(fid >= 0, FALSE, "H5Fopen");
                        else { /* Should succeed */
                            VERIFY(fid >= 0, TRUE, "H5Fopen");
                            VERIFY(f->shared->low_bound >= H5F_LIBVER_V18, TRUE,
                                   "HDF5_superblock_ver_bounds");

                            /* Close the file */
                            ret = H5Fclose(fid);
                            CHECK(ret, FAIL, "H5Fclose");
                        }
                        break;

                    case 1:
                    case 0:
                        if (is_swmr) /* Should fail */
                            VERIFY(fid >= 0, FALSE, "H5Fopen");
                        else { /* Should succeed */
                            VERIFY(fid >= 0, TRUE, "H5Fopen");
                            VERIFY(f->shared->low_bound, low, "HDF5_superblock_ver_bounds");

                            ret = H5Fclose(fid);
                            CHECK(ret, FAIL, "H5Fclose");
                        }
                        break;

                    default:
                        break;
                } /* end switch */
#endif
            } /* end for */
        }     /* end for */

        /* Close the file access property list */
        ret = H5Pclose(new_fapl);
        CHECK(ret, FAIL, "H5Pclose");
    } /* end else */

} /* end test_libver_bounds_super_open() */
+#endif
+
+/****************************************************************
+**
+** test_libver_bounds_obj():
+** Verify object header versions:
+**
+** (a) Create a file with:
+** --the input fapl
+** --a fcpl that has shared message enabled
+** Verify the root group's object header version.
+** Close the file.
+**
+** (b) Create another file with:
+** --the input fapl
+** --a default fcpl
+** Verify the root group's object header version.
+** Close the file.
+**
+** (c) Reopen the same file in (b) with a new fapl.
+** The new fapl is set to the 5 pairs of low/high
+** bounds in a "for" loop. For each setting in fapl:
+** --Create a group in the file
+** --Verify the group's object header version
+** --Close and delete the group
+** --Close the file
+**
+****************************************************************/
+#if 0
+static void
+test_libver_bounds_obj(hid_t fapl)
+{
+    hid_t fid = H5I_INVALID_HID;      /* File ID */
+    hid_t gid = H5I_INVALID_HID;      /* Group ID */
+    hid_t fcpl = H5I_INVALID_HID;     /* File creation property list */
+    hid_t new_fapl = H5I_INVALID_HID; /* File access property list */
+    H5F_t *f = NULL;                  /* Internal file pointer */
+    H5F_libver_t low, high;           /* Low and high bounds */
+    H5O_native_info_t ninfo;          /* Object info */
+    H5G_info_t ginfo;                 /* Group info */
+    herr_t ret;                       /* Return value */
+
+    /* Retrieve the low/high bounds from the input fapl */
+    ret = H5Pget_libver_bounds(fapl, &low, &high);
+    CHECK(ret, FAIL, "H5Pget_libver_bounds");
+
+    /* Create a default file creation property list */
+    fcpl = H5Pcreate(H5P_FILE_CREATE);
+    CHECK(fcpl, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Enable shared message in the fcpl */
+    /* This will result in a version 2 object header */
+    ret = H5Pset_shared_mesg_nindexes(fcpl, 1);
+    CHECK(ret, FAIL, "H5Pset_shared_mesg_nindexes");
+    ret = H5Pset_shared_mesg_index(fcpl, 0, H5O_SHMESG_ATTR_FLAG, 2);
+    CHECK(ret, FAIL, "H5Pset_shared_mesg_index");
+
+    /* Create the file with the fcpl and the input fapl */
+    fid = H5Fcreate(FILE8, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Get root group's object info */
+    ret = H5Oget_native_info_by_name(fid, "/", &ninfo, H5O_NATIVE_INFO_HDR, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_native_info_by_name");
+
+    /* Verify object header version is 2 because shared message is enabled */
+    VERIFY(ninfo.hdr.version, H5O_VERSION_2, "H5O_obj_ver_bounds");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Close the file creation property list */
+    ret = H5Pclose(fcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Create a file with the default fcpl and input fapl */
+    fid = H5Fcreate(FILE8, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Get root group's object info */
+    ret = H5Oget_native_info_by_name(fid, "/", &ninfo, H5O_NATIVE_INFO_HDR, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_native_info_by_name");
+
+    /* Verify object header version is as indicated by low_bound */
+    VERIFY(ninfo.hdr.version, H5O_obj_ver_bounds[low], "H5O_obj_ver_bounds");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Create a new default file access property list which
+       is used to open the file in the "for" loop */
+    new_fapl = H5Pcreate(H5P_FILE_ACCESS);
+    CHECK(new_fapl, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Loop through all the combinations of low/high bounds in new_fapl */
+    /* Open the file with the fapl; create a group and verify the
+       object header version, then delete the group and close the file.*/
+    for (low = H5F_LIBVER_EARLIEST; low < H5F_LIBVER_NBOUNDS; low++) {
+        for (high = H5F_LIBVER_EARLIEST; high < H5F_LIBVER_NBOUNDS; high++) {
+            H5E_BEGIN_TRY
+            {
+                ret = H5Pset_libver_bounds(new_fapl, low, high);
+            }
+            H5E_END_TRY;
+
+            if (ret < 0) /* Invalid combinations */
+                continue;
+
+            /* Open the file; may legitimately fail for some superblock/bounds
+               combinations, hence the H5E_BEGIN_TRY wrapper and the fid check below */
+            H5E_BEGIN_TRY
+            {
+                fid = H5Fopen(FILE8, H5F_ACC_RDWR, new_fapl);
+            }
+            H5E_END_TRY;
+
+            if (fid >= 0) { /* The file open succeeds */
+
+                /* Get the internal file pointer */
+                f = (H5F_t *)H5VL_object(fid);
+                CHECK_PTR(f, "H5VL_object");
+
+                /* Create a group in the file */
+                gid = H5Gcreate2(fid, GRP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+                /* Check hid_t results against H5I_INVALID_HID (file convention),
+                   not FAIL */
+                CHECK(gid, H5I_INVALID_HID, "H5Gcreate2");
+
+                /* Get group information */
+                ret = H5Gget_info(gid, &ginfo);
+                CHECK(ret, FAIL, "H5Gget_info");
+
+                /* Verify group storage type */
+                if (f->shared->low_bound >= H5F_LIBVER_V18)
+                    /* Links in group are stored in object header */
+                    VERIFY(ginfo.storage_type, H5G_STORAGE_TYPE_COMPACT, "H5Gget_info");
+                else
+                    /* Links in group are stored with a "symbol table" */
+                    VERIFY(ginfo.storage_type, H5G_STORAGE_TYPE_SYMBOL_TABLE, "H5Gget_info");
+
+                /* Get object header information */
+                ret = H5Oget_native_info_by_name(gid, GRP_NAME, &ninfo, H5O_NATIVE_INFO_HDR, H5P_DEFAULT);
+                CHECK(ret, FAIL, "H5Oget_native_info_by_name");
+
+                /* Verify object header version as indicated by low_bound */
+                VERIFY(ninfo.hdr.version, H5O_obj_ver_bounds[f->shared->low_bound], "H5O_obj_ver_bounds");
+
+                /* Close the group */
+                ret = H5Gclose(gid);
+                CHECK(ret, FAIL, "H5Gclose");
+
+                /* Delete the group so the next iteration can re-create it */
+                ret = H5Ldelete(fid, GRP_NAME, H5P_DEFAULT);
+                CHECK(ret, FAIL, "H5Ldelete");
+
+                /* Close the file */
+                ret = H5Fclose(fid);
+                CHECK(ret, FAIL, "H5Fclose");
+
+            } /* end if */
+        }     /* end for */
+    }         /* end for */
+
+    /* Close the file access property list */
+    ret = H5Pclose(new_fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+} /* end test_libver_bounds_obj() */
+
+/****************************************************************
+**
+** test_libver_bounds_dataset():
+** Verify message versions associated with datasets:
+**
+** (a) Create a file with default fcpl and the input fapl.
+** Create the following two datasets:
+** --A contiguous dataset
+** --A chunked dataset with "no filter edge chunks"
+** For both datasets, verify the versions for the layout,
+** fill value and filter pipeline messages.
+** Close the file.
+**
+** (b) Create a new fapl that is set to the 5 pairs of low/high
+** bounds in a "for" loop. For each pair of settings in the
+** new fapl:
+** --Open the same file in (a) with the fapl
+** --Create a chunked dataset with 2 unlimited
+** dimensions
+** --Verify the versions for the layout, fill value
+** and filter pipeline messages
+** --Close and delete the dataset
+** --Close the file
+**
+****************************************************************/
+static void
+test_libver_bounds_dataset(hid_t fapl)
+{
+    hid_t fid = H5I_INVALID_HID; /* File ID */
+    hid_t new_fapl = H5I_INVALID_HID; /* File access property list */
+    hid_t did = H5I_INVALID_HID; /* Dataset ID */
+    hid_t sid = H5I_INVALID_HID; /* Dataspace ID */
+    hid_t dcpl = H5I_INVALID_HID; /* Dataset creation property list */
+    H5D_t *dset = NULL; /* Internal dataset pointer (white-box check of message versions) */
+    H5F_t *f = NULL; /* Internal file pointer (for the resolved low_bound) */
+    H5F_libver_t low, high; /* Low and high bounds */
+    herr_t ret; /* Return value */
+    hsize_t fix_dims2[2] = {10, 4}; /* Dimension sizes */
+    hsize_t fix_chunks2[2] = {4, 3}; /* Chunk dimension sizes */
+    hsize_t dims2[2] = {1, 4}; /* Dimension sizes */
+    hsize_t max_dims2[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* Maximum dimension sizes */
+    hsize_t chunks2[2] = {4, 5}; /* Chunk dimension sizes */
+
+    /* Retrieve the low/high bounds from the input fapl */
+    ret = H5Pget_libver_bounds(fapl, &low, &high);
+    CHECK(ret, FAIL, "H5Pget_libver_bounds");
+
+    /* Create the file with the input fapl */
+    fid = H5Fcreate(FILE8, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Create the dataspace */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate");
+
+    /* Create a contiguous dataset */
+    did = H5Dcreate2(fid, DSETA, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did, H5I_INVALID_HID, "H5Dcreate");
+
+    /* Get the internal dataset pointer so the message versions in the
+       dataset's cached creation properties can be inspected directly */
+    dset = (H5D_t *)H5VL_object(did);
+    CHECK_PTR(dset, "H5VL_object");
+
+    /* Verify version for layout and fill value messages */
+    if (low == H5F_LIBVER_EARLIEST) {
+        /* For layout message: the earliest version the library will set is 3 */
+        /* For fill value message: the earliest version the library will set is 2 */
+        VERIFY(dset->shared->layout.version, H5O_LAYOUT_VERSION_DEFAULT, "H5O_layout_ver_bounds");
+        VERIFY(dset->shared->dcpl_cache.fill.version, H5O_FILL_VERSION_2, "H5O_fill_ver_bounds");
+    }
+    else {
+        /* Otherwise the versions track the low bound directly */
+        VERIFY(dset->shared->layout.version, H5O_layout_ver_bounds[low], "H5O_layout_ver_bounds");
+        VERIFY(dset->shared->dcpl_cache.fill.version, H5O_fill_ver_bounds[low], "H5O_fill_ver_bounds");
+    }
+
+    /* Verify filter pipeline message version */
+    VERIFY(dset->shared->dcpl_cache.pline.version, H5O_pline_ver_bounds[low], "H5O_pline_ver_bounds");
+
+    /* Close the dataset */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close the dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Set up dataspace and dcpl for creating a chunked dataset
+       with "no filter edge chunks" enabled.
+       This will result in a version 4 layout message */
+    sid = H5Screate_simple(2, fix_dims2, NULL);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate_simple");
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, H5I_INVALID_HID, "H5Pcreate");
+    ret = H5Pset_chunk(dcpl, 2, fix_chunks2);
+    CHECK(ret, FAIL, "H5Pset_chunk");
+    ret = H5Pset_chunk_opts(dcpl, H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS);
+    CHECK(ret, FAIL, "H5Pset_chunk_opts");
+
+    /* Create the chunked dataset: expected to fail when the bounds do not
+       allow a version 4 layout message, hence the H5E_BEGIN_TRY wrapper */
+    H5E_BEGIN_TRY
+    {
+        did = H5Dcreate2(fid, DSETB, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+
+    if (did >= 0) {
+
+        /* Get the internal dataset pointer */
+        dset = (H5D_t *)H5VL_object(did);
+        CHECK_PTR(dset, "H5VL_object");
+
+        /* Verify layout message version and chunk indexing type */
+        VERIFY(dset->shared->layout.version, H5O_LAYOUT_VERSION_4, "H5O_layout_ver_bounds");
+        VERIFY(dset->shared->layout.u.chunk.idx_type, H5D_CHUNK_IDX_FARRAY, "chunk_index_type");
+
+        /* Close the dataset */
+        ret = H5Dclose(did);
+        CHECK(ret, FAIL, "H5Dclose");
+    }
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Close the dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close the dataset creation property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Create a default file access property list which is used
+       to open the file in the 'for' loop */
+    new_fapl = H5Pcreate(H5P_FILE_ACCESS);
+    CHECK(new_fapl, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Set up dataspace and dcpl for creating a chunked dataset with
+       2 unlimited dimensions in the 'for' loop */
+    sid = H5Screate_simple(2, dims2, max_dims2);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate_simple");
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, H5I_INVALID_HID, "H5Pcreate");
+    ret = H5Pset_chunk(dcpl, 2, chunks2);
+    CHECK(ret, FAIL, "H5Pset_chunk");
+
+    /* Loop through all the combinations of low/high bounds in new_fapl */
+    /* Open the file with the fapl and create the chunked dataset */
+    /* Verify the dataset's layout, fill value and filter pipeline message versions */
+    for (low = H5F_LIBVER_EARLIEST; low < H5F_LIBVER_NBOUNDS; low++) {
+        for (high = H5F_LIBVER_EARLIEST; high < H5F_LIBVER_NBOUNDS; high++) {
+            /* Setting may fail for invalid low/high pairs; suppress the error stack */
+            H5E_BEGIN_TRY
+            {
+                ret = H5Pset_libver_bounds(new_fapl, low, high);
+            }
+            H5E_END_TRY;
+
+            if (ret < 0) /* Invalid low/high combinations */
+                continue;
+
+            /* Open the file; may fail if the file's superblock version
+               exceeds what the bounds allow */
+            H5E_BEGIN_TRY
+            {
+                fid = H5Fopen(FILE8, H5F_ACC_RDWR, new_fapl);
+            }
+            H5E_END_TRY;
+
+            if (fid >= 0) { /* The file open succeeds */
+
+                /* Get the internal file pointer */
+                f = (H5F_t *)H5VL_object(fid);
+                CHECK_PTR(f, "H5VL_object");
+
+                /* Create the chunked dataset */
+                did = H5Dcreate2(fid, DSETC, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+                CHECK(did, H5I_INVALID_HID, "H5Dcreate2");
+
+                /* Get the internal dataset pointer */
+                dset = (H5D_t *)H5VL_object(did);
+                CHECK_PTR(dset, "H5VL_object");
+
+                if (dset) {
+                    /* Verify the dataset's layout, fill value and filter pipeline message versions */
+                    /* Also verify the chunk indexing type */
+                    /* Use f->shared->low_bound (the bound resolved at file open),
+                       not the loop variable 'low' */
+                    if (f->shared->low_bound == H5F_LIBVER_EARLIEST) {
+                        /* For layout message: the earliest version the library will set is 3 */
+                        /* For fill value message: the earliest version the library will set is 2 */
+                        VERIFY(dset->shared->layout.version, H5O_LAYOUT_VERSION_DEFAULT,
+                               "H5O_layout_ver_bounds");
+                        VERIFY(dset->shared->dcpl_cache.fill.version, H5O_FILL_VERSION_2,
+                               "H5O_fill_ver_bounds");
+                    }
+                    else {
+                        VERIFY(dset->shared->layout.version, H5O_layout_ver_bounds[f->shared->low_bound],
+                               "H5O_layout_ver_bounds");
+                        VERIFY(dset->shared->dcpl_cache.fill.version,
+                               H5O_fill_ver_bounds[f->shared->low_bound], "H5O_fill_ver_bounds");
+                    }
+
+                    /* Verify the filter pipeline message version */
+                    VERIFY(dset->shared->dcpl_cache.pline.version, H5O_pline_ver_bounds[f->shared->low_bound],
+                           "H5O_pline_ver_bounds");
+
+                    /* Verify the dataset's chunk indexing type */
+                    if (dset->shared->layout.version == H5O_LAYOUT_VERSION_LATEST)
+                        VERIFY(dset->shared->layout.u.chunk.idx_type, H5D_CHUNK_IDX_BT2, "chunk_index_type");
+                    else
+                        VERIFY(dset->shared->layout.u.chunk.idx_type, H5D_CHUNK_IDX_BTREE,
+                               "chunk_index_type");
+                }
+
+                /* Close the dataset */
+                ret = H5Dclose(did);
+                CHECK(ret, FAIL, "H5Dclose");
+
+                /* Delete the dataset so the next iteration can re-create it */
+                ret = H5Ldelete(fid, DSETC, H5P_DEFAULT);
+                CHECK(ret, FAIL, "H5Ldelete");
+
+                /* Close the file */
+                ret = H5Fclose(fid);
+                CHECK(ret, FAIL, "H5Fclose");
+
+            } /* end if */
+        } /* end for */
+    } /* end for */
+
+    /* Close the file access property list */
+    ret = H5Pclose(new_fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close the dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close the dataset creation property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+} /* end test_libver_bounds_dataset() */
+
+/****************************************************************
+**
+** test_libver_bounds_dataspace():
+** Verify dataspace message versions:
+**
+** (a) Create a file with default fcpl and the input fapl.
+** Create the following two datasets:
+** --A dataset with scalar dataspace
+** --A dataset with null dataspace
+** For both datasets, verify the dataspace message versions.
+** Close the file.
+**
+** (b) Create a new fapl that is set to the 5 pairs of low/high
+** bounds in a "for" loop. For each pair of settings in the
+** new fapl:
+** --Open the same file in (a) with the fapl
+** --Create a chunked dataset, a compact dataset and
+** a contiguous dataset
+** --Verify the dataspace message version for these
+** three datasets
+** --Delete the three datasets and the dataspaces
+** --Close the file
+**
+****************************************************************/
+static void
+test_libver_bounds_dataspace(hid_t fapl)
+{
+    hid_t fid = H5I_INVALID_HID; /* File ID */
+    hid_t new_fapl = H5I_INVALID_HID; /* File access property list */
+    hid_t did = H5I_INVALID_HID, did_null = H5I_INVALID_HID; /* Dataset IDs */
+    hid_t did_compact = H5I_INVALID_HID, did_contig = H5I_INVALID_HID; /* Dataset IDs */
+    hid_t sid = H5I_INVALID_HID, sid_null = H5I_INVALID_HID; /* Dataspace IDs */
+    hid_t sid_compact = H5I_INVALID_HID, sid_contig = H5I_INVALID_HID; /* Dataspace IDs */
+    hid_t dcpl = H5I_INVALID_HID; /* Dataset creation property list */
+    hid_t dcpl_compact = H5I_INVALID_HID, dcpl_contig = H5I_INVALID_HID; /* Dataset creation property lists */
+    H5S_t *space = NULL, *space_null = NULL; /* Internal dataspace pointers */
+    H5F_t *f = NULL; /* Internal file pointer */
+    H5F_libver_t low, high; /* Low and high bounds */
+    hsize_t dims[1] = {1}; /* Dimension sizes */
+    hsize_t dims2[2] = {5, 4}; /* Dimension sizes */
+    hsize_t max_dims[1] = {H5S_UNLIMITED}; /* Maximum dimension sizes */
+    hsize_t chunks[1] = {4}; /* Chunk dimension sizes */
+    herr_t ret; /* Return value */
+
+    /* Retrieve the low/high bounds from the input fapl */
+    ret = H5Pget_libver_bounds(fapl, &low, &high);
+    CHECK(ret, FAIL, "H5Pget_libver_bounds");
+
+    /* Create the file with the input fapl */
+    fid = H5Fcreate(FILE8, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Create scalar dataspace */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate");
+
+    /* Create a dataset with the scalar dataspace */
+    did = H5Dcreate2(fid, DSET, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did, H5I_INVALID_HID, "H5Dcreate");
+
+    /* Close the created dataspace ID before reusing the variable below,
+       so the ID returned by H5Screate() is not leaked */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Get the dataset's dataspace and its internal pointer */
+    sid = H5Dget_space(did);
+    CHECK(sid, H5I_INVALID_HID, "H5Dget_space");
+    space = (H5S_t *)H5I_object(sid);
+    CHECK_PTR(space, "H5I_object");
+
+    /* Verify the dataspace version */
+    VERIFY(space->extent.version, H5O_sdspace_ver_bounds[low], "H5O_sdspace_ver_bounds");
+
+    /* Create null dataspace */
+    sid_null = H5Screate(H5S_NULL);
+    CHECK(sid_null, H5I_INVALID_HID, "H5Screate");
+
+    /* Create a dataset with the null dataspace */
+    did_null = H5Dcreate2(fid, DSET_NULL, H5T_NATIVE_INT, sid_null, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did_null, H5I_INVALID_HID, "H5Dcreate");
+
+    /* Close the created null dataspace ID before reusing the variable below,
+       so the ID returned by H5Screate() is not leaked */
+    ret = H5Sclose(sid_null);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Get the dataset's dataspace and its internal pointer */
+    sid_null = H5Dget_space(did_null);
+    CHECK(sid_null, H5I_INVALID_HID, "H5Dget_space");
+    space_null = (H5S_t *)H5I_object(sid_null);
+    CHECK_PTR(space_null, "H5I_object");
+
+    /* Verify the dataspace version */
+    VERIFY(space_null->extent.version, H5O_SDSPACE_VERSION_2, "H5O_sdspace_ver_bounds");
+
+    /* Close the datasets */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Dclose(did_null);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close the dataspaces */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Sclose(sid_null);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Create a default file access property list which is used
+       to open the file in the 'for' loop */
+    new_fapl = H5Pcreate(H5P_FILE_ACCESS);
+    CHECK(new_fapl, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Set up dataspace and dcpl for creating a chunked dataset */
+    sid = H5Screate_simple(1, dims, max_dims);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate_simple");
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, H5I_INVALID_HID, "H5Pcreate");
+    ret = H5Pset_chunk(dcpl, 1, chunks);
+    CHECK(ret, FAIL, "H5Pset_chunk");
+
+    /* Set up dataspace and dcpl for creating a compact dataset */
+    sid_compact = H5Screate_simple(1, dims, NULL);
+    CHECK(sid_compact, H5I_INVALID_HID, "H5Screate_simple");
+    dcpl_compact = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl_compact, H5I_INVALID_HID, "H5Pcreate");
+    ret = H5Pset_layout(dcpl_compact, H5D_COMPACT);
+    CHECK(ret, FAIL, "H5Pset_layout");
+
+    /* Set up dataspace and dcpl for creating a contiguous dataset */
+    sid_contig = H5Screate_simple(2, dims2, NULL);
+    CHECK(sid_contig, H5I_INVALID_HID, "H5Screate_simple");
+    dcpl_contig = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl_contig, H5I_INVALID_HID, "H5Pcreate");
+    ret = H5Pset_layout(dcpl_contig, H5D_CONTIGUOUS);
+    CHECK(ret, FAIL, "H5Pset_layout");
+
+    /* Loop through all the combinations of low/high bounds in new_fapl */
+    /* Open the file and create the chunked/compact/contiguous datasets */
+    /* Verify the dataspace message version for the three datasets */
+    for (low = H5F_LIBVER_EARLIEST; low < H5F_LIBVER_NBOUNDS; low++) {
+        for (high = H5F_LIBVER_EARLIEST; high < H5F_LIBVER_NBOUNDS; high++) {
+            hid_t tmp_sid, tmp_sid_compact, tmp_sid_contig; /* Dataspace IDs */
+            H5S_t *tmp_space, *tmp_space_compact, *tmp_space_contig; /* Internal dataspace pointers */
+
+            /* Setting may fail for invalid low/high pairs; suppress the error stack */
+            H5E_BEGIN_TRY
+            {
+                ret = H5Pset_libver_bounds(new_fapl, low, high);
+            }
+            H5E_END_TRY;
+
+            if (ret < 0) /* Invalid low/high combinations */
+                continue;
+
+            /* Open the file; may fail if the file's superblock version
+               exceeds what the bounds allow */
+            H5E_BEGIN_TRY
+            {
+                fid = H5Fopen(FILE8, H5F_ACC_RDWR, new_fapl);
+            }
+            H5E_END_TRY;
+
+            if (fid >= 0) { /* The file open succeeds */
+
+                /* Get the internal file pointer */
+                f = (H5F_t *)H5VL_object(fid);
+                CHECK_PTR(f, "H5VL_object");
+
+                /* Create the chunked dataset */
+                did = H5Dcreate2(fid, DSETA, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+                CHECK(did, H5I_INVALID_HID, "H5Dcreate2");
+
+                /* Get the internal dataspace pointer for the chunked dataset */
+                tmp_sid = H5Dget_space(did);
+                CHECK(tmp_sid, H5I_INVALID_HID, "H5Dget_space");
+                tmp_space = (H5S_t *)H5I_object(tmp_sid);
+                CHECK_PTR(tmp_space, "H5I_object");
+
+                /* Create the compact dataset */
+                did_compact = H5Dcreate2(fid, DSETB, H5T_NATIVE_INT, sid_compact, H5P_DEFAULT, dcpl_compact,
+                                         H5P_DEFAULT);
+                CHECK(did_compact, H5I_INVALID_HID, "H5Dcreate2");
+
+                /* Get the internal dataspace pointer for the compact dataset */
+                tmp_sid_compact = H5Dget_space(did_compact);
+                CHECK(tmp_sid_compact, H5I_INVALID_HID, "H5Dget_space");
+                tmp_space_compact = (H5S_t *)H5I_object(tmp_sid_compact);
+                CHECK_PTR(tmp_space_compact, "H5I_object");
+
+                /* Create the contiguous dataset */
+                did_contig =
+                    H5Dcreate2(fid, DSETC, H5T_NATIVE_INT, sid_contig, H5P_DEFAULT, dcpl_contig, H5P_DEFAULT);
+                CHECK(did_contig, H5I_INVALID_HID, "H5Dcreate2");
+
+                /* Get the internal dataspace pointer for the contiguous dataset */
+                tmp_sid_contig = H5Dget_space(did_contig);
+                CHECK(tmp_sid_contig, H5I_INVALID_HID, "H5Dget_space");
+                tmp_space_contig = (H5S_t *)H5I_object(tmp_sid_contig);
+                CHECK_PTR(tmp_space_contig, "H5I_object");
+
+                /* Verify versions for the three dataspaces against the bound
+                   resolved at file open (f->shared->low_bound) */
+                if (tmp_space) {
+                    VERIFY(tmp_space->extent.version, H5O_sdspace_ver_bounds[f->shared->low_bound],
+                           "H5O_sdspace_ver_bounds");
+                }
+                if (tmp_space_compact) {
+                    VERIFY(tmp_space_compact->extent.version, H5O_sdspace_ver_bounds[f->shared->low_bound],
+                           "H5O_sdspace_ver_bounds");
+                }
+                if (tmp_space_contig) {
+                    VERIFY(tmp_space_contig->extent.version, H5O_sdspace_ver_bounds[f->shared->low_bound],
+                           "H5O_sdspace_ver_bounds");
+                }
+
+                /* Close the three datasets */
+                ret = H5Dclose(did);
+                CHECK(ret, FAIL, "H5Dclose");
+                ret = H5Dclose(did_compact);
+                CHECK(ret, FAIL, "H5Dclose");
+                ret = H5Dclose(did_contig);
+                CHECK(ret, FAIL, "H5Dclose");
+
+                /* Close the three dataspaces */
+                ret = H5Sclose(tmp_sid);
+                CHECK(ret, FAIL, "H5Sclose");
+                ret = H5Sclose(tmp_sid_compact);
+                CHECK(ret, FAIL, "H5Sclose");
+                ret = H5Sclose(tmp_sid_contig);
+                CHECK(ret, FAIL, "H5Sclose");
+
+                /* Delete the three datasets so the next iteration can re-create them */
+                ret = H5Ldelete(fid, DSETA, H5P_DEFAULT);
+                CHECK(ret, FAIL, "H5Ldelete");
+                ret = H5Ldelete(fid, DSETB, H5P_DEFAULT);
+                CHECK(ret, FAIL, "H5Ldelete");
+                ret = H5Ldelete(fid, DSETC, H5P_DEFAULT);
+                CHECK(ret, FAIL, "H5Ldelete");
+
+                /* Close the file */
+                ret = H5Fclose(fid);
+                CHECK(ret, FAIL, "H5Fclose");
+
+            } /* end if */
+        } /* end for */
+    } /* end for */
+
+    /* Close the file access property list */
+    ret = H5Pclose(new_fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close the three dataspaces */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Sclose(sid_compact);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Sclose(sid_contig);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close the three dataset creation property lists */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+    ret = H5Pclose(dcpl_compact);
+    CHECK(ret, FAIL, "H5Pclose");
+    ret = H5Pclose(dcpl_contig);
+    CHECK(ret, FAIL, "H5Pclose");
+
+} /* end test_libver_bounds_dataspace() */
+
+/****************************************************************
+**
+** test_libver_bounds_datatype():
+** Verify the datatype message version:
+**
+** (a) Create the following datatypes:
+** 1) integer
+** 2) enum
+** 3) array
+** 4) compound
+** 5) vlen
+** (b) Call test_libver_bounds_datatype_check() for each
+** datatype in (a) to verify the datatype message version.
+**
+****************************************************************/
+static void
+test_libver_bounds_datatype(hid_t fapl)
+{
+    hid_t tid = H5I_INVALID_HID, tid_enum = H5I_INVALID_HID, tid_array = H5I_INVALID_HID; /* Datatype IDs */
+    hid_t tid_compound = H5I_INVALID_HID, tid_vlen = H5I_INVALID_HID; /* Datatype IDs */
+    int enum_value; /* Value for enum datatype */
+    typedef struct s1 { /* Data structure for compound datatype */
+        char c;
+        int i;
+    } s1;
+    hsize_t dims[1] = {1}; /* Dimension sizes */
+    herr_t ret; /* Return value */
+
+    /* Create integer datatype */
+    /* Check every creation/insert call, consistent with the rest of the file */
+    tid = H5Tcopy(H5T_NATIVE_INT);
+    CHECK(tid, H5I_INVALID_HID, "H5Tcopy");
+
+    /* Verify datatype message version */
+    test_libver_bounds_datatype_check(fapl, tid);
+
+    /* Create enum datatype */
+    tid_enum = H5Tenum_create(tid);
+    CHECK(tid_enum, H5I_INVALID_HID, "H5Tenum_create");
+    enum_value = 0;
+    ret = H5Tenum_insert(tid_enum, "val1", &enum_value);
+    CHECK(ret, FAIL, "H5Tenum_insert");
+    enum_value = 1;
+    ret = H5Tenum_insert(tid_enum, "val2", &enum_value);
+    CHECK(ret, FAIL, "H5Tenum_insert");
+
+    /* Verify datatype message version */
+    test_libver_bounds_datatype_check(fapl, tid_enum);
+
+    /* Create array datatype */
+    tid_array = H5Tarray_create2(tid, 1, dims);
+    CHECK(tid_array, H5I_INVALID_HID, "H5Tarray_create2");
+
+    /* Verify datatype message version */
+    test_libver_bounds_datatype_check(fapl, tid_array);
+
+    /* Create compound datatype */
+    tid_compound = H5Tcreate(H5T_COMPOUND, sizeof(s1));
+    CHECK(tid_compound, H5I_INVALID_HID, "H5Tcreate");
+    ret = H5Tinsert(tid_compound, "c", HOFFSET(s1, c), H5T_STD_U8LE);
+    CHECK(ret, FAIL, "H5Tinsert");
+    ret = H5Tinsert(tid_compound, "i", HOFFSET(s1, i), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Verify datatype message version */
+    test_libver_bounds_datatype_check(fapl, tid_compound);
+
+    /* Create vlen datatype */
+    tid_vlen = H5Tvlen_create(tid);
+    CHECK(tid_vlen, H5I_INVALID_HID, "H5Tvlen_create");
+
+    /* Verify datatype message version */
+    test_libver_bounds_datatype_check(fapl, tid_vlen);
+
+    /* Close the datatypes */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Tclose(tid_enum);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Tclose(tid_array);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Tclose(tid_compound);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Tclose(tid_vlen);
+    CHECK(ret, FAIL, "H5Tclose");
+
+} /* end test_libver_bounds_datatype() */
+
+/****************************************************************
+**
+** test_libver_bounds_datatype_check():
+** Helper routine called by test_libver_bounds_datatype()
+** to verify the datatype message version for the input tid:
+**
+** (a) Create a file with default fcpl and the input fapl.
+** Create a contiguous dataset with the input tid.
+** Verify the datatype message version.
+** Create a committed datatype of string to be
+** used later.
+** Close the file.
+**
+** (b) Create a new fapl that is set to the 5 pairs of low/high
+** bounds in a "for" loop. For each pair of settings in
+** the new fapl:
+** --Open the same file in (a) with the fapl
+** --Verify the message version for the committed
+** datatype created earlier
+** --Create a chunked dataset with the input tid
+** --Verify the datatype message version
+** --Close and delete the dataset
+** --Close the file
+**
+****************************************************************/
+static void
+test_libver_bounds_datatype_check(hid_t fapl, hid_t tid)
+{
+    hid_t fid = H5I_INVALID_HID; /* File ID */
+    hid_t new_fapl = H5I_INVALID_HID; /* File access property list */
+    hid_t dcpl = H5I_INVALID_HID; /* Dataset creation property list */
+    hid_t dtid = H5I_INVALID_HID; /* Datatype ID for the dataset */
+    hid_t str_tid = H5I_INVALID_HID; /* String datatype ID */
+    hid_t did = H5I_INVALID_HID; /* Dataset ID */
+    hid_t sid = H5I_INVALID_HID; /* Dataspace ID */
+    hsize_t dims[1] = {1}; /* Dimension sizes */
+    hsize_t dims2[2] = {5, 4}; /* Dimension sizes */
+    hsize_t max_dims2[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* Maximum dimension sizes */
+    hsize_t chunks[2] = {2, 3}; /* Chunk dimension sizes */
+    H5T_t *dtype = NULL; /* Internal datatype pointer */
+    H5T_t *str_dtype = NULL; /* Internal datatype pointer for the string datatype */
+    H5F_t *f = NULL; /* Internal file pointer */
+    H5F_libver_t low, high; /* Low and high bounds */
+    herr_t ret; /* Return value */
+
+    /* Retrieve the low/high version bounds from the input fapl */
+    ret = H5Pget_libver_bounds(fapl, &low, &high);
+    CHECK(ret, FAIL, "H5Pget_libver_bounds");
+
+    /* Create the file with the input fapl */
+    fid = H5Fcreate(FILE8, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Create a committed datatype of string which will be used
+       later inside the 'for' loop */
+    str_tid = H5Tcopy(H5T_C_S1);
+    CHECK(str_tid, H5I_INVALID_HID, "H5Tcopy");
+    ret = H5Tset_size(str_tid, (size_t)10);
+    CHECK(ret, FAIL, "H5Tset_size");
+    ret = H5Tcommit2(fid, "datatype", str_tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+    ret = H5Tclose(str_tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Create dataspace */
+    sid = H5Screate_simple(1, dims, NULL);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate_simple");
+
+    /* Create a dataset with the input tid */
+    did = H5Dcreate2(fid, DSET1, tid, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Get the dataset's datatype */
+    dtid = H5Dget_type(did);
+    CHECK(dtid, H5I_INVALID_HID, "H5Dget_type");
+
+    /* Get the internal datatype pointer */
+    dtype = (H5T_t *)H5I_object(dtid);
+    CHECK_PTR(dtype, "H5I_object");
+
+    /* Verify the datatype message version */
+    /* H5T_COMPOUND, H5T_ENUM, H5T_ARRAY:
+     * --the library will set version according to low_bound
+     * --H5T_ARRAY: the earliest version the library will set is 2
+     * H5T_INTEGER, H5T_FLOAT, H5T_TIME, H5T_STRING, H5T_BITFIELD, H5T_OPAQUE, H5T_REFERENCE:
+     * --the library will only use basic version
+     */
+
+    if (dtype->shared->type == H5T_COMPOUND || dtype->shared->type == H5T_ENUM ||
+        dtype->shared->type == H5T_ARRAY) {
+        if (dtype->shared->type == H5T_ARRAY && low == H5F_LIBVER_EARLIEST)
+            VERIFY(dtype->shared->version, H5O_DTYPE_VERSION_2, "H5O_dtype_ver_bounds");
+        else
+            VERIFY(dtype->shared->version, H5O_dtype_ver_bounds[low], "H5O_dtype_ver_bounds");
+    }
+    else
+        VERIFY(dtype->shared->version, H5O_dtype_ver_bounds[H5F_LIBVER_EARLIEST], "H5O_dtype_ver_bounds");
+
+    /* Close the dataset */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close the dataspace (label corrected: this is H5Sclose, not H5Dclose) */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close the datatype (label corrected: this is H5Tclose, not H5Fclose) */
+    ret = H5Tclose(dtid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Create a default file access property list */
+    new_fapl = H5Pcreate(H5P_FILE_ACCESS);
+    CHECK(new_fapl, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Set up dataspace and dcpl for creating a chunked dataset */
+    sid = H5Screate_simple(2, dims2, max_dims2);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate_simple");
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, H5I_INVALID_HID, "H5Pcreate");
+    ret = H5Pset_chunk(dcpl, 2, chunks);
+    CHECK(ret, FAIL, "H5Pset_chunk");
+
+    /* Loop through all the combinations of low/high bounds */
+    /* Open the file and create the chunked dataset with the input tid */
+    /* Verify the dataset's datatype message version */
+    /* Also verify the committed datatype message version */
+    for (low = H5F_LIBVER_EARLIEST; low < H5F_LIBVER_NBOUNDS; low++) {
+        for (high = H5F_LIBVER_EARLIEST; high < H5F_LIBVER_NBOUNDS; high++) {
+            /* Setting may fail for invalid low/high pairs; suppress the error stack */
+            H5E_BEGIN_TRY
+            {
+                ret = H5Pset_libver_bounds(new_fapl, low, high);
+            }
+            H5E_END_TRY;
+
+            if (ret < 0) /* Invalid low/high combinations */
+                continue;
+
+            /* Open the file; may fail if the file's superblock version
+               exceeds what the bounds allow */
+            H5E_BEGIN_TRY
+            {
+                fid = H5Fopen(FILE8, H5F_ACC_RDWR, new_fapl);
+            }
+            H5E_END_TRY;
+
+            if (fid >= 0) { /* The file open succeeds */
+
+                /* Get the internal file pointer */
+                f = (H5F_t *)H5VL_object(fid);
+                CHECK_PTR(f, "H5VL_object");
+
+                /* Open the committed datatype */
+                str_tid = H5Topen2(fid, "datatype", H5P_DEFAULT);
+                CHECK(str_tid, H5I_INVALID_HID, "H5Topen2");
+                str_dtype = (H5T_t *)H5VL_object(str_tid);
+                CHECK_PTR(str_dtype, "H5VL_object");
+
+                /* Verify the committed datatype message version */
+                VERIFY(str_dtype->shared->version, H5O_dtype_ver_bounds[H5F_LIBVER_EARLIEST],
+                       "H5O_dtype_ver_bounds");
+
+                /* Close the committed datatype */
+                ret = H5Tclose(str_tid);
+                CHECK(ret, FAIL, "H5Tclose");
+
+                /* Create the chunked dataset */
+                did = H5Dcreate2(fid, DSETNAME, tid, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+                CHECK(did, H5I_INVALID_HID, "H5Dcreate2");
+
+                /* Get the dataset's datatype */
+                dtid = H5Dget_type(did);
+                CHECK(dtid, H5I_INVALID_HID, "H5Dget_type");
+
+                /* Get the internal datatype pointer */
+                dtype = (H5T_t *)H5I_object(dtid);
+                CHECK_PTR(dtype, "H5I_object");
+
+                if (dtype) {
+                    /* Verify the dataset's datatype message version */
+                    /* H5T_COMPOUND, H5T_ENUM, H5T_ARRAY:
+                     * --the library will set version according to low_bound
+                     * --H5T_ARRAY: the earliest version the library will set is 2
+                     * H5T_INTEGER, H5T_FLOAT, H5T_TIME, H5T_STRING, H5T_BITFIELD, H5T_OPAQUE, H5T_REFERENCE:
+                     * --the library will only use basic version
+                     */
+                    if (dtype->shared->type == H5T_COMPOUND || dtype->shared->type == H5T_ENUM ||
+                        dtype->shared->type == H5T_ARRAY) {
+                        if (dtype->shared->type == H5T_ARRAY && f->shared->low_bound == H5F_LIBVER_EARLIEST)
+                            VERIFY(dtype->shared->version, H5O_DTYPE_VERSION_2, "H5O_dtype_ver_bounds");
+                        else
+                            VERIFY(dtype->shared->version, H5O_dtype_ver_bounds[f->shared->low_bound],
+                                   "H5O_dtype_ver_bounds");
+                    }
+                    else
+                        VERIFY(dtype->shared->version, H5O_dtype_ver_bounds[H5F_LIBVER_EARLIEST],
+                               "H5O_dtype_ver_bounds");
+                }
+
+                /* Close the dataset */
+                ret = H5Dclose(did);
+                CHECK(ret, FAIL, "H5Dclose");
+
+                /* Close the dataset's datatype */
+                ret = H5Tclose(dtid);
+                CHECK(ret, FAIL, "H5Tclose");
+
+                /* Delete the dataset so the next iteration can re-create it */
+                ret = H5Ldelete(fid, DSETNAME, H5P_DEFAULT);
+                CHECK(ret, FAIL, "H5Ldelete");
+
+                /* Close the file */
+                ret = H5Fclose(fid);
+                CHECK(ret, FAIL, "H5Fclose");
+
+            } /* end if */
+        } /* end for */
+    } /* end for */
+
+    /* Close the file access property list */
+    ret = H5Pclose(new_fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close the dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close the dataset creation property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+} /* end test_libver_bounds_datatype_check() */
+
+/****************************************************************
+**
+** test_libver_bounds_attributes():
+** Verify the attribute message versions:
+**
+** (a) Create a file with default fcpl and the input fapl.
+** Create a group and attach the following three attributes
+** to the group:
+** (1) Attribute with a committed datatype
+** (2) Attribute with integer type
+** (3) Attribute with character encoding set
+** Verify the three attributes' message versions.
+** Close the file.
+**
+** (b) Create a fcpl that has shared datatype message enabled.
+** Create a file with the fcpl and the input fapl.
+** Create a group and attach an attribute with shared
+** integer type to the group.
+** Verify the attribute message version.
+** Close the file
+**
+** (c) Create a new fapl that is set to the 5 pairs of low/high
+** bounds in a "for" loop. For each pair of setting in
+** the new fapl:
+** --Open the same file in (b) with the fapl
+** --Open the group and attach an attribute with integer
+** type to the group
+** --Verify the attribute message version
+** --Delete the attribute
+** --Close the group and the file
+**
+****************************************************************/
+static void
+test_libver_bounds_attributes(hid_t fapl)
+{
+    hid_t        fid      = H5I_INVALID_HID; /* File ID */
+    hid_t        fcpl     = H5I_INVALID_HID; /* File creation property list */
+    hid_t        new_fapl = H5I_INVALID_HID; /* File access property list */
+    hid_t        tid      = H5I_INVALID_HID; /* Datatype ID */
+    hid_t        gid      = H5I_INVALID_HID; /* Group ID */
+    hid_t        sid      = H5I_INVALID_HID; /* Dataspace ID */
+    hid_t        aid      = H5I_INVALID_HID; /* Attribute ID */
+    hid_t        attr_cpl = H5I_INVALID_HID; /* Attribute creation property list */
+    H5A_t       *attr     = NULL;            /* Internal attribute pointer */
+    H5F_t       *f        = NULL;            /* Internal file pointer */
+    H5F_libver_t low, high;                  /* Low and high bounds */
+    herr_t       ret;                        /* Return value */
+
+    /* Retrieve the low/high bounds from the input fapl */
+    ret = H5Pget_libver_bounds(fapl, &low, &high);
+    CHECK(ret, FAIL, "H5Pget_libver_bounds");
+
+    /* Create the file */
+    fid = H5Fcreate(FILE8, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Integer datatype */
+    tid = H5Tcopy(H5T_NATIVE_INT);
+    CHECK(tid, H5I_INVALID_HID, "H5Tcopy");
+
+    /* Create a committed datatype */
+    ret = H5Tcommit2(fid, "datatype", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+
+    /* Create dataspace */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate");
+
+    /* Create a group */
+    gid = H5Gcreate2(fid, GRP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, H5I_INVALID_HID, "H5Gcreate2");
+
+    /* Attach an attribute to the group with the committed datatype */
+    aid = H5Acreate2(gid, "attr1", tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, H5I_INVALID_HID, "H5Acreate2");
+
+    /* Get the internal attribute pointer */
+    attr = (H5A_t *)H5VL_object(aid);
+    CHECK_PTR(attr, "H5VL_object");
+
+    /* Verify the attribute version */
+    if (low == H5F_LIBVER_EARLIEST)
+        /* The earliest version the library can set for an attribute with committed datatype is 2 */
+        VERIFY(attr->shared->version, H5O_ATTR_VERSION_2, "H5O_attr_ver_bounds");
+    else
+        VERIFY(attr->shared->version, H5O_attr_ver_bounds[low], "H5O_attr_ver_bounds");
+
+    /* Close the attribute */
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Attach an attribute to the group with integer type */
+    aid = H5Acreate2(gid, "attr2", H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, H5I_INVALID_HID, "H5Acreate2");
+
+    /* Get the internal attribute pointer */
+    attr = (H5A_t *)H5VL_object(aid);
+    CHECK_PTR(attr, "H5VL_object");
+
+    /* Verify attribute version; a plain integer attribute tracks the low bound directly */
+    VERIFY(attr->shared->version, H5O_attr_ver_bounds[low], "H5O_attr_ver_bounds");
+
+    /* Close the attribute */
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Enable character encoding in attribute creation property list */
+    attr_cpl = H5Pcreate(H5P_ATTRIBUTE_CREATE);
+    CHECK(attr_cpl, H5I_INVALID_HID, "H5Pcreate");
+    ret = H5Pset_char_encoding(attr_cpl, H5T_CSET_UTF8);
+    CHECK(ret, FAIL, "H5Pset_char_encoding");
+
+    /* Attach an attribute to the group with character encoding set */
+    aid = H5Acreate2(gid, "attr3", H5T_NATIVE_INT, sid, attr_cpl, H5P_DEFAULT);
+    CHECK(aid, H5I_INVALID_HID, "H5Acreate2");
+
+    /* Get internal attribute pointer */
+    attr = (H5A_t *)H5VL_object(aid);
+    CHECK_PTR(attr, "H5VL_object");
+
+    /* Verify attribute version */
+    if (low == H5F_LIBVER_EARLIEST)
+        /* The earliest version the library can set for an attribute with character encoding is 3 */
+        VERIFY(attr->shared->version, H5O_ATTR_VERSION_3, "H5O_attr_ver_bounds");
+    else
+        VERIFY(attr->shared->version, H5O_attr_ver_bounds[low], "H5O_attr_ver_bounds");
+
+    /* Close the attribute */
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close the attribute creation property list */
+    ret = H5Pclose(attr_cpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close the group */
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Close the dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close the datatype */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Create a copy of the file creation property list */
+    fcpl = H5Pcreate(H5P_FILE_CREATE);
+    CHECK(fcpl, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Enable shared datatype message */
+    ret = H5Pset_shared_mesg_nindexes(fcpl, 1);
+    CHECK(ret, FAIL, "H5Pset_shared_mesg_nindexes");
+    ret = H5Pset_shared_mesg_index(fcpl, 0, H5O_SHMESG_DTYPE_FLAG, 2);
+    CHECK(ret, FAIL, "H5Pset_shared_mesg_index");
+
+    /* Create the file with shared datatype message enabled */
+    fid = H5Fcreate(FILE8, H5F_ACC_TRUNC, fcpl, fapl);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Create an integer datatype */
+    tid = H5Tcopy(H5T_NATIVE_INT);
+    CHECK(tid, H5I_INVALID_HID, "H5Tcopy");
+
+    /* Create dataspace */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate");
+
+    /* Create a group */
+    gid = H5Gcreate2(fid, GRP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, H5I_INVALID_HID, "H5Gcreate2");
+
+    /* Attach an attribute to the group with shared integer datatype */
+    aid = H5Acreate2(gid, ATTR_NAME, tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, H5I_INVALID_HID, "H5Acreate2");
+
+    /* Get the internal attribute pointer */
+    attr = (H5A_t *)H5VL_object(aid);
+    CHECK_PTR(attr, "H5VL_object");
+
+    /* Verify the attribute version */
+    if (low == H5F_LIBVER_EARLIEST)
+        /* The earliest version the library can set for an attribute with shared datatype is 2 */
+        VERIFY(attr->shared->version, H5O_ATTR_VERSION_2, "H5O_attr_ver_bounds");
+    else
+        VERIFY(attr->shared->version, H5O_attr_ver_bounds[low], "H5O_attr_ver_bounds");
+
+    /* Close the attribute */
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close the group */
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Close the dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close the datatype */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Create a default file access property list */
+    new_fapl = H5Pcreate(H5P_FILE_ACCESS);
+    CHECK(new_fapl, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Create a scalar dataspace to be used later for the attribute */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate");
+
+    /* Loop through all the combinations of low/high bounds */
+    /* Open the file and group and attach an attribute to the group */
+    /* Verify the attribute version */
+    for (low = H5F_LIBVER_EARLIEST; low < H5F_LIBVER_NBOUNDS; low++) {
+        for (high = H5F_LIBVER_EARLIEST; high < H5F_LIBVER_NBOUNDS; high++) {
+            H5E_BEGIN_TRY
+            {
+                ret = H5Pset_libver_bounds(new_fapl, low, high);
+            }
+            H5E_END_TRY;
+
+            if (ret < 0) /* Invalid low/high combinations */
+                continue;
+
+            /* Open the file */
+            H5E_BEGIN_TRY
+            {
+                fid = H5Fopen(FILE8, H5F_ACC_RDWR, new_fapl);
+            }
+            H5E_END_TRY;
+
+            if (fid >= 0) { /* The file open succeeds */
+
+                /* Get the internal file pointer */
+                f = (H5F_t *)H5VL_object(fid);
+                CHECK_PTR(f, "H5VL_object");
+
+                /* Open the group */
+                gid = H5Gopen2(fid, GRP_NAME, H5P_DEFAULT);
+                CHECK(gid, H5I_INVALID_HID, "H5Gopen2");
+
+                /* Attach an attribute to the group */
+                aid = H5Acreate2(gid, "attr1", H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT);
+                CHECK(aid, H5I_INVALID_HID, "H5Acreate2");
+
+                /* Get the internal attribute pointer */
+                attr = (H5A_t *)H5VL_object(aid);
+                CHECK_PTR(attr, "H5VL_object");
+
+                /* Verify the attribute message version against the file's resolved low bound */
+                VERIFY(attr->shared->version, H5O_attr_ver_bounds[f->shared->low_bound],
+                       "H5O_attr_ver_bounds");
+
+                /* Close the attribute */
+                ret = H5Aclose(aid);
+                CHECK(ret, FAIL, "H5Aclose");
+
+                /* Delete the attribute */
+                ret = H5Adelete(gid, "attr1");
+                CHECK(ret, FAIL, "H5Adelete");
+
+                /* Close the group */
+                ret = H5Gclose(gid);
+                CHECK(ret, FAIL, "H5Gclose");
+
+                /* Close the file */
+                ret = H5Fclose(fid);
+                CHECK(ret, FAIL, "H5Fclose");
+
+            } /* end if */
+        }     /* end for */
+    }         /* end for */
+
+    /* Close the file access property list */
+    ret = H5Pclose(new_fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close the dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+} /* end test_libver_bounds_attributes() */
+
+/****************************************************************
+**
+** test_libver_macros():
+** Verify that H5_VERSION_GE and H5_VERSION_LE work correctly.
+**
+****************************************************************/
+static void
+test_libver_macros(void)
+{
+    /* The library's own version triple is the pivot for every comparison below */
+    const int vmaj = H5_VERS_MAJOR;
+    const int vmin = H5_VERS_MINOR;
+    const int vrel = H5_VERS_RELEASE;
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing macros for library version comparison\n"));
+
+    /* H5_VERSION_GE: the library version must compare >= the given triple */
+    VERIFY(H5_VERSION_GE(vmaj, vmin, vrel), TRUE, "H5_VERSION_GE");
+    VERIFY(H5_VERSION_GE(vmaj - 1, vmin, vrel), TRUE, "H5_VERSION_GE");
+    VERIFY(H5_VERSION_GE(vmaj - 1, vmin + 1, vrel), TRUE, "H5_VERSION_GE");
+    VERIFY(H5_VERSION_GE(vmaj - 1, vmin, vrel + 1), TRUE, "H5_VERSION_GE");
+    VERIFY(H5_VERSION_GE(vmaj, vmin - 1, vrel), TRUE, "H5_VERSION_GE");
+    VERIFY(H5_VERSION_GE(vmaj, vmin - 1, vrel + 1), TRUE, "H5_VERSION_GE");
+    /* Only meaningful when the release number can be decremented */
+    if (H5_VERS_RELEASE > 0)
+        VERIFY(H5_VERSION_GE(vmaj, vmin, vrel - 1), TRUE, "H5_VERSION_GE");
+
+    /* Triples strictly newer than the library must NOT satisfy GE */
+    VERIFY(H5_VERSION_GE(vmaj + 1, vmin, vrel), FALSE, "H5_VERSION_GE");
+    VERIFY(H5_VERSION_GE(vmaj + 1, vmin - 1, vrel), FALSE, "H5_VERSION_GE");
+    VERIFY(H5_VERSION_GE(vmaj + 1, vmin - 1, vrel - 1), FALSE, "H5_VERSION_GE");
+    VERIFY(H5_VERSION_GE(vmaj, vmin + 1, vrel), FALSE, "H5_VERSION_GE");
+    VERIFY(H5_VERSION_GE(vmaj, vmin + 1, vrel - 1), FALSE, "H5_VERSION_GE");
+    VERIFY(H5_VERSION_GE(vmaj, vmin, vrel + 1), FALSE, "H5_VERSION_GE");
+
+    /* H5_VERSION_LE: the library version must compare <= the given triple */
+    VERIFY(H5_VERSION_LE(vmaj, vmin, vrel), TRUE, "H5_VERSION_LE");
+    VERIFY(H5_VERSION_LE(vmaj + 1, vmin, vrel), TRUE, "H5_VERSION_LE");
+    VERIFY(H5_VERSION_LE(vmaj + 1, vmin - 1, vrel), TRUE, "H5_VERSION_LE");
+    VERIFY(H5_VERSION_LE(vmaj + 1, vmin - 1, vrel - 1), TRUE, "H5_VERSION_LE");
+    VERIFY(H5_VERSION_LE(vmaj, vmin + 1, vrel), TRUE, "H5_VERSION_LE");
+    VERIFY(H5_VERSION_LE(vmaj, vmin + 1, vrel - 1), TRUE, "H5_VERSION_LE");
+    VERIFY(H5_VERSION_LE(vmaj, vmin, vrel + 1), TRUE, "H5_VERSION_LE");
+
+    /* Triples strictly older than the library must NOT satisfy LE */
+    VERIFY(H5_VERSION_LE(vmaj - 1, vmin, vrel), FALSE, "H5_VERSION_LE");
+    VERIFY(H5_VERSION_LE(vmaj - 1, vmin + 1, vrel), FALSE, "H5_VERSION_LE");
+    VERIFY(H5_VERSION_LE(vmaj - 1, vmin + 1, vrel + 1), FALSE, "H5_VERSION_LE");
+    VERIFY(H5_VERSION_LE(vmaj, vmin - 1, vrel), FALSE, "H5_VERSION_LE");
+    VERIFY(H5_VERSION_LE(vmaj, vmin - 1, vrel + 1), FALSE, "H5_VERSION_LE");
+    /* Only meaningful when the release number can be decremented */
+    if (H5_VERS_RELEASE > 0)
+        VERIFY(H5_VERSION_LE(vmaj, vmin, vrel - 1), FALSE, "H5_VERSION_LE");
+} /* test_libver_macros() */
+
+/****************************************************************
+**
+** test_libver_macros2():
+** Verify that H5_VERSION_GE works correctly and show how
+** to use it.
+**
+****************************************************************/
+static void
+test_libver_macros2(void)
+{
+    hid_t  file = H5I_INVALID_HID; /* File ID */
+    hid_t  grp  = H5I_INVALID_HID; /* Group ID */
+    htri_t status;                 /* Link-existence query result */
+    herr_t ret;                    /* Return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing macros for library version comparison with a file\n"));
+
+    /*
+     * Create a file.
+     */
+    file = H5Fcreate(FILE6, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file, FAIL, "H5Fcreate");
+
+    /*
+     * Create a group in the file.
+     */
+    grp = H5Gcreate2(file, "Group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    /* Check the group handle, not the file handle (was a copy-paste bug) */
+    CHECK(grp, FAIL, "H5Gcreate2");
+
+    /*
+     * Close the group
+     */
+    ret = H5Gclose(grp);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /*
+     * Delete the group using different function based on the library version.
+     * And verify the action.
+     */
+#if H5_VERSION_GE(1, 8, 0)
+    ret = H5Ldelete(file, "Group", H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Lunlink");
+
+    status = H5Lexists(file, "Group", H5P_DEFAULT);
+    VERIFY(status, FALSE, "H5Lexists");
+#else
+    ret = H5Gunlink(file, "Group");
+    CHECK(ret, FAIL, "H5Gunlink");
+
+    H5E_BEGIN_TRY
+    {
+        grp = H5Gopen(file, "Group");
+    }
+    H5E_END_TRY;
+    VERIFY(grp, FAIL, "H5Gopen");
+#endif
+
+    /*
+     * Close the file.
+     */
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* test_libver_macros2() */
+#endif
+
+#if 0
+/****************************************************************
+**
+** test_incr_filesize():
+** Verify H5Fincrement_filesize() and H5Fget_eoa() works as
+** indicated in the "RFC: Enhancement to the tool h5clear".
+**
+****************************************************************/
+static void
+test_incr_filesize(void)
+{
+    hid_t          fid;                 /* File opened with read-write permission */
+    h5_stat_size_t filesize;            /* Size of file when empty */
+    hid_t          fcpl;                /* File creation property list */
+    hid_t          fapl;                /* File access property list */
+    hid_t          dspace;              /* Dataspace ID */
+    hid_t          dset;                /* Dataset ID */
+    hid_t          dcpl;                /* Dataset creation property list */
+    unsigned       u;                   /* Local index variable */
+    char           filename[FILENAME_LEN]; /* Filename to use */
+    char           name[32];            /* Dataset name */
+    haddr_t        stored_eoa;          /* The stored EOA value */
+    hid_t          driver_id    = -1;   /* ID for this VFD */
+    unsigned long  driver_flags = 0;    /* VFD feature flags */
+    herr_t         ret;                 /* Return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing H5Fincrement_filesize() and H5Fget_eoa()\n"));
+
+    fapl = h5_fileaccess();
+    h5_fixname(FILE8, fapl, filename, sizeof filename);
+
+    /* Get the VFD feature flags */
+    driver_id = H5Pget_driver(fapl);
+    CHECK(driver_id, FAIL, "H5Pget_driver");
+
+    ret = H5FDdriver_query(driver_id, &driver_flags);
+    CHECK(ret, FAIL, "H5FDdriver_query");
+
+    /* Check whether the VFD feature flag supports these two public routines */
+    if (driver_flags & H5FD_FEAT_SUPPORTS_SWMR_IO) {
+
+        fcpl = H5Pcreate(H5P_FILE_CREATE);
+        CHECK(fcpl, FAIL, "H5Pcreate");
+
+        /* Set file space strategy */
+        ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_FSM_AGGR, FALSE, (hsize_t)1);
+        CHECK(ret, FAIL, "H5P_set_file_space_strategy");
+
+        /* Create the test file */
+        fid = H5Fcreate(filename, H5F_ACC_TRUNC, fcpl, fapl);
+        CHECK(fid, FAIL, "H5Fcreate");
+
+        /* Create dataspace for datasets */
+        dspace = H5Screate(H5S_SCALAR);
+        CHECK(dspace, FAIL, "H5Screate");
+
+        /* Create a dataset creation property list */
+        dcpl = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(dcpl, FAIL, "H5Pcreate");
+
+        /* Set the space allocation time to early */
+        ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY);
+        CHECK(ret, FAIL, "H5Pset_alloc_time");
+
+        /* Create datasets in file */
+        for (u = 0; u < 10; u++) {
+            HDsnprintf(name, sizeof(name), "Dataset %u", u);
+            dset = H5Dcreate2(fid, name, H5T_STD_U32LE, dspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+            CHECK(dset, FAIL, "H5Dcreate2");
+
+            ret = H5Dclose(dset);
+            CHECK(ret, FAIL, "H5Dclose");
+        } /* end for */
+
+        /* Close dataspace */
+        ret = H5Sclose(dspace);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        /* Close dataset creation property list */
+        ret = H5Pclose(dcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        /* Close file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+
+        /* Get the file size */
+        filesize = h5_get_file_size(filename, fapl);
+
+        /* Open the file */
+        fid = H5Fopen(filename, H5F_ACC_RDWR, fapl);
+        CHECK(fid, FAIL, "H5Fopen");
+
+        /* Get the stored EOA */
+        ret = H5Fget_eoa(fid, &stored_eoa);
+        CHECK(ret, FAIL, "H5Fget_eoa");
+
+        /* Verify the stored EOA is the same as filesize */
+        VERIFY(filesize, stored_eoa, "file size");
+
+        /* Set the EOA to the MAX(EOA, EOF) + 512 */
+        ret = H5Fincrement_filesize(fid, 512);
+        CHECK(ret, FAIL, "H5Fincrement_filesize");
+
+        /* Close file */
+        ret = H5Fclose(fid);
+        CHECK(ret, FAIL, "H5Fclose");
+
+        /* Get the file size */
+        filesize = h5_get_file_size(filename, fapl);
+
+        /* Verify the filesize is the previous stored_eoa + 512 */
+        VERIFY(filesize, stored_eoa + 512, "file size");
+
+        /* Close the file creation property list */
+        ret = H5Pclose(fcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+    }
+
+    /* Close the file access property list unconditionally: it is created by
+     * h5_fileaccess() before the SWMR-capability check, so closing it only
+     * inside the branch above would leak it for non-SWMR VFDs.
+     */
+    ret = H5Pclose(fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+} /* end test_incr_filesize() */
+#endif
+
+/****************************************************************
+**
+** test_min_dset_ohdr():
+** Test API calls to toggle dataset object header minimization.
+**
+** TODO (as separate function?):
+** + setting persists between close and (re)open?
+** + dataset header sizes created while changing value of toggle
+**
+****************************************************************/
+#if 0
+static void
+test_min_dset_ohdr(void)
+{
+    const char basename[]             = "min_dset_ohdr_testfile";
+    char       filename[FILENAME_LEN] = "";
+    hid_t      file_id                = H5I_INVALID_HID; /* First open of the test file */
+    hid_t      file2_id               = H5I_INVALID_HID; /* Second open of the same file */
+    hbool_t    minimize;                                 /* Hint value read back from the file */
+    herr_t     ret;
+
+    MESSAGE(5, ("Testing dataset object header minimization\n"));
+
+    /*********/
+    /* SETUP */
+    /*********/
+
+    h5_fixname(basename, H5P_DEFAULT, filename, sizeof(filename));
+
+    file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK_I(file_id, "H5Fcreate");
+
+    /*********/
+    /* TESTS */
+    /*********/
+
+    /*----------------------------------------
+     * TEST default value
+     */
+    ret = H5Fget_dset_no_attrs_hint(file_id, &minimize);
+    CHECK(ret, FAIL, "H5Fget_dset_no_attrs_hint");
+    VERIFY(minimize, FALSE, "minimize flag");
+
+    /*----------------------------------------
+     * TEST set to TRUE
+     */
+    ret = H5Fset_dset_no_attrs_hint(file_id, TRUE);
+    CHECK(ret, FAIL, "H5Fset_dset_no_attrs_hint");
+
+    ret = H5Fget_dset_no_attrs_hint(file_id, &minimize);
+    CHECK(ret, FAIL, "H5Fget_dset_no_attrs_hint");
+    VERIFY(minimize, TRUE, "minimize flag");
+
+    /*----------------------------------------
+     * TEST second file open on same filename
+     */
+    file2_id = H5Fopen(filename, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK_I(file2_id, "H5Fopen");
+
+    /* verify TRUE setting on second open
+     */
+    ret = H5Fget_dset_no_attrs_hint(file_id, &minimize);
+    CHECK(ret, FAIL, "H5Fget_dset_no_attrs_hint");
+    VERIFY(minimize, TRUE, "minimize flag");
+
+    /* re-set to FALSE on first open
+     */
+    ret = H5Fset_dset_no_attrs_hint(file_id, FALSE);
+    CHECK(ret, FAIL, "H5Fset_dset_no_attrs_hint");
+
+    /* verify FALSE set on both opens
+     */
+    ret = H5Fget_dset_no_attrs_hint(file_id, &minimize);
+    CHECK(ret, FAIL, "H5Fget_dset_no_attrs_hint");
+    VERIFY(minimize, FALSE, "minimize flag");
+
+    ret = H5Fget_dset_no_attrs_hint(file2_id, &minimize);
+    CHECK(ret, FAIL, "H5Fget_dset_no_attrs_hint");
+    VERIFY(minimize, FALSE, "minimize flag");
+
+    /* re-set to TRUE on second open
+     */
+    ret = H5Fset_dset_no_attrs_hint(file2_id, TRUE);
+    CHECK(ret, FAIL, "H5Fset_dset_no_attrs_hint");
+
+    /* verify TRUE set on both opens
+     */
+    ret = H5Fget_dset_no_attrs_hint(file_id, &minimize);
+    CHECK(ret, FAIL, "H5Fget_dset_no_attrs_hint");
+    VERIFY(minimize, TRUE, "minimize flag");
+
+    ret = H5Fget_dset_no_attrs_hint(file2_id, &minimize);
+    CHECK(ret, FAIL, "H5Fget_dset_no_attrs_hint");
+    VERIFY(minimize, TRUE, "minimize flag");
+
+    /*----------------------------------------
+     * TEST error cases
+     */
+
+    /* trying to set with invalid file ID */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Fset_dset_no_attrs_hint(H5I_INVALID_HID, TRUE);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Fset_dset_no_attrs_hint");
+
+    /* trying to get with invalid file ID */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Fget_dset_no_attrs_hint(H5I_INVALID_HID, &minimize);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Fget_dset_no_attrs_hint");
+
+    /* trying to get with invalid pointer */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Fget_dset_no_attrs_hint(file_id, NULL);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Fget_dset_no_attrs_hint");
+
+    /************/
+    /* TEARDOWN */
+    /************/
+
+    ret = H5Fclose(file_id);
+    CHECK(ret, FAIL, "H5Fclose");
+    ret = H5Fclose(file2_id);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_min_dset_ohdr() */
+#endif
+
+/****************************************************************
+**
+** test_deprec():
+** Test deprecated functionality.
+**
+****************************************************************/
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+static void
+test_deprec(const char *env_h5_drvr)
+{
+    hid_t       file;     /* File IDs for old & new files */
+    hid_t       fcpl;     /* File creation property list */
+    hid_t       fapl;     /* File access property list */
+    hid_t       new_fapl; /* Access plist retrieved from the file */
+    hsize_t     align;    /* Alignment read back from new_fapl */
+    unsigned    super;    /* Superblock version # */
+    unsigned    freelist; /* Free list version # */
+    unsigned    stab;     /* Symbol table entry version # */
+    unsigned    shhdr;    /* Shared object header version # */
+    H5F_info1_t finfo;    /* global information about file */
+    herr_t      ret;      /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing deprecated routines\n"));
+
+    /* Creating a file with the default file creation property list should
+     * create a version 0 superblock
+     */
+
+    /* Create file with default file creation property list */
+    file = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file, FAIL, "H5Fcreate");
+
+    /* Get the file's version information */
+    ret = H5Fget_info1(file, &finfo);
+    CHECK(ret, FAIL, "H5Fget_info1");
+    VERIFY(finfo.super_ext_size, 0, "H5Fget_info1");
+    VERIFY(finfo.sohm.hdr_size, 0, "H5Fget_info1");
+    VERIFY(finfo.sohm.msgs_info.index_size, 0, "H5Fget_info1");
+    VERIFY(finfo.sohm.msgs_info.heap_size, 0, "H5Fget_info1");
+
+    /* Get the file's dataset creation property list */
+    fcpl = H5Fget_create_plist(file);
+    CHECK(fcpl, FAIL, "H5Fget_create_plist");
+
+    /* Get the file's version information */
+    ret = H5Pget_version(fcpl, &super, &freelist, &stab, &shhdr);
+    CHECK(ret, FAIL, "H5Pget_version");
+    VERIFY(super, 0, "H5Pget_version");
+    VERIFY(freelist, 0, "H5Pget_version");
+    VERIFY(stab, 0, "H5Pget_version");
+    VERIFY(shhdr, 0, "H5Pget_version");
+
+    /* Close FCPL */
+    ret = H5Pclose(fcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Only run this part of the test with the sec2/default driver */
+    if (h5_using_default_driver(env_h5_drvr)) {
+        /* Create a file creation property list */
+        fcpl = H5Pcreate(H5P_FILE_CREATE);
+        CHECK(fcpl, FAIL, "H5Pcreate");
+
+        /* Set properties in the FCPL that will push the superblock version up.
+         * Check each call separately so a failure in the first is not masked
+         * by the second overwriting 'ret'.
+         */
+        ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_PAGE, 1, (hsize_t)0);
+        CHECK(ret, FAIL, "H5Pset_file_space_strategy");
+        ret = H5Pset_file_space_page_size(fcpl, (hsize_t)512);
+        CHECK(ret, FAIL, "H5Pset_file_space_page_size");
+
+        fapl = H5Pcreate(H5P_FILE_ACCESS);
+        CHECK(fapl, FAIL, "H5Pcreate");
+        ret = H5Pset_alignment(fapl, (hsize_t)1, (hsize_t)1024);
+        CHECK(ret, FAIL, "H5Pset_alignment");
+
+        /* Creating a file with the non-default file creation property list should
+         * create a version 2 superblock
+         */
+
+        /* Create file with custom file creation property list */
+        file = H5Fcreate(FILE1, H5F_ACC_TRUNC, fcpl, fapl);
+        CHECK(file, FAIL, "H5Fcreate");
+
+        /* The access plist returned by the file reflects the alignment set above */
+        new_fapl = H5Fget_access_plist(file);
+        CHECK(new_fapl, FAIL, "H5Fget_access_plist");
+        ret = H5Pget_alignment(new_fapl, NULL, &align);
+        CHECK(ret, FAIL, "H5Pget_alignment");
+        VERIFY(align, 1024, "H5Pget_alignment");
+
+        /* Close the retrieved access plist (was leaked previously) */
+        ret = H5Pclose(new_fapl);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        /* Close FCPL */
+        ret = H5Pclose(fcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        /* Get the file's version information */
+        ret = H5Fget_info1(file, &finfo);
+        CHECK(ret, FAIL, "H5Fget_info1");
+        VERIFY(finfo.super_ext_size, 152, "H5Fget_info1");
+        VERIFY(finfo.sohm.hdr_size, 0, "H5Fget_info1");
+        VERIFY(finfo.sohm.msgs_info.index_size, 0, "H5Fget_info1");
+        VERIFY(finfo.sohm.msgs_info.heap_size, 0, "H5Fget_info1");
+
+        /* Get the file's dataset creation property list */
+        fcpl = H5Fget_create_plist(file);
+        CHECK(fcpl, FAIL, "H5Fget_create_plist");
+
+        /* Get the file's version information */
+        ret = H5Pget_version(fcpl, &super, &freelist, &stab, &shhdr);
+        CHECK(ret, FAIL, "H5Pget_version");
+        VERIFY(super, 2, "H5Pget_version");
+        VERIFY(freelist, 0, "H5Pget_version");
+        VERIFY(stab, 0, "H5Pget_version");
+        VERIFY(shhdr, 0, "H5Pget_version");
+
+        /* Close FCPL */
+        ret = H5Pclose(fcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        /* Close file */
+        ret = H5Fclose(file);
+        CHECK(ret, FAIL, "H5Fclose");
+
+        /* Re-open the file */
+        file = H5Fopen(FILE1, H5F_ACC_RDONLY, H5P_DEFAULT);
+        CHECK(file, FAIL, "H5Fopen");
+
+        /* Get the file's version information */
+        ret = H5Fget_info1(file, &finfo);
+        CHECK(ret, FAIL, "H5Fget_info1");
+        VERIFY(finfo.super_ext_size, 152, "H5Fget_info1");
+        VERIFY(finfo.sohm.hdr_size, 0, "H5Fget_info1");
+        VERIFY(finfo.sohm.msgs_info.index_size, 0, "H5Fget_info1");
+        VERIFY(finfo.sohm.msgs_info.heap_size, 0, "H5Fget_info1");
+
+        /* Get the file's creation property list */
+        fcpl = H5Fget_create_plist(file);
+        CHECK(fcpl, FAIL, "H5Fget_create_plist");
+
+        /* Get the file's version information */
+        ret = H5Pget_version(fcpl, &super, &freelist, &stab, &shhdr);
+        CHECK(ret, FAIL, "H5Pget_version");
+        VERIFY(super, 2, "H5Pget_version");
+        VERIFY(freelist, 0, "H5Pget_version");
+        VERIFY(stab, 0, "H5Pget_version");
+        VERIFY(shhdr, 0, "H5Pget_version");
+
+        /* Close FCPL */
+        ret = H5Pclose(fcpl);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        /* Close file */
+        ret = H5Fclose(file);
+        CHECK(ret, FAIL, "H5Fclose");
+
+        { /* Test deprecated H5Pget/set_file_space() */
+
+            H5F_file_space_type_t old_strategy;
+            hsize_t               old_threshold;
+            hid_t                 fid;
+            hid_t                 ffcpl;
+
+            fcpl = H5Pcreate(H5P_FILE_CREATE);
+            CHECK(fcpl, FAIL, "H5Pcreate");
+
+            ret = H5Pget_file_space(fcpl, &old_strategy, &old_threshold);
+            CHECK(ret, FAIL, "H5Pget_file_space");
+            VERIFY(old_strategy, H5F_FILE_SPACE_ALL, "H5Pget_file_space");
+            VERIFY(old_threshold, H5F_FREE_SPACE_THRESHOLD_DEF, "H5Pget_file_space");
+
+            /* Set file space strategy and free space section threshold */
+            ret = H5Pset_file_space(fcpl, H5F_FILE_SPACE_ALL_PERSIST, (hsize_t)0);
+            CHECK(ret, FAIL, "H5Pset_file_space");
+
+            /* Get the file space info from the creation property */
+            ret = H5Pget_file_space(fcpl, &old_strategy, &old_threshold);
+            CHECK(ret, FAIL, "H5Pget_file_space");
+            VERIFY(old_strategy, H5F_FILE_SPACE_ALL_PERSIST, "H5Pget_file_space");
+            VERIFY(old_threshold, H5F_FREE_SPACE_THRESHOLD_DEF, "H5Pget_file_space");
+
+            /* H5F_FILE_SPACE_DEFAULT leaves the strategy unchanged; only the
+             * threshold is updated to 3.
+             */
+            ret = H5Pset_file_space(fcpl, H5F_FILE_SPACE_DEFAULT, (hsize_t)3);
+            CHECK(ret, FAIL, "H5Pset_file_space");
+
+            ret = H5Pget_file_space(fcpl, &old_strategy, &old_threshold);
+            CHECK(ret, FAIL, "H5Pget_file_space");
+            VERIFY(old_strategy, H5F_FILE_SPACE_ALL_PERSIST, "H5Pget_file_space");
+            VERIFY(old_threshold, 3, "H5Pget_file_space");
+
+            /* Create a file */
+            fid = H5Fcreate(FILE1, H5F_ACC_TRUNC, fcpl, H5P_DEFAULT);
+            CHECK(fid, FAIL, "H5Fcreate");
+
+            old_strategy  = H5F_FILE_SPACE_DEFAULT;
+            old_threshold = 0;
+            ffcpl         = H5Fget_create_plist(fid);
+            ret           = H5Pget_file_space(ffcpl, &old_strategy, &old_threshold);
+            CHECK(ret, FAIL, "H5Pget_file_space");
+            VERIFY(old_strategy, H5F_FILE_SPACE_ALL_PERSIST, "H5Pget_file_space");
+            VERIFY(old_threshold, 3, "H5Pget_file_space");
+
+            /* Close file */
+            ret = H5Fclose(fid);
+            CHECK(ret, FAIL, "H5Fclose");
+
+            ret = H5Pclose(ffcpl);
+            CHECK(ret, FAIL, "H5Pclose");
+
+            ret = H5Pclose(fcpl);
+            CHECK(ret, FAIL, "H5Pclose");
+
+            /* Reopen the file */
+            fid = H5Fopen(FILE1, H5F_ACC_RDONLY, H5P_DEFAULT);
+            CHECK(fid, FAIL, "H5Fopen");
+
+            old_strategy  = H5F_FILE_SPACE_DEFAULT;
+            old_threshold = 0;
+            ffcpl         = H5Fget_create_plist(fid);
+            ret           = H5Pget_file_space(ffcpl, &old_strategy, &old_threshold);
+            CHECK(ret, FAIL, "H5Pget_file_space");
+            VERIFY(old_strategy, H5F_FILE_SPACE_ALL_PERSIST, "H5Pget_file_space");
+            VERIFY(old_threshold, 3, "H5Pget_file_space");
+
+            ret = H5Pclose(ffcpl);
+            CHECK(ret, FAIL, "H5Pclose");
+
+            ret = H5Fclose(fid);
+            CHECK(ret, FAIL, "H5Fclose");
+        }
+
+        /* Close the file access property list (was leaked previously) */
+        ret = H5Pclose(fapl);
+        CHECK(ret, FAIL, "H5Pclose");
+    }
+
+} /* test_deprec */
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+#endif
+
+/****************************************************************
+**
+** test_file(): Main low-level file I/O test routine.
+**
+****************************************************************/
+void
+test_file(void)
+{
+ const char *env_h5_drvr; /* File Driver value from environment */
+ hid_t fapl_id = H5I_INVALID_HID; /* VFD-dependent fapl ID */
+ hbool_t driver_is_default_compatible;
+ herr_t ret;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Low-Level File I/O\n"));
+
+ /* Get the VFD to use */
+ env_h5_drvr = HDgetenv(HDF5_DRIVER);
+ if (env_h5_drvr == NULL)
+ env_h5_drvr = "nomatch";
+
+ /* Improved version of VFD-dependent checks */
+ fapl_id = h5_fileaccess();
+ CHECK(fapl_id, H5I_INVALID_HID, "h5_fileaccess");
+
+ ret = h5_driver_is_default_vfd_compatible(fapl_id, &driver_is_default_compatible);
+ CHECK(ret, FAIL, "h5_driver_is_default_vfd_compatible");
+
+ test_file_create(); /* Test file creation(also creation templates)*/
+ test_file_open(env_h5_drvr); /* Test file opening */
+ test_file_reopen(); /* Test file reopening */
+ test_file_close(); /* Test file close behavior */
+ test_get_file_id(); /* Test H5Iget_file_id */
+ test_get_obj_ids(); /* Test H5Fget_obj_ids for Jira Issue 8528 */
+ test_file_perm(); /* Test file access permissions */
+ test_file_perm2(); /* Test file access permission again */
+ test_file_is_accessible(env_h5_drvr); /* Test detecting HDF5 files correctly */
+ test_file_delete(fapl_id); /* Test H5Fdelete */
+ test_file_open_dot(); /* Test opening objects with "." for a name */
+ test_file_open_overlap(); /* Test opening files in an overlapping manner */
+ test_file_getname(); /* Test basic H5Fget_name() functionality */
+ test_file_double_root_open(); /* Test opening root group from two files works properly */
+ test_file_double_group_open(); /* Test opening same group from two files works properly */
+ test_file_double_dataset_open(); /* Test opening same dataset from two files works properly */
+ test_file_double_datatype_open(); /* Test opening same named datatype from two files works properly */
+ test_file_double_file_dataset_open(TRUE);
+ test_file_double_file_dataset_open(FALSE);
+#if 0
+ test_userblock_file_size(
+ env_h5_drvr); /* Tests that files created with a userblock have the correct size */
+ test_cached_stab_info(); /* Tests that files are created with cached stab info in the superblock */
+
+ if (driver_is_default_compatible) {
+ test_rw_noupdate(); /* Test to ensure that RW permissions don't write the file unless dirtied */
+ }
+
+ test_userblock_alignment(
+ env_h5_drvr); /* Tests that files created with a userblock and alignment interact properly */
+ test_userblock_alignment_paged(env_h5_drvr); /* Tests files created with a userblock and alignment (via
+ paged aggregation) interact properly */
+ test_filespace_info(env_h5_drvr); /* Test file creation public routines: */
+ /* H5Pget/set_file_space_strategy() & H5Pget/set_file_space_page_size() */
+ /* Skipped testing for multi/split drivers */
+ test_file_freespace(env_h5_drvr); /* Test file public routine H5Fget_freespace() */
+ /* Skipped testing for multi/split drivers */
+ /* Setup for multi/split drivers are there already */
+ test_sects_freespace(env_h5_drvr,
+ TRUE); /* Test file public routine H5Fget_free_sections() for new format */
+ /* Skipped testing for multi/split drivers */
+ /* Setup for multi/split drivers are there already */
+ test_sects_freespace(env_h5_drvr, FALSE); /* Test file public routine H5Fget_free_sections() */
+ /* Skipped testing for multi/split drivers */
+
+ if (driver_is_default_compatible) {
+ test_filespace_compatible(); /* Test compatibility for file space management */
+
+ test_filespace_round_compatible(); /* Testing file space compatibility for files from trunk to 1_8 to
+ trunk */
+ test_filespace_1_10_0_compatible(); /* Testing file space compatibility for files from release 1.10.0
+ */
+ }
+
+ test_libver_bounds(); /* Test setting library version bounds */
+ test_libver_bounds_low_high(env_h5_drvr);
+ test_libver_macros(); /* Test the macros for library version comparison */
+ test_libver_macros2(); /* Test the macros for library version comparison */
+ test_incr_filesize(); /* Test H5Fincrement_filesize() and H5Fget_eoa() */
+ test_min_dset_ohdr(); /* Test dataset object header minimization */
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+ test_file_ishdf5(env_h5_drvr); /* Test detecting HDF5 files correctly */
+ test_deprec(env_h5_drvr); /* Test deprecated routines */
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+#endif
+
+ ret = H5Pclose(fapl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+
+} /* test_file() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_file
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Albert Cheng
+ * July 2, 1998
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_file(void)
+{
+ H5E_BEGIN_TRY
+ {
+ H5Fdelete(SFILE1, H5P_DEFAULT);
+ H5Fdelete(FILE1, H5P_DEFAULT);
+ H5Fdelete(FILE2, H5P_DEFAULT);
+ H5Fdelete(FILE3, H5P_DEFAULT);
+ H5Fdelete(FILE4, H5P_DEFAULT);
+ H5Fdelete(FILE5, H5P_DEFAULT);
+ H5Fdelete(FILE6, H5P_DEFAULT);
+ H5Fdelete(FILE7, H5P_DEFAULT);
+ H5Fdelete(DST_FILE, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+}
diff --git a/test/API/tgenprop.c b/test/API/tgenprop.c
new file mode 100644
index 0000000..c1ee8af
--- /dev/null
+++ b/test/API/tgenprop.c
@@ -0,0 +1,2201 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: tgenprop
+ *
+ * Test the Generic Property functionality
+ *
+ *************************************************************/
+
+#define H5P_FRIEND /*suppress error about including H5Ppkg */
+
+/* Define this macro to indicate that the testing APIs should be available */
+#define H5P_TESTING
+
+#include "testhdf5.h"
+
+/* #include "H5Dprivate.h" */ /* For Dataset creation property list names */
+/* #include "H5Ppkg.h" */ /* Generic Properties */
+
+#define FILENAME "tgenprop.h5"
+
+/* Property definitions */
+#define CLASS1_NAME "Class 1"
+#define CLASS1_PATH "root/Class 1"
+
+#define CLASS2_NAME "Class 2"
+#define CLASS2_PATH "root/Class 1/Class 2"
+
+/* Property definitions */
+#define PROP1_NAME "Property 1"
+int prop1_def = 10; /* Property 1 default value */
+#define PROP1_SIZE sizeof(prop1_def)
+#define PROP1_DEF_VALUE (&prop1_def)
+
+#define PROP2_NAME "Property 2"
+float prop2_def = 3.14F; /* Property 2 default value */
+#define PROP2_SIZE sizeof(prop2_def)
+#define PROP2_DEF_VALUE (&prop2_def)
+
+#define PROP3_NAME "Property 3"
+char prop3_def[10] = "Ten chars"; /* Property 3 default value */
+#define PROP3_SIZE sizeof(prop3_def)
+#define PROP3_DEF_VALUE (&prop3_def)
+
+#define PROP4_NAME "Property 4"
+double prop4_def = 1.41; /* Property 4 default value */
+#define PROP4_SIZE sizeof(prop4_def)
+#define PROP4_DEF_VALUE (&prop4_def)
+
+/* Structs used during iteration */
+typedef struct iter_data_t {
+ int iter_count;
+ char **names;
+} iter_data_t;
+
+typedef struct count_data_t {
+ int count;
+ hid_t id;
+} count_data_t;
+
+/****************************************************************
+**
+** test_genprop_basic_class(): Test basic generic property list code.
+** Tests creating new generic classes.
+**
+****************************************************************/
+static void
+test_genprop_basic_class(void)
+{
+ hid_t cid1; /* Generic Property class ID */
+ hid_t cid2; /* Generic Property class ID */
+ hid_t cid3; /* Generic Property class ID */
+ char *name; /* Name of class */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Basic Generic Property List Class Creation Functionality\n"));
+
+ /* Create a new generic class, derived from the root of the class hierarchy */
+ cid1 = H5Pcreate_class(H5P_ROOT, CLASS1_NAME, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(cid1, "H5Pcreate_class");
+
+ /* Check class name */
+ name = H5Pget_class_name(cid1);
+ CHECK_PTR(name, "H5Pget_class_name");
+ if (HDstrcmp(name, CLASS1_NAME) != 0)
+ TestErrPrintf("Class names don't match!, name=%s, CLASS1_NAME=%s\n", name, CLASS1_NAME);
+ H5free_memory(name);
+
+ /* Check class parent */
+ cid2 = H5Pget_class_parent(cid1);
+ CHECK_I(cid2, "H5Pget_class_parent");
+
+ /* Verify class parent correct */
+ ret = H5Pequal(cid2, H5P_ROOT);
+ VERIFY(ret, 1, "H5Pequal");
+
+ /* Make certain false positives aren't being returned */
+ ret = H5Pequal(cid2, H5P_FILE_CREATE);
+ VERIFY(ret, 0, "H5Pequal");
+
+ /* Close parent class */
+ ret = H5Pclose_class(cid2);
+ CHECK_I(ret, "H5Pclose_class");
+
+ /* Close class */
+ ret = H5Pclose_class(cid1);
+ CHECK_I(ret, "H5Pclose_class");
+
+ /* Create another new generic class, derived from file creation class */
+ cid1 = H5Pcreate_class(H5P_FILE_CREATE, CLASS2_NAME, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(cid1, "H5Pcreate_class");
+
+ /* Check class name */
+ name = H5Pget_class_name(cid1);
+ CHECK_PTR(name, "H5Pget_class_name");
+ if (HDstrcmp(name, CLASS2_NAME) != 0)
+ TestErrPrintf("Class names don't match!, name=%s, CLASS2_NAME=%s\n", name, CLASS2_NAME);
+ H5free_memory(name);
+
+ /* Check class parent */
+ cid2 = H5Pget_class_parent(cid1);
+ CHECK_I(cid2, "H5Pget_class_parent");
+
+ /* Verify class parent correct */
+ ret = H5Pequal(cid2, H5P_FILE_CREATE);
+ VERIFY(ret, 1, "H5Pequal");
+
+ /* Check class parent's parent */
+ cid3 = H5Pget_class_parent(cid2);
+ CHECK_I(cid3, "H5Pget_class_parent");
+
+ /* Verify class parent's parent correct */
+ ret = H5Pequal(cid3, H5P_GROUP_CREATE);
+ VERIFY(ret, 1, "H5Pequal");
+
+ /* Close parent class's parent */
+ ret = H5Pclose_class(cid3);
+ CHECK_I(ret, "H5Pclose_class");
+
+ /* Close parent class */
+ ret = H5Pclose_class(cid2);
+ CHECK_I(ret, "H5Pclose_class");
+
+ /* Close class */
+ ret = H5Pclose_class(cid1);
+ CHECK_I(ret, "H5Pclose_class");
+} /* end test_genprop_basic_class() */
+
+/****************************************************************
+**
+** test_genprop_basic_class_prop(): Test basic generic property list code.
+** Tests adding properties to generic classes.
+**
+****************************************************************/
+static void
+test_genprop_basic_class_prop(void)
+{
+ hid_t cid1; /* Generic Property class ID */
+ size_t size; /* Size of property */
+ size_t nprops; /* Number of properties in class */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Basic Generic Property List Class Properties Functionality\n"));
+
+ /* Create a new generic class, derived from the root of the class hierarchy */
+ cid1 = H5Pcreate_class(H5P_ROOT, CLASS1_NAME, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(cid1, "H5Pcreate_class");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 0, "H5Pget_nprops");
+
+ /* Check the existence of the first property (should fail) */
+ ret = H5Pexist(cid1, PROP1_NAME);
+ VERIFY(ret, 0, "H5Pexist");
+
+ /* Insert first property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Try to insert the first property again (should fail) */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pregister2(cid1, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL,
+ NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pregister2");
+
+ /* Check the existence of the first property */
+ ret = H5Pexist(cid1, PROP1_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+
+ /* Check the size of the first property */
+ ret = H5Pget_size(cid1, PROP1_NAME, &size);
+ CHECK_I(ret, "H5Pget_size");
+ VERIFY(size, PROP1_SIZE, "H5Pget_size");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 1, "H5Pget_nprops");
+
+ /* Insert second property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP2_NAME, PROP2_SIZE, PROP2_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Try to insert the second property again (should fail) */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pregister2(cid1, PROP2_NAME, PROP2_SIZE, PROP2_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL,
+ NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pregister2");
+
+ /* Check the existence of the second property */
+ ret = H5Pexist(cid1, PROP2_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+
+ /* Check the size of the second property */
+ ret = H5Pget_size(cid1, PROP2_NAME, &size);
+ CHECK_I(ret, "H5Pget_size");
+ VERIFY(size, PROP2_SIZE, "H5Pget_size");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 2, "H5Pget_nprops");
+
+ /* Insert third property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP3_NAME, PROP3_SIZE, PROP3_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Check the existence of the third property */
+ ret = H5Pexist(cid1, PROP3_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+
+ /* Check the size of the third property */
+ ret = H5Pget_size(cid1, PROP3_NAME, &size);
+ CHECK_I(ret, "H5Pget_size");
+ VERIFY(size, PROP3_SIZE, "H5Pget_size");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 3, "H5Pget_nprops");
+
+ /* Unregister first property */
+ ret = H5Punregister(cid1, PROP1_NAME);
+ CHECK_I(ret, "H5Punregister");
+
+ /* Try to check the size of the first property (should fail) */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pget_size(cid1, PROP1_NAME, &size);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pget_size");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 2, "H5Pget_nprops");
+
+ /* Unregister second property */
+ ret = H5Punregister(cid1, PROP2_NAME);
+ CHECK_I(ret, "H5Punregister");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 1, "H5Pget_nprops");
+
+ /* Unregister third property */
+ ret = H5Punregister(cid1, PROP3_NAME);
+ CHECK_I(ret, "H5Punregister");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 0, "H5Pget_nprops");
+
+ /* Close class */
+ ret = H5Pclose_class(cid1);
+ CHECK_I(ret, "H5Pclose_class");
+} /* end test_genprop_basic_class_prop() */
+
+/****************************************************************
+**
+** test_genprop_iter1(): Property iterator for test_genprop_class_iter
+**
+****************************************************************/
+static int
+test_genprop_iter1(hid_t H5_ATTR_UNUSED id, const char *name, void *iter_data)
+{
+ iter_data_t *idata = (iter_data_t *)iter_data;
+
+ return HDstrcmp(name, idata->names[idata->iter_count++]);
+}
+
+/****************************************************************
+**
+** test_genprop_class_iter(): Test basic generic property list code.
+** Tests iterating over properties in a generic class.
+**
+****************************************************************/
+static void
+test_genprop_class_iter(void)
+{
+ hid_t cid1; /* Generic Property class ID */
+ size_t nprops; /* Number of properties in class */
+ int idx; /* Index to start iteration at */
+ struct { /* Struct for iterations */
+ int iter_count;
+ const char **names;
+ } iter_struct;
+ const char *pnames[4] = {/* Names of properties for iterator */
+ PROP1_NAME, PROP2_NAME, PROP3_NAME, PROP4_NAME};
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Basic Generic Property List Class Property Iteration Functionality\n"));
+
+ /* Create a new generic class, derived from the root of the class hierarchy */
+ cid1 = H5Pcreate_class(H5P_ROOT, CLASS1_NAME, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(cid1, "H5Pcreate_class");
+
+ /* Insert first property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Insert second property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP2_NAME, PROP2_SIZE, PROP2_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Insert third property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP3_NAME, PROP3_SIZE, PROP3_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Insert fourth property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP4_NAME, PROP4_SIZE, PROP4_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 4, "H5Pget_nprops");
+
+ /* Iterate over all properties in class */
+ iter_struct.iter_count = 0;
+ iter_struct.names = pnames;
+ ret = H5Piterate(cid1, NULL, test_genprop_iter1, &iter_struct);
+ VERIFY(ret, 0, "H5Piterate");
+
+ /* Iterate over last three properties in class */
+ idx = iter_struct.iter_count = 1;
+ ret = H5Piterate(cid1, &idx, test_genprop_iter1, &iter_struct);
+ VERIFY(ret, 0, "H5Piterate");
+ VERIFY(idx, (int)nprops, "H5Piterate");
+
+ /* Close class */
+ ret = H5Pclose_class(cid1);
+ CHECK_I(ret, "H5Pclose_class");
+} /* end test_genprop_class_iter() */
+
+/****************************************************************
+**
+** test_genprop_cls_*_cb1(): Property List callbacks for test_genprop_class_callback
+**
+****************************************************************/
+static herr_t
+test_genprop_cls_crt_cb1(hid_t list_id, void *create_data)
+{
+ count_data_t *cdata = (count_data_t *)create_data;
+
+ cdata->count++;
+ cdata->id = list_id;
+
+ return SUCCEED;
+}
+
+static herr_t
+test_genprop_cls_cpy_cb1(hid_t new_list_id, hid_t H5_ATTR_UNUSED old_list_id, void *copy_data)
+{
+ count_data_t *cdata = (count_data_t *)copy_data;
+
+ cdata->count++;
+ cdata->id = new_list_id;
+
+ return SUCCEED;
+}
+
+static herr_t
+test_genprop_cls_cls_cb1(hid_t list_id, void *create_data)
+{
+ count_data_t *cdata = (count_data_t *)create_data;
+
+ cdata->count++;
+ cdata->id = list_id;
+
+ return SUCCEED;
+}
+
+/****************************************************************
+**
+** test_genprop_class_callback(): Test basic generic property list code.
+** Tests callbacks for property lists in a generic class.
+**
+****************************************************************/
+static void
+test_genprop_class_callback(void)
+{
+ hid_t cid1; /* Generic Property class ID */
+ hid_t cid2; /* Generic Property class ID */
+ hid_t lid1; /* Generic Property list ID */
+ hid_t lid2; /* Generic Property list ID */
+ hid_t lid3; /* Generic Property list ID */
+ size_t nprops; /* Number of properties in class */
+ struct { /* Struct for callbacks */
+ int count;
+ hid_t id;
+ } crt_cb_struct, cpy_cb_struct, cls_cb_struct;
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Basic Generic Property List Class Callback Functionality\n"));
+
+ /* Create a new generic class, derived from the root of the class hierarchy */
+ cid1 =
+ H5Pcreate_class(H5P_ROOT, CLASS1_NAME, test_genprop_cls_crt_cb1, &crt_cb_struct,
+ test_genprop_cls_cpy_cb1, &cpy_cb_struct, test_genprop_cls_cls_cb1, &cls_cb_struct);
+ CHECK_I(cid1, "H5Pcreate_class");
+
+ /* Insert first property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Insert second property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP2_NAME, PROP2_SIZE, PROP2_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Insert third property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP3_NAME, PROP3_SIZE, PROP3_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 3, "H5Pget_nprops");
+
+ /* Initialize class callback structs */
+ crt_cb_struct.count = 0;
+ crt_cb_struct.id = (-1);
+ cpy_cb_struct.count = 0;
+ cpy_cb_struct.id = (-1);
+ cls_cb_struct.count = 0;
+ cls_cb_struct.id = (-1);
+
+ /* Create a property list from the class */
+ lid1 = H5Pcreate(cid1);
+ CHECK_I(lid1, "H5Pcreate");
+
+ /* Verify that the creation callback occurred */
+ VERIFY(crt_cb_struct.count, 1, "H5Pcreate");
+ VERIFY(crt_cb_struct.id, lid1, "H5Pcreate");
+
+ /* Check the number of properties in list */
+ ret = H5Pget_nprops(lid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 3, "H5Pget_nprops");
+
+ /* Create another property list from the class */
+ lid2 = H5Pcreate(cid1);
+ CHECK_I(lid2, "H5Pcreate");
+
+ /* Verify that the creation callback occurred */
+ VERIFY(crt_cb_struct.count, 2, "H5Pcreate");
+ VERIFY(crt_cb_struct.id, lid2, "H5Pcreate");
+
+ /* Check the number of properties in list */
+ ret = H5Pget_nprops(lid2, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 3, "H5Pget_nprops");
+
+ /* Create another property list by copying an existing list */
+ lid3 = H5Pcopy(lid1);
+ CHECK_I(lid3, "H5Pcopy");
+
+ /* Verify that the copy callback occurred */
+ VERIFY(cpy_cb_struct.count, 1, "H5Pcopy");
+ VERIFY(cpy_cb_struct.id, lid3, "H5Pcopy");
+
+ /* Check the number of properties in list */
+ ret = H5Pget_nprops(lid3, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 3, "H5Pget_nprops");
+
+ /* Close first list */
+ ret = H5Pclose(lid1);
+ CHECK_I(ret, "H5Pclose");
+
+ /* Verify that the close callback occurred */
+ VERIFY(cls_cb_struct.count, 1, "H5Pclose");
+ VERIFY(cls_cb_struct.id, lid1, "H5Pclose");
+
+ /* Close second list */
+ ret = H5Pclose(lid2);
+ CHECK_I(ret, "H5Pclose");
+
+ /* Verify that the close callback occurred */
+ VERIFY(cls_cb_struct.count, 2, "H5Pclose");
+ VERIFY(cls_cb_struct.id, lid2, "H5Pclose");
+
+ /* Close third list */
+ ret = H5Pclose(lid3);
+ CHECK_I(ret, "H5Pclose");
+
+ /* Verify that the close callback occurred */
+ VERIFY(cls_cb_struct.count, 3, "H5Pclose");
+ VERIFY(cls_cb_struct.id, lid3, "H5Pclose");
+
+ /* Create another new generic class, derived from first class */
+ cid2 =
+ H5Pcreate_class(cid1, CLASS2_NAME, test_genprop_cls_crt_cb1, &crt_cb_struct, test_genprop_cls_cpy_cb1,
+ &cpy_cb_struct, test_genprop_cls_cls_cb1, &cls_cb_struct);
+ CHECK_I(cid2, "H5Pcreate_class");
+
+ /* Insert fourth property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid2, PROP4_NAME, PROP4_SIZE, PROP4_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Check the number of properties in class */
+ /* (only reports the number of properties in 2nd class) */
+ ret = H5Pget_nprops(cid2, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 1, "H5Pget_nprops");
+
+ /* Create a property list from the 2nd class */
+ lid1 = H5Pcreate(cid2);
+ CHECK_I(lid1, "H5Pcreate");
+
+ /* Verify that both of the creation callbacks occurred */
+ VERIFY(crt_cb_struct.count, 4, "H5Pcreate");
+ VERIFY(crt_cb_struct.id, lid1, "H5Pcreate");
+
+ /* Check the number of properties in list */
+ ret = H5Pget_nprops(lid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 4, "H5Pget_nprops");
+
+ /* Create another property list by copying existing list */
+ lid2 = H5Pcopy(lid1);
+ CHECK_I(lid2, "H5Pcopy");
+
+ /* Verify that both of the copy callbacks occurred */
+ VERIFY(cpy_cb_struct.count, 3, "H5Pcopy");
+ VERIFY(cpy_cb_struct.id, lid2, "H5Pcopy");
+
+ /* Check the number of properties in list */
+ ret = H5Pget_nprops(lid2, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 4, "H5Pget_nprops");
+
+ /* Close first list */
+ ret = H5Pclose(lid1);
+ CHECK_I(ret, "H5Pclose");
+
+ /* Verify that both of the close callbacks occurred */
+ VERIFY(cls_cb_struct.count, 5, "H5Pclose");
+ VERIFY(cls_cb_struct.id, lid1, "H5Pclose");
+
+ /* Close second list */
+ ret = H5Pclose(lid2);
+ CHECK_I(ret, "H5Pclose");
+
+ /* Verify that both of the close callbacks occurred */
+ VERIFY(cls_cb_struct.count, 7, "H5Pclose");
+ VERIFY(cls_cb_struct.id, lid2, "H5Pclose");
+
+ /* Close classes */
+ ret = H5Pclose_class(cid1);
+ CHECK_I(ret, "H5Pclose_class");
+ ret = H5Pclose_class(cid2);
+ CHECK_I(ret, "H5Pclose_class");
+} /* end test_genprop_class_callback() */
+
+/****************************************************************
+**
+** test_genprop_basic_list(): Test basic generic property list code.
+** Tests creating new generic property lists.
+**
+****************************************************************/
+static void
+test_genprop_basic_list(void)
+{
+ hid_t cid1; /* Generic Property class ID */
+ hid_t cid2; /* Generic Property class ID */
+ hid_t lid1; /* Generic Property list ID */
+ size_t nprops; /* Number of properties */
+ size_t size; /* Size of property */
+ int prop1_value; /* Value for property #1 */
+ float prop2_value; /* Value for property #2 */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Basic Generic Property List Creation Functionality\n"));
+
+ /* Create a new generic class, derived from the root of the class hierarchy */
+ cid1 = H5Pcreate_class(H5P_ROOT, CLASS1_NAME, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(cid1, "H5Pcreate_class");
+
+ /* Add several properties (w/default values) */
+
+ /* Insert first property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Insert second property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP2_NAME, PROP2_SIZE, PROP2_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 2, "H5Pget_nprops");
+
+ /* Create a property list from the class */
+ lid1 = H5Pcreate(cid1);
+ CHECK_I(lid1, "H5Pcreate");
+
+ /* Get the list's class */
+ cid2 = H5Pget_class(lid1);
+ CHECK_I(cid2, "H5Pget_class");
+
+ /* Check that the list's class is correct */
+ ret = H5Pequal(cid1, cid2);
+ VERIFY(ret, 1, "H5Pequal");
+
+ /* Check correct "is a" class/list relationship */
+ ret = H5Pisa_class(lid1, cid1);
+ VERIFY(ret, 1, "H5Pisa_class");
+
+ /* Check "is a" class/list relationship another way */
+ ret = H5Pisa_class(lid1, cid2);
+ VERIFY(ret, 1, "H5Pisa_class");
+
+ /* Close class */
+ ret = H5Pclose_class(cid2);
+ CHECK_I(ret, "H5Pclose_class");
+
+ /* Check the number of properties in list */
+ ret = H5Pget_nprops(lid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 2, "H5Pget_nprops");
+
+ /* Check existence of properties */
+ ret = H5Pexist(lid1, PROP1_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+ ret = H5Pexist(lid1, PROP2_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+
+ /* Check the sizes of the properties */
+ ret = H5Pget_size(lid1, PROP1_NAME, &size);
+ CHECK_I(ret, "H5Pget_size");
+ VERIFY(size, PROP1_SIZE, "H5Pget_size");
+ ret = H5Pget_size(lid1, PROP2_NAME, &size);
+ CHECK_I(ret, "H5Pget_size");
+ VERIFY(size, PROP2_SIZE, "H5Pget_size");
+
+ /* Check values of properties (set with default values) */
+ ret = H5Pget(lid1, PROP1_NAME, &prop1_value);
+ CHECK_I(ret, "H5Pget");
+ VERIFY(prop1_value, *PROP1_DEF_VALUE, "H5Pget");
+ ret = H5Pget(lid1, PROP2_NAME, &prop2_value);
+ CHECK_I(ret, "H5Pget");
+ /* Verify the floating-point value in this way to avoid compiler warning. */
+ if (!H5_FLT_ABS_EQUAL(prop2_value, *PROP2_DEF_VALUE))
+ HDprintf("*** UNEXPECTED VALUE from %s should be %f, but is %f at line %4d in %s\n", "H5Pget",
+ (double)*PROP2_DEF_VALUE, (double)prop2_value, (int)__LINE__, __FILE__);
+
+ /* Close list */
+ ret = H5Pclose(lid1);
+ CHECK_I(ret, "H5Pclose");
+
+ /* Close class */
+ ret = H5Pclose_class(cid1);
+ CHECK_I(ret, "H5Pclose_class");
+
+} /* end test_genprop_basic_list() */
+
+/****************************************************************
+**
+** test_genprop_basic_list_prop(): Test basic generic property list code.
+** Tests creating new generic property lists and adding and
+** removing properties from them.
+**
+****************************************************************/
+static void
+test_genprop_basic_list_prop(void)
+{
+ hid_t cid1; /* Generic Property class ID */
+ hid_t lid1; /* Generic Property list ID */
+ size_t nprops; /* Number of properties */
+ int prop1_value; /* Value for property #1 */
+ float prop2_value; /* Value for property #2 */
+ char prop3_value[10]; /* Property #3 value */
+ double prop4_value; /* Property #4 value */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Basic Generic Property List Property Functionality\n"));
+
+ /* Create a new generic class, derived from the root of the class hierarchy */
+ cid1 = H5Pcreate_class(H5P_ROOT, CLASS1_NAME, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(cid1, "H5Pcreate_class");
+
+ /* Add several properties (several w/default values) */
+
+ /* Insert first property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Insert second property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP2_NAME, PROP2_SIZE, PROP2_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Create a property list from the class */
+ lid1 = H5Pcreate(cid1);
+ CHECK_I(lid1, "H5Pcreate");
+
+ /* Check the number of properties in list */
+ ret = H5Pget_nprops(lid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 2, "H5Pget_nprops");
+
+ /* Add temporary properties */
+
+ /* Insert first temporary property into list (with no callbacks) */
+ ret = H5Pinsert2(lid1, PROP3_NAME, PROP3_SIZE, PROP3_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pinsert2");
+
+ /* Insert second temporary property into list (with no callbacks) */
+ ret = H5Pinsert2(lid1, PROP4_NAME, PROP4_SIZE, PROP4_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pinsert2");
+
+ /* Check the number of properties in list */
+ ret = H5Pget_nprops(lid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 4, "H5Pget_nprops");
+
+ /* Check existence of all properties */
+ ret = H5Pexist(lid1, PROP1_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+ ret = H5Pexist(lid1, PROP2_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+ ret = H5Pexist(lid1, PROP3_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+ ret = H5Pexist(lid1, PROP4_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+
+ /* Check values of permanent properties (set with default values) */
+ ret = H5Pget(lid1, PROP1_NAME, &prop1_value);
+ CHECK_I(ret, "H5Pget");
+ VERIFY(prop1_value, *PROP1_DEF_VALUE, "H5Pget");
+ ret = H5Pget(lid1, PROP2_NAME, &prop2_value);
+ CHECK_I(ret, "H5Pget");
+ /* Verify the floating-point value in this way to avoid compiler warning. */
+ if (!H5_FLT_ABS_EQUAL(prop2_value, *PROP2_DEF_VALUE))
+ HDprintf("*** UNEXPECTED VALUE from %s should be %f, but is %f at line %4d in %s\n", "H5Pget",
+ (double)*PROP2_DEF_VALUE, (double)prop2_value, (int)__LINE__, __FILE__);
+
+ /* Check values of temporary properties (set with regular values) */
+ ret = H5Pget(lid1, PROP3_NAME, &prop3_value);
+ CHECK_I(ret, "H5Pget");
+ if (HDmemcmp(&prop3_value, PROP3_DEF_VALUE, PROP3_SIZE) != 0)
+ TestErrPrintf("Property #3 doesn't match!, line=%d\n", __LINE__);
+ ret = H5Pget(lid1, PROP4_NAME, &prop4_value);
+ CHECK_I(ret, "H5Pget");
+ /* Verify the floating-point value in this way to avoid compiler warning. */
+ if (!H5_DBL_ABS_EQUAL(prop4_value, *PROP4_DEF_VALUE))
+ HDprintf("*** UNEXPECTED VALUE from %s should be %f, but is %f at line %4d in %s\n", "H5Pget",
+ *PROP4_DEF_VALUE, prop4_value, (int)__LINE__, __FILE__);
+
+ /* Delete permanent property */
+ ret = H5Premove(lid1, PROP2_NAME);
+ CHECK_I(ret, "H5Premove");
+
+ /* Check number of properties */
+ ret = H5Pget_nprops(lid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 3, "H5Pget_nprops");
+
+ /* Delete temporary property */
+ ret = H5Premove(lid1, PROP3_NAME);
+ CHECK_I(ret, "H5Premove");
+
+ /* Check number of properties */
+ ret = H5Pget_nprops(lid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 2, "H5Pget_nprops");
+
+ /* Check existence of remaining properties */
+ ret = H5Pexist(lid1, PROP1_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+ ret = H5Pexist(lid1, PROP4_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+
+ /* Check values of permanent properties (set with default values) */
+ ret = H5Pget(lid1, PROP1_NAME, &prop1_value);
+ CHECK_I(ret, "H5Pget");
+ VERIFY(prop1_value, *PROP1_DEF_VALUE, "H5Pget");
+
+ /* Check values of temporary properties (set with regular values) */
+ ret = H5Pget(lid1, PROP4_NAME, &prop4_value);
+ CHECK_I(ret, "H5Pget");
+    /* Verify the floating-point value in this way to avoid compiler warning. */
+ if (!H5_DBL_ABS_EQUAL(prop4_value, *PROP4_DEF_VALUE))
+ HDprintf("*** UNEXPECTED VALUE from %s should be %f, but is %f at line %4d in %s\n", "H5Pget",
+ *PROP4_DEF_VALUE, prop4_value, (int)__LINE__, __FILE__);
+
+ /* Close list */
+ ret = H5Pclose(lid1);
+ CHECK_I(ret, "H5Pclose");
+
+ /* Close class */
+ ret = H5Pclose_class(cid1);
+ CHECK_I(ret, "H5Pclose_class");
+
+} /* end test_genprop_basic_list_prop() */
+
+/****************************************************************
+**
+** test_genprop_iter2(): Property iterator for test_genprop_list_iter
+**
+**  Parameters:
+**      id        - property list ID being iterated over (unused here)
+**      name      - name of the current property
+**      iter_data - pointer to an iter_data_t carrying the expected
+**                  property-name sequence and a running index
+**
+****************************************************************/
+static int
+test_genprop_iter2(hid_t H5_ATTR_UNUSED id, const char *name, void *iter_data)
+{
+    iter_data_t *idata = (iter_data_t *)iter_data;
+
+    /* Return 0 ("continue") while each visited property name matches the
+     * next expected name; any nonzero return stops the iteration and is
+     * passed back to the caller of H5Piterate as its return value. */
+    return HDstrcmp(name, idata->names[idata->iter_count++]);
+}
+
+/****************************************************************
+**
+** test_genprop_list_iter(): Test basic generic property list code.
+** Tests iterating over generic property list properties.
+**
+****************************************************************/
+static void
+test_genprop_list_iter(void)
+{
+    hid_t  cid1;   /* Generic Property class ID */
+    hid_t  lid1;   /* Generic Property list ID */
+    size_t nprops; /* Number of properties */
+    int    idx;    /* Index to start iteration at */
+    struct {       /* Struct for iterations */
+        int          iter_count;
+        const char **names;
+    } iter_struct;
+    const char *pnames[4] = {/* Names of properties for iterator */
+                             PROP3_NAME, PROP4_NAME, PROP1_NAME, PROP2_NAME};
+    herr_t      ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Generic Property List Iteration Functionality\n"));
+
+    /* Create a new generic class, derived from the root of the class hierarchy */
+    cid1 = H5Pcreate_class(H5P_ROOT, CLASS1_NAME, NULL, NULL, NULL, NULL, NULL, NULL);
+    CHECK_I(cid1, "H5Pcreate_class");
+
+    /* Add several properties (several w/default values) */
+
+    /* Insert first property into class (with no callbacks) */
+    ret =
+        H5Pregister2(cid1, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+    CHECK_I(ret, "H5Pregister2");
+
+    /* Insert second property into class (with no callbacks) */
+    ret =
+        H5Pregister2(cid1, PROP2_NAME, PROP2_SIZE, PROP2_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+    CHECK_I(ret, "H5Pregister2");
+
+    /* Create a property list from the class */
+    lid1 = H5Pcreate(cid1);
+    CHECK_I(lid1, "H5Pcreate");
+
+    /* Check the number of properties in list (the two permanent class properties) */
+    ret = H5Pget_nprops(lid1, &nprops);
+    CHECK_I(ret, "H5Pget_nprops");
+    VERIFY(nprops, 2, "H5Pget_nprops");
+
+    /* Add temporary properties */
+
+    /* Insert first temporary property into class (with no callbacks) */
+    ret = H5Pinsert2(lid1, PROP3_NAME, PROP3_SIZE, PROP3_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+    CHECK_I(ret, "H5Pinsert2");
+
+    /* Insert second temporary property into class (with no callbacks) */
+    ret = H5Pinsert2(lid1, PROP4_NAME, PROP4_SIZE, PROP4_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+    CHECK_I(ret, "H5Pinsert2");
+
+    /* Check the number of properties in list */
+    ret = H5Pget_nprops(lid1, &nprops);
+    CHECK_I(ret, "H5Pget_nprops");
+    VERIFY(nprops, 4, "H5Pget_nprops");
+
+    /* Iterate over all properties in list; iterator returns 0 only if every
+     * visited name matched the expected sequence in pnames[] */
+    iter_struct.iter_count = 0;
+    iter_struct.names = pnames;
+    ret = H5Piterate(lid1, NULL, test_genprop_iter2, &iter_struct);
+    VERIFY(ret, 0, "H5Piterate");
+
+    /* Iterate over last three properties in list, starting at index 1;
+     * H5Piterate updates idx to one past the last property visited */
+    idx = iter_struct.iter_count = 1;
+    ret = H5Piterate(lid1, &idx, test_genprop_iter2, &iter_struct);
+    VERIFY(ret, 0, "H5Piterate");
+    VERIFY(idx, (int)nprops, "H5Piterate");
+
+    /* Close list */
+    ret = H5Pclose(lid1);
+    CHECK_I(ret, "H5Pclose");
+
+    /* Close class */
+    ret = H5Pclose_class(cid1);
+    CHECK_I(ret, "H5Pclose_class");
+
+} /* end test_genprop_list_iter() */
+
+/* Record of property-callback invocations for one tracked property.  Each
+ * callback below bumps its counter and captures the name/value/plist it was
+ * handed, so the test functions can verify exactly when and with what
+ * arguments the library invoked each callback. */
+typedef struct {
+    /* Creation information */
+    int   crt_count;
+    char *crt_name;
+    void *crt_value;
+
+    /* Set information */
+    int   set_count;
+    hid_t set_plist_id;
+    char *set_name;
+    void *set_value;
+
+    /* Get information */
+    int   get_count;
+    hid_t get_plist_id;
+    char *get_name;
+    void *get_value;
+
+    /* Delete information */
+    int   del_count;
+    hid_t del_plist_id;
+    char *del_name;
+    void *del_value;
+
+    /* Copy information */
+    int   cop_count;
+    char *cop_name;
+    void *cop_value;
+
+    /* Compare information */
+    int cmp_count;
+
+    /* Close information */
+    int   cls_count;
+    char *cls_name;
+    void *cls_value;
+} prop_cb_info;
+
+/* Global variables for Callback information */
+prop_cb_info prop1_cb_info; /* Callback statistics for property #1 */
+prop_cb_info prop2_cb_info; /* Callback statistics for property #2 */
+prop_cb_info prop3_cb_info; /* Callback statistics for property #3 */
+
+/****************************************************************
+**
+** test_genprop_cls_cpy_cb2(): Property Class callback for test_genprop_list_callback
+**
+**  Records each class-copy invocation: bumps the caller-supplied
+**  counter and remembers the ID of the newly copied list so the
+**  test can verify the callback fired for the right list.
+**
+****************************************************************/
+static herr_t
+test_genprop_cls_cpy_cb2(hid_t new_list_id, hid_t H5_ATTR_UNUSED old_list_id, void *create_data)
+{
+    count_data_t *cdata = (count_data_t *)create_data;
+
+    cdata->count++;
+    cdata->id = new_list_id;
+
+    return SUCCEED;
+}
+
+/****************************************************************
+**
+** test_genprop_prop_crt_cb1(): Property creation callback for test_genprop_list_callback
+**
+**  Records the creation call: counts it and stashes copies of the
+**  property name and default value into prop1_cb_info.
+**
+****************************************************************/
+static herr_t
+test_genprop_prop_crt_cb1(const char *name, size_t size, void *def_value)
+{
+    /* Set the information from the creation call */
+    /* NOTE(review): unlike the set/get/copy/close callbacks, this one does
+     * not guard against an already-populated crt_name/crt_value, so a second
+     * creation would leak the first copies — fine for this single-create
+     * test, but confirm if reused. */
+    prop1_cb_info.crt_count++;
+    prop1_cb_info.crt_name = HDstrdup(name);
+    prop1_cb_info.crt_value = HDmalloc(size);
+    HDmemcpy(prop1_cb_info.crt_value, def_value, size);
+
+    return (SUCCEED);
+}
+
+/****************************************************************
+**
+** test_genprop_prop_set_cb1(): Property set callback for test_genprop_list_callback
+**
+**  Records each H5Pset() call on the tracked property: counts it,
+**  remembers the list ID, and captures the name and new value
+**  (name/value buffers are allocated only on the first call and
+**  the value buffer is overwritten on subsequent calls).
+**
+****************************************************************/
+static herr_t
+test_genprop_prop_set_cb1(hid_t plist_id, const char *name, size_t size, void *value)
+{
+    /* Set the information from the set call */
+    prop1_cb_info.set_count++;
+    prop1_cb_info.set_plist_id = plist_id;
+    if (prop1_cb_info.set_name == NULL)
+        prop1_cb_info.set_name = HDstrdup(name);
+    if (prop1_cb_info.set_value == NULL)
+        prop1_cb_info.set_value = HDmalloc(size);
+    HDmemcpy(prop1_cb_info.set_value, value, size);
+
+    return (SUCCEED);
+}
+
+/****************************************************************
+**
+** test_genprop_prop_get_cb1(): Property get callback for test_genprop_list_callback
+**
+**  Records each H5Pget() call on the tracked property: counts it,
+**  remembers the list ID, and captures the name and returned value
+**  (buffers allocated on first call, value overwritten thereafter).
+**
+****************************************************************/
+static herr_t
+test_genprop_prop_get_cb1(hid_t plist_id, const char *name, size_t size, void *value)
+{
+    /* Set the information from the get call */
+    prop1_cb_info.get_count++;
+    prop1_cb_info.get_plist_id = plist_id;
+    if (prop1_cb_info.get_name == NULL)
+        prop1_cb_info.get_name = HDstrdup(name);
+    if (prop1_cb_info.get_value == NULL)
+        prop1_cb_info.get_value = HDmalloc(size);
+    HDmemcpy(prop1_cb_info.get_value, value, size);
+
+    return (SUCCEED);
+}
+
+/****************************************************************
+**
+** test_genprop_prop_cop_cb1(): Property copy callback for test_genprop_list_callback
+**
+**  Records each copy of the tracked property (e.g. during H5Pcopy):
+**  counts it and captures the name and copied value.
+**
+****************************************************************/
+static herr_t
+test_genprop_prop_cop_cb1(const char *name, size_t size, void *value)
+{
+    /* Set the information from the get call */
+    prop1_cb_info.cop_count++;
+    if (prop1_cb_info.cop_name == NULL)
+        prop1_cb_info.cop_name = HDstrdup(name);
+    if (prop1_cb_info.cop_value == NULL)
+        prop1_cb_info.cop_value = HDmalloc(size);
+    HDmemcpy(prop1_cb_info.cop_value, value, size);
+
+    return (SUCCEED);
+}
+
+/****************************************************************
+**
+** test_genprop_prop_cmp_cb1(): Property comparison callback for test_genprop_list_callback
+**
+**  Counts comparisons of property #1 and defers the actual
+**  comparison to a raw memory compare of the two values.
+**
+****************************************************************/
+static int
+test_genprop_prop_cmp_cb1(const void *value1, const void *value2, size_t size)
+{
+    /* Set the information from the comparison call */
+    prop1_cb_info.cmp_count++;
+
+    return (HDmemcmp(value1, value2, size));
+}
+
+/****************************************************************
+**
+** test_genprop_prop_cmp_cb3(): Property comparison callback for test_genprop_list_callback
+**
+**  Counts comparisons of property #3 and defers the actual
+**  comparison to a raw memory compare of the two values.
+**
+****************************************************************/
+static int
+test_genprop_prop_cmp_cb3(const void *value1, const void *value2, size_t size)
+{
+    /* Set the information from the comparison call */
+    prop3_cb_info.cmp_count++;
+
+    return (HDmemcmp(value1, value2, size));
+}
+
+/****************************************************************
+**
+** test_genprop_prop_cls_cb1(): Property close callback for test_genprop_list_callback
+**
+**  Records each close of a list containing the tracked property:
+**  counts it and captures the name and final value (buffers
+**  allocated on first call, value overwritten thereafter).
+**
+****************************************************************/
+static herr_t
+test_genprop_prop_cls_cb1(const char *name, size_t size, void *value)
+{
+    /* Set the information from the close call */
+    prop1_cb_info.cls_count++;
+    if (prop1_cb_info.cls_name == NULL)
+        prop1_cb_info.cls_name = HDstrdup(name);
+    if (prop1_cb_info.cls_value == NULL)
+        prop1_cb_info.cls_value = HDmalloc(size);
+    HDmemcpy(prop1_cb_info.cls_value, value, size);
+
+    return (SUCCEED);
+}
+
+/****************************************************************
+**
+** test_genprop_prop_del_cb2(): Property delete callback for test_genprop_list_callback
+**
+**  Records the H5Premove() of property #2: counts it, remembers the
+**  list ID, and captures the name and value at deletion time.
+**  (Unconditional strdup/malloc — this test deletes the property
+**  only once, so no earlier copies exist to leak.)
+**
+****************************************************************/
+static herr_t
+test_genprop_prop_del_cb2(hid_t plist_id, const char *name, size_t size, void *value)
+{
+    /* Set the information from the delete call */
+    prop2_cb_info.del_count++;
+    prop2_cb_info.del_plist_id = plist_id;
+    prop2_cb_info.del_name = HDstrdup(name);
+    prop2_cb_info.del_value = HDmalloc(size);
+    HDmemcpy(prop2_cb_info.del_value, value, size);
+
+    return (SUCCEED);
+}
+
+/****************************************************************
+**
+** test_genprop_list_callback(): Test basic generic property list code.
+** Tests callbacks for properties in a generic property list.
+**
+**  Registers four properties with different callback combinations,
+**  then exercises create/get/set/remove/copy/compare/close on the
+**  list and verifies (via the global prop*_cb_info records) that
+**  each callback fired exactly when expected with the expected
+**  arguments.
+**
+****************************************************************/
+static void
+test_genprop_list_callback(void)
+{
+    hid_t  cid1;                            /* Generic Property class ID */
+    hid_t  lid1;                            /* Generic Property list ID */
+    hid_t  lid2;                            /* 2nd Generic Property list ID */
+    size_t nprops;                          /* Number of properties in class */
+    int    prop1_value;                     /* Value for property #1 */
+    int    prop1_new_value = 20;            /* Property #1 new value */
+    float  prop2_value;                     /* Value for property #2 */
+    char   prop3_value[10];                 /* Property #3 value */
+    char   prop3_new_value[10] = "10 chairs"; /* Property #3 new value */
+    double prop4_value;                     /* Property #4 value */
+    struct {                                /* Struct for callbacks */
+        int   count;
+        hid_t id;
+    } cop_cb_struct;
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Basic Generic Property List Property Callback Functionality\n"));
+
+    /* Create a new generic class, derived from the root of the class hierarchy,
+     * with a class-copy callback that records into cop_cb_struct */
+    cid1 = H5Pcreate_class(H5P_ROOT, CLASS1_NAME, NULL, NULL, test_genprop_cls_cpy_cb2, &cop_cb_struct, NULL,
+                           NULL);
+    CHECK_I(cid1, "H5Pcreate_class");
+
+    /* Insert first property into class (with callbacks) */
+    ret = H5Pregister2(cid1, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, test_genprop_prop_crt_cb1,
+                       test_genprop_prop_set_cb1, test_genprop_prop_get_cb1, NULL, test_genprop_prop_cop_cb1,
+                       test_genprop_prop_cmp_cb1, test_genprop_prop_cls_cb1);
+    CHECK_I(ret, "H5Pregister2");
+
+    /* Insert second property into class (with only delete callback) */
+    ret = H5Pregister2(cid1, PROP2_NAME, PROP2_SIZE, PROP2_DEF_VALUE, NULL, NULL, NULL,
+                       test_genprop_prop_del_cb2, NULL, NULL, NULL);
+    CHECK_I(ret, "H5Pregister2");
+
+    /* Insert third property into class (with only compare callback) */
+    ret = H5Pregister2(cid1, PROP3_NAME, PROP3_SIZE, PROP3_DEF_VALUE, NULL, NULL, NULL, NULL, NULL,
+                       test_genprop_prop_cmp_cb3, NULL);
+    CHECK_I(ret, "H5Pregister2");
+
+    /* Insert fourth property into class (with no callbacks) */
+    ret =
+        H5Pregister2(cid1, PROP4_NAME, PROP4_SIZE, PROP4_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+    CHECK_I(ret, "H5Pregister2");
+
+    /* Check the number of properties in class */
+    ret = H5Pget_nprops(cid1, &nprops);
+    CHECK_I(ret, "H5Pget_nprops");
+    VERIFY(nprops, 4, "H5Pget_nprops");
+
+    /* Initialize class callback structs */
+    cop_cb_struct.count = 0;
+    cop_cb_struct.id = (-1);
+
+    /* Initialize callback information for properties tracked */
+    HDmemset(&prop1_cb_info, 0, sizeof(prop_cb_info));
+    HDmemset(&prop2_cb_info, 0, sizeof(prop_cb_info));
+    HDmemset(&prop3_cb_info, 0, sizeof(prop_cb_info));
+
+    /* Create a property list from the class */
+    lid1 = H5Pcreate(cid1);
+    CHECK_I(lid1, "H5Pcreate");
+
+    /* The compare callback should not have been called once on property 1, as
+     * the property is always copied */
+    VERIFY(prop1_cb_info.cmp_count, 0, "H5Pcreate");
+    /* The compare callback should not have been called on property 3, as there
+     * is no create callback */
+    VERIFY(prop3_cb_info.cmp_count, 0, "H5Pcreate");
+
+    /* Verify creation callback information for properties tracked */
+    VERIFY(prop1_cb_info.crt_count, 1, "H5Pcreate");
+    if (HDstrcmp(prop1_cb_info.crt_name, PROP1_NAME) != 0)
+        TestErrPrintf("Property #1 name doesn't match!, line=%d\n", __LINE__);
+    if (HDmemcmp(prop1_cb_info.crt_value, PROP1_DEF_VALUE, PROP1_SIZE) != 0)
+        TestErrPrintf("Property #1 value doesn't match!, line=%d\n", __LINE__);
+
+    /* Check values of permanent properties (set with default values) */
+    ret = H5Pget(lid1, PROP1_NAME, &prop1_value);
+    CHECK_I(ret, "H5Pget");
+    VERIFY(prop1_value, *PROP1_DEF_VALUE, "H5Pget");
+    /* The compare callback should not have been called */
+    VERIFY(prop1_cb_info.cmp_count, 0, "H5Pget");
+    ret = H5Pget(lid1, PROP2_NAME, &prop2_value);
+    CHECK_I(ret, "H5Pget");
+    /* Verify the floating-point value in this way to avoid compiler warning. */
+    if (!H5_FLT_ABS_EQUAL(prop2_value, *PROP2_DEF_VALUE))
+        HDprintf("*** UNEXPECTED VALUE from %s should be %f, but is %f at line %4d in %s\n", "H5Pget",
+                 (double)*PROP2_DEF_VALUE, (double)prop2_value, (int)__LINE__, __FILE__);
+
+    /* Check values of temporary properties (set with regular values) */
+    ret = H5Pget(lid1, PROP3_NAME, &prop3_value);
+    CHECK_I(ret, "H5Pget");
+    if (HDmemcmp(&prop3_value, PROP3_DEF_VALUE, PROP3_SIZE) != 0)
+        TestErrPrintf("Property #3 doesn't match!, line=%d\n", __LINE__);
+    /* The compare callback should not have been called, as there is no get
+     * callback for this property */
+    VERIFY(prop3_cb_info.cmp_count, 0, "H5Pget");
+    ret = H5Pget(lid1, PROP4_NAME, &prop4_value);
+    CHECK_I(ret, "H5Pget");
+    /* Verify the floating-point value in this way to avoid compiler warning. */
+    if (!H5_DBL_ABS_EQUAL(prop4_value, *PROP4_DEF_VALUE))
+        HDprintf("*** UNEXPECTED VALUE from %s should be %f, but is %f at line %4d in %s\n", "H5Pget",
+                 *PROP4_DEF_VALUE, prop4_value, (int)__LINE__, __FILE__);
+
+    /* Verify get callback information for properties tracked */
+    VERIFY(prop1_cb_info.get_count, 1, "H5Pget");
+    VERIFY(prop1_cb_info.get_plist_id, lid1, "H5Pget");
+    if (HDstrcmp(prop1_cb_info.get_name, PROP1_NAME) != 0)
+        TestErrPrintf("Property #1 name doesn't match!, line=%d\n", __LINE__);
+    if (HDmemcmp(prop1_cb_info.get_value, PROP1_DEF_VALUE, PROP1_SIZE) != 0)
+        TestErrPrintf("Property #1 value doesn't match!, line=%d\n", __LINE__);
+
+    /* Set value of property #1 to different value */
+    ret = H5Pset(lid1, PROP1_NAME, &prop1_new_value);
+    CHECK_I(ret, "H5Pset");
+
+    /* Verify set callback information for properties tracked */
+    VERIFY(prop1_cb_info.set_count, 1, "H5Pset");
+    VERIFY(prop1_cb_info.set_plist_id, lid1, "H5Pset");
+    if (HDstrcmp(prop1_cb_info.set_name, PROP1_NAME) != 0)
+        TestErrPrintf("Property #1 name doesn't match!, line=%d\n", __LINE__);
+    if (HDmemcmp(prop1_cb_info.set_value, &prop1_new_value, PROP1_SIZE) != 0)
+        TestErrPrintf("Property #1 value doesn't match!, line=%d\n", __LINE__);
+
+    /* The compare callback should not have been called */
+    VERIFY(prop1_cb_info.cmp_count, 0, "H5Pset");
+
+    /* Set value of property #3 to different value */
+    ret = H5Pset(lid1, PROP3_NAME, prop3_new_value);
+    CHECK_I(ret, "H5Pset");
+
+    /* The compare callback should not have been called */
+    VERIFY(prop3_cb_info.cmp_count, 0, "H5Pset");
+
+    /* Check new value of tracked properties */
+    ret = H5Pget(lid1, PROP1_NAME, &prop1_value);
+    CHECK_I(ret, "H5Pget");
+    VERIFY(prop1_value, prop1_new_value, "H5Pget");
+
+    /* Verify get callback information again for properties tracked */
+    VERIFY(prop1_cb_info.get_count, 2, "H5Pget");
+    VERIFY(prop1_cb_info.get_plist_id, lid1, "H5Pget");
+    if (HDstrcmp(prop1_cb_info.get_name, PROP1_NAME) != 0)
+        TestErrPrintf("Property #1 name doesn't match!, line=%d\n", __LINE__);
+    if (HDmemcmp(prop1_cb_info.get_value, &prop1_new_value, PROP1_SIZE) != 0)
+        TestErrPrintf("Property #1 value doesn't match!, line=%d\n", __LINE__);
+
+    /* Delete property #2 */
+    ret = H5Premove(lid1, PROP2_NAME);
+    CHECK_I(ret, "H5Premove");
+
+    /* Verify delete callback information for properties tracked */
+    VERIFY(prop2_cb_info.del_count, 1, "H5Premove");
+    VERIFY(prop2_cb_info.del_plist_id, lid1, "H5Premove");
+    if (HDstrcmp(prop2_cb_info.del_name, PROP2_NAME) != 0)
+        TestErrPrintf("Property #2 name doesn't match!, line=%d\n", __LINE__);
+    if (HDmemcmp(prop2_cb_info.del_value, PROP2_DEF_VALUE, PROP2_SIZE) != 0)
+        TestErrPrintf("Property #2 value doesn't match!, line=%d\n", __LINE__);
+
+    /* Copy first list */
+    lid2 = H5Pcopy(lid1);
+    CHECK_I(lid2, "H5Pcopy");
+
+    /* Verify copy callback information for properties tracked */
+    VERIFY(prop1_cb_info.cop_count, 1, "H5Pcopy");
+    if (HDstrcmp(prop1_cb_info.cop_name, PROP1_NAME) != 0)
+        TestErrPrintf("Property #1 name doesn't match!, line=%d\n", __LINE__);
+    if (HDmemcmp(prop1_cb_info.cop_value, &prop1_new_value, PROP1_SIZE) != 0)
+        TestErrPrintf("Property #1 value doesn't match!, line=%d\n", __LINE__);
+
+    /* Verify that the class creation callback occurred */
+    VERIFY(cop_cb_struct.count, 1, "H5Pcopy");
+    VERIFY(cop_cb_struct.id, lid2, "H5Pcopy");
+
+    /* Compare the two lists */
+    ret = H5Pequal(lid1, lid2);
+    VERIFY(ret, 1, "H5Pequal");
+
+    /* Verify compare callback information for properties tracked */
+    VERIFY(prop1_cb_info.cmp_count, 1, "H5Pequal");
+    VERIFY(prop3_cb_info.cmp_count, 1, "H5Pequal");
+
+    /* Close first list */
+    ret = H5Pclose(lid1);
+    CHECK_I(ret, "H5Pclose");
+
+    /* Verify close callback information for properties tracked */
+    VERIFY(prop1_cb_info.cls_count, 1, "H5Pclose");
+    if (HDstrcmp(prop1_cb_info.cls_name, PROP1_NAME) != 0)
+        TestErrPrintf("Property #1 name doesn't match!, line=%d\n", __LINE__);
+    if (HDmemcmp(prop1_cb_info.cls_value, &prop1_new_value, PROP1_SIZE) != 0)
+        TestErrPrintf("Property #1 value doesn't match!, line=%d\n", __LINE__);
+
+    /* Close second list */
+    ret = H5Pclose(lid2);
+    CHECK_I(ret, "H5Pclose");
+
+    /* Verify close callback information for properties tracked */
+    VERIFY(prop1_cb_info.cls_count, 2, "H5Pclose");
+
+    /* Free memory allocated for tracking properties */
+    HDfree(prop1_cb_info.crt_name);
+    HDfree(prop1_cb_info.crt_value);
+    HDfree(prop1_cb_info.get_name);
+    HDfree(prop1_cb_info.get_value);
+    HDfree(prop1_cb_info.set_name);
+    HDfree(prop1_cb_info.set_value);
+    HDfree(prop1_cb_info.cop_name);
+    HDfree(prop1_cb_info.cop_value);
+    HDfree(prop1_cb_info.cls_name);
+    HDfree(prop1_cb_info.cls_value);
+    HDfree(prop2_cb_info.del_name);
+    HDfree(prop2_cb_info.del_value);
+
+    /* Close class */
+    ret = H5Pclose_class(cid1);
+    CHECK_I(ret, "H5Pclose_class");
+} /* end test_genprop_list_callback() */
+
+/****************************************************************
+**
+** test_genprop_list_addprop(): Test adding properties to a
+** standard HDF5 property list and verify that the library
+** ignores the extra properties.
+**
+**  Inserts a temporary property into a dataset-creation plist,
+**  uses the plist to create a dataset, and checks that the extra
+**  property survives (and keeps its value) across the use.
+**
+****************************************************************/
+static void
+test_genprop_list_addprop(void)
+{
+    hid_t  fid;         /* File ID */
+    hid_t  did;         /* Dataset ID */
+    hid_t  sid;         /* Dataspace ID */
+    hid_t  pid;         /* Property List ID */
+    int    prop1_value; /* Value for property #1 */
+    herr_t ret;         /* Generic return value */
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create scalar dataspace for dataset */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create a dataset creation property list */
+    pid = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(pid, FAIL, "H5Pcreate");
+
+    /* Insert temporary property into class (with no callbacks) */
+    ret = H5Pinsert2(pid, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+    CHECK_I(ret, "H5Pinsert2");
+
+    /* Check existence of added property */
+    ret = H5Pexist(pid, PROP1_NAME);
+    VERIFY(ret, 1, "H5Pexist");
+
+    /* Check values of property (set with default value) */
+    ret = H5Pget(pid, PROP1_NAME, &prop1_value);
+    CHECK_I(ret, "H5Pget");
+    VERIFY(prop1_value, *PROP1_DEF_VALUE, "H5Pget");
+
+    /* Create a dataset using the plist carrying the extra property */
+    did = H5Dcreate2(fid, "Dataset1", H5T_NATIVE_INT, sid, H5P_DEFAULT, pid, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dcreate2");
+
+    /* Check existence of added property (after using property list) */
+    ret = H5Pexist(pid, PROP1_NAME);
+    VERIFY(ret, 1, "H5Pexist");
+
+    /* Check values of property (set with default value) (after using property list) */
+    ret = H5Pget(pid, PROP1_NAME, &prop1_value);
+    CHECK_I(ret, "H5Pget");
+    VERIFY(prop1_value, *PROP1_DEF_VALUE, "H5Pget");
+
+    /* Close property list */
+    ret = H5Pclose(pid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_genprop_list_addprop() */
+
+/****************************************************************
+**
+** test_genprop_class_addprop(): Test adding properties to a
+** standard HDF5 property class and verify that the library
+** ignores the extra properties and continues to recognize the
+** derived class as a valid version of the derived-from class.
+**
+**  Note: properties registered on the class AFTER a list is
+**  created from it do not appear in that existing list.
+**
+****************************************************************/
+static void
+test_genprop_class_addprop(void)
+{
+    hid_t  fid;         /* File ID */
+    hid_t  did;         /* Dataset ID */
+    hid_t  sid;         /* Dataspace ID */
+    hid_t  cid;         /* Property Class ID */
+    hid_t  pid;         /* Property List ID */
+    int    prop1_value; /* Value for property #1 */
+    herr_t ret;         /* Generic return value */
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create scalar dataspace for dataset */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create a new class, derived from the dataset creation property list class */
+    cid = H5Pcreate_class(H5P_DATASET_CREATE, CLASS1_NAME, NULL, NULL, NULL, NULL, NULL, NULL);
+    CHECK_I(cid, "H5Pcreate_class");
+#if 0
+    /* Check existence of an original property */
+    ret = H5Pexist(cid, H5O_CRT_PIPELINE_NAME);
+    VERIFY(ret, 1, "H5Pexist");
+#endif
+    /* Insert first property into class (with no callbacks) */
+    ret =
+        H5Pregister2(cid, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+    CHECK_I(ret, "H5Pregister2");
+#if 0
+    /* Check existence of an original property */
+    ret = H5Pexist(cid, H5O_CRT_PIPELINE_NAME);
+    VERIFY(ret, 1, "H5Pexist");
+#endif
+    /* Check existence of added property */
+    ret = H5Pexist(cid, PROP1_NAME);
+    VERIFY(ret, 1, "H5Pexist");
+
+    /* Create a derived dataset creation property list */
+    pid = H5Pcreate(cid);
+    CHECK(pid, FAIL, "H5Pcreate");
+#if 0
+    /* Check existence of an original property */
+    ret = H5Pexist(pid, H5O_CRT_PIPELINE_NAME);
+    VERIFY(ret, 1, "H5Pexist");
+#endif
+    /* Check existence of added property */
+    ret = H5Pexist(pid, PROP1_NAME);
+    VERIFY(ret, 1, "H5Pexist");
+
+    /* Check values of property (set with default value) */
+    ret = H5Pget(pid, PROP1_NAME, &prop1_value);
+    CHECK_I(ret, "H5Pget");
+    VERIFY(prop1_value, *PROP1_DEF_VALUE, "H5Pget");
+
+    /* Insert second property into class (with no callbacks) — AFTER the list
+     * was created, so it should show up in the class but not in the list */
+    ret =
+        H5Pregister2(cid, PROP2_NAME, PROP2_SIZE, PROP2_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+    CHECK_I(ret, "H5Pregister2");
+#if 0
+    /* Check existence of an original property (in class) */
+    ret = H5Pexist(cid, H5O_CRT_PIPELINE_NAME);
+    VERIFY(ret, 1, "H5Pexist");
+#endif
+    /* Check existence of first added property (in class) */
+    ret = H5Pexist(cid, PROP1_NAME);
+    VERIFY(ret, 1, "H5Pexist");
+
+    /* Check existence of second added property (in class) */
+    ret = H5Pexist(cid, PROP2_NAME);
+    VERIFY(ret, 1, "H5Pexist");
+#if 0
+    /* Check existence of an original property (in property list) */
+    ret = H5Pexist(pid, H5O_CRT_PIPELINE_NAME);
+    VERIFY(ret, 1, "H5Pexist");
+#endif
+    /* Check existence of first added property (in property list) */
+    ret = H5Pexist(pid, PROP1_NAME);
+    VERIFY(ret, 1, "H5Pexist");
+
+    /* Check existence of second added property (in property list) (should not exist) */
+    ret = H5Pexist(pid, PROP2_NAME);
+    VERIFY(ret, 0, "H5Pexist");
+
+    /* Create a dataset using the derived plist */
+    did = H5Dcreate2(fid, "Dataset1", H5T_NATIVE_INT, sid, H5P_DEFAULT, pid, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dcreate2");
+
+    /* Check existence of added property (after using property list) */
+    ret = H5Pexist(pid, PROP1_NAME);
+    VERIFY(ret, 1, "H5Pexist");
+
+    /* Check values of property (set with default value) (after using property list) */
+    ret = H5Pget(pid, PROP1_NAME, &prop1_value);
+    CHECK_I(ret, "H5Pget");
+    VERIFY(prop1_value, *PROP1_DEF_VALUE, "H5Pget");
+
+    /* Close property class */
+    ret = H5Pclose_class(cid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close property list */
+    ret = H5Pclose(pid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_genprop_class_addprop() */
+
+/****************************************************************
+**
+** test_genprop_list_add_remove_prop(): Test adding then removing the
+** same properties to a standard HDF5 property list. This is testing
+** also for a memory leak that could be caused by not freeing the
+** removed property resources from the property list.
+**
+**  Inserts and removes the same temporary property twice; leak
+**  detection relies on the test harness's memory checking.
+**
+****************************************************************/
+static void
+test_genprop_list_add_remove_prop(void)
+{
+    hid_t  pid; /* Property List ID */
+    herr_t ret; /* Generic return value */
+
+    /* Create a dataset creation property list */
+    pid = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(pid, FAIL, "H5Pcreate");
+
+    /* Insert temporary property into class (with no callbacks) */
+    ret = H5Pinsert2(pid, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+    CHECK_I(ret, "H5Pinsert2");
+
+    /* Delete added property */
+    ret = H5Premove(pid, PROP1_NAME);
+    CHECK_I(ret, "H5Premove");
+
+    /* Insert temporary property into class (with no callbacks) — second
+     * round exercises re-insertion after removal */
+    ret = H5Pinsert2(pid, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+    CHECK_I(ret, "H5Pinsert2");
+
+    /* Delete added property */
+    ret = H5Premove(pid, PROP1_NAME);
+    CHECK_I(ret, "H5Premove");
+
+    /* Close property list */
+    ret = H5Pclose(pid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+} /* end test_genprop_list_add_remove_prop() */
+
+/****************************************************************
+**
+** test_genprop_equal(): Test basic generic property list code.
+** More tests for H5Pequal()
+**
+****************************************************************/
+static void
+test_genprop_equal(void)
+{
+ hid_t cid1; /* Generic Property class ID */
+ hid_t lid1; /* Generic Property list ID */
+ hid_t lid2; /* Generic Property list ID */
+ int prop1_new_value = 20; /* Property #1 new value */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Basic Generic Property List Equal Functionality\n"));
+
+ /* Create a new generic class, derived from the root of the class hierarchy */
+ cid1 = H5Pcreate_class(H5P_ROOT, CLASS1_NAME, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(cid1, "H5Pcreate_class");
+
+ /* Insert first property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Insert second property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP2_NAME, PROP2_SIZE, PROP2_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Create a property list from the class */
+ lid1 = H5Pcreate(cid1);
+ CHECK_I(lid1, "H5Pcreate");
+
+ /* Copy the property list */
+ lid2 = H5Pcopy(lid1);
+ CHECK_I(lid2, "H5Pcopy");
+
+ /* Check that the lists are equal */
+ ret = H5Pequal(lid1, lid2);
+ VERIFY(ret, 1, "H5Pequal");
+
+ /* Set property in first list to another value */
+ ret = H5Pset(lid1, PROP1_NAME, &prop1_new_value);
+ CHECK_I(ret, "H5Pset");
+
+ /* Check that the lists are not equal */
+ ret = H5Pequal(lid1, lid2);
+ VERIFY(ret, 0, "H5Pequal");
+
+ /* Set property in first list back to default */
+ ret = H5Pset(lid1, PROP1_NAME, PROP1_DEF_VALUE);
+ CHECK_I(ret, "H5Pset");
+
+ /* Check that the lists are still equal */
+ ret = H5Pequal(lid1, lid2);
+ VERIFY(ret, 1, "H5Pequal");
+
+ /* Insert first temporary property into first list (with no callbacks) */
+ ret = H5Pinsert2(lid1, PROP3_NAME, PROP3_SIZE, PROP3_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pinsert2");
+
+ /* Check that the lists are not equal */
+ ret = H5Pequal(lid1, lid2);
+ VERIFY(ret, 0, "H5Pequal");
+
+ /* Insert first temporary property into second list (with no callbacks) */
+ ret = H5Pinsert2(lid2, PROP3_NAME, PROP3_SIZE, PROP3_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pinsert2");
+
+ /* Check that the lists are equal */
+ ret = H5Pequal(lid1, lid2);
+ VERIFY(ret, 1, "H5Pequal");
+
+ /* Insert second temporary property into second list (with no callbacks) */
+ ret = H5Pinsert2(lid2, PROP4_NAME, PROP4_SIZE, PROP4_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pinsert2");
+
+ /* Check that the lists are not equal */
+ ret = H5Pequal(lid1, lid2);
+ VERIFY(ret, 0, "H5Pequal");
+
+ /* Insert second temporary property into first list (with no callbacks) */
+ ret = H5Pinsert2(lid1, PROP4_NAME, PROP4_SIZE, PROP4_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pinsert2");
+
+ /* Check that the lists are equal */
+ ret = H5Pequal(lid1, lid2);
+ VERIFY(ret, 1, "H5Pequal");
+
+ /* Remove first temporary property from first list */
+ ret = H5Premove(lid1, PROP3_NAME);
+ CHECK_I(ret, "H5Premove");
+
+ /* Check that the lists are not equal */
+ ret = H5Pequal(lid1, lid2);
+ VERIFY(ret, 0, "H5Pequal");
+
+ /* Remove second temporary property from second list */
+ ret = H5Premove(lid2, PROP4_NAME);
+ CHECK_I(ret, "H5Premove");
+
+ /* Check that the lists are not equal */
+ ret = H5Pequal(lid1, lid2);
+ VERIFY(ret, 0, "H5Pequal");
+
+ /* Remove first temporary property from second list */
+ ret = H5Premove(lid2, PROP3_NAME);
+ CHECK_I(ret, "H5Premove");
+
+ /* Check that the lists are not equal */
+ ret = H5Pequal(lid1, lid2);
+ VERIFY(ret, 0, "H5Pequal");
+
+ /* Remove first permanent property from first list */
+ ret = H5Premove(lid1, PROP1_NAME);
+ CHECK_I(ret, "H5Premove");
+
+ /* Check that the lists are not equal */
+ ret = H5Pequal(lid1, lid2);
+ VERIFY(ret, 0, "H5Pequal");
+
+ /* Remove second temporary property from first list */
+ ret = H5Premove(lid1, PROP4_NAME);
+ CHECK_I(ret, "H5Premove");
+
+ /* Check that the lists are not equal */
+ ret = H5Pequal(lid1, lid2);
+ VERIFY(ret, 0, "H5Pequal");
+
+ /* Remove first permanent property from second list */
+ ret = H5Premove(lid2, PROP1_NAME);
+ CHECK_I(ret, "H5Premove");
+
+ /* Check that the lists are equal */
+ ret = H5Pequal(lid1, lid2);
+ VERIFY(ret, 1, "H5Pequal");
+
+ /* Close property lists */
+ ret = H5Pclose(lid1);
+ CHECK_I(ret, "H5Pclose");
+ ret = H5Pclose(lid2);
+ CHECK_I(ret, "H5Pclose");
+
+ /* Close class */
+ ret = H5Pclose_class(cid1);
+ CHECK_I(ret, "H5Pclose_class");
+} /* end test_genprop_equal() */
+
+/****************************************************************
+**
+** test_genprop_path(): Test basic generic property list code.
+** Tests for class paths
+**
+****************************************************************/
+static void
+test_genprop_path(void)
+{
+ hid_t cid1; /* Generic Property class ID */
+ hid_t cid2; /* Generic Property class ID */
+#if 0
+ hid_t cid3; /* Generic Property class ID */
+ char *path; /* Class path */
+#endif
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Generic Property List Class Path Functionality\n"));
+
+ /* Create a new generic class, derived from the root of the class hierarchy */
+ cid1 = H5Pcreate_class(H5P_ROOT, CLASS1_NAME, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(cid1, "H5Pcreate_class");
+
+ /* Insert first property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+#if 0
+ /* Get full path for first class */
+ path = H5P__get_class_path_test(cid1);
+ CHECK_PTR(path, "H5P__get_class_path_test");
+ if (HDstrcmp(path, CLASS1_PATH) != 0)
+ TestErrPrintf("Class names don't match!, path=%s, CLASS1_PATH=%s\n", path, CLASS1_PATH);
+ H5free_memory(path);
+#endif
+ /* Create another new generic class, derived from first class */
+ cid2 = H5Pcreate_class(cid1, CLASS2_NAME, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(cid2, "H5Pcreate_class");
+
+ /* Insert second property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid2, PROP2_NAME, PROP2_SIZE, PROP2_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+#if 0
+ /* Get full path for second class */
+ path = H5P__get_class_path_test(cid2);
+ CHECK_PTR(path, "H5P__get_class_path_test");
+ if (HDstrcmp(path, CLASS2_PATH) != 0)
+ TestErrPrintf("Class names don't match!, path=%s, CLASS2_PATH=%s\n", path, CLASS2_PATH);
+
+ /* Open a copy of the class with the path name */
+ cid3 = H5P__open_class_path_test(path);
+ CHECK_I(cid3, "H5P__open_class_path_test");
+
+ /* Check that the classes are equal */
+ ret = H5Pequal(cid2, cid3);
+ VERIFY(ret, 1, "H5Pequal");
+
+ /* Release the path string */
+ H5free_memory(path);
+
+ /* Close class */
+ ret = H5Pclose_class(cid3);
+ CHECK_I(ret, "H5Pclose_class");
+#endif
+ /* Close first class */
+ ret = H5Pclose_class(cid1);
+ CHECK_I(ret, "H5Pclose_class");
+
+ /* Close second class */
+ ret = H5Pclose_class(cid2);
+ CHECK_I(ret, "H5Pclose_class");
+
+} /* end test_genprop_path() */
+
+/****************************************************************
+**
+** test_genprop_refcount(): Test basic generic property list code.
+** Tests for correct reference counting
+**
+****************************************************************/
+static void
+test_genprop_refcount(void)
+{
+ hid_t cid1; /* Generic Property class ID */
+ hid_t lid1; /* Generic Property list ID */
+ char *name; /* Name of class */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Generic Property List Reference Count Functionality\n"));
+
+ /* Create a new generic class, derived from the root of the class hierarchy */
+ cid1 = H5Pcreate_class(H5P_ROOT, CLASS1_NAME, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(cid1, "H5Pcreate_class");
+
+ /* Insert first property into class (with no callbacks) */
+ ret =
+ H5Pregister2(cid1, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister2");
+
+ /* Create a new generic list, derived from the first class */
+ lid1 = H5Pcreate(cid1);
+ CHECK_I(lid1, "H5Pcreate");
+
+ /* Check class name */
+ name = H5Pget_class_name(cid1);
+ CHECK_PTR(name, "H5Pget_class_name");
+ if (HDstrcmp(name, CLASS1_NAME) != 0)
+ TestErrPrintf("Class names don't match!, name=%s, CLASS1_NAME=%s\n", name, CLASS1_NAME);
+ H5free_memory(name);
+
+ /* Close class (the list created from it should keep it alive) */
+ ret = H5Pclose_class(cid1);
+ CHECK_I(ret, "H5Pclose_class");
+
+ /* Get the list's class */
+ cid1 = H5Pget_class(lid1);
+ CHECK_I(cid1, "H5Pget_class");
+
+ /* Check correct "is a" class/list relationship */
+ ret = H5Pisa_class(lid1, cid1);
+ VERIFY(ret, 1, "H5Pisa_class");
+
+ /* Check class name */
+ name = H5Pget_class_name(cid1);
+ CHECK_PTR(name, "H5Pget_class_name");
+ if (HDstrcmp(name, CLASS1_NAME) != 0)
+ TestErrPrintf("Class names don't match!, name=%s, CLASS1_NAME=%s\n", name, CLASS1_NAME);
+ H5free_memory(name);
+
+ /* Close list */
+ ret = H5Pclose(lid1);
+ CHECK_I(ret, "H5Pclose");
+
+ /* Check class name (class ID from H5Pget_class is still valid) */
+ name = H5Pget_class_name(cid1);
+ CHECK_PTR(name, "H5Pget_class_name");
+ if (HDstrcmp(name, CLASS1_NAME) != 0)
+ TestErrPrintf("Class names don't match!, name=%s, CLASS1_NAME=%s\n", name, CLASS1_NAME);
+ H5free_memory(name);
+
+ /* Close class */
+ ret = H5Pclose_class(cid1);
+ CHECK_I(ret, "H5Pclose_class");
+
+} /* end test_genprop_refcount() */
+
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+/****************************************************************
+**
+** test_genprop_deprec_class(): Test basic generic property list code.
+** Tests deprecated property class API routines.
+**
+****************************************************************/
+static void
+test_genprop_deprec_class(void)
+{
+ hid_t cid1; /* Generic Property class ID */
+ size_t size; /* Size of property */
+ size_t nprops; /* Number of properties in class */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Deprecated Generic Property List Functions\n"));
+
+ /* Create a new generic class, derived from the root of the class hierarchy */
+ cid1 = H5Pcreate_class(H5P_ROOT, CLASS1_NAME, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(cid1, "H5Pcreate_class");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 0, "H5Pget_nprops");
+
+ /* Check the existence of the first property (should not exist yet) */
+ ret = H5Pexist(cid1, PROP1_NAME);
+ VERIFY(ret, 0, "H5Pexist");
+
+ /* Insert first property into class (with no callbacks) */
+ ret = H5Pregister1(cid1, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister1");
+
+ /* Try to insert the first property again (should fail) */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pregister1(cid1, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pregister1");
+
+ /* Check the existence of the first property */
+ ret = H5Pexist(cid1, PROP1_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+
+ /* Check the size of the first property */
+ ret = H5Pget_size(cid1, PROP1_NAME, &size);
+ CHECK_I(ret, "H5Pget_size");
+ VERIFY(size, PROP1_SIZE, "H5Pget_size");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 1, "H5Pget_nprops");
+
+ /* Insert second property into class (with no callbacks) */
+ ret = H5Pregister1(cid1, PROP2_NAME, PROP2_SIZE, PROP2_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister1");
+
+ /* Try to insert the second property again (should fail) */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pregister1(cid1, PROP2_NAME, PROP2_SIZE, PROP2_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pregister1");
+
+ /* Check the existence of the second property */
+ ret = H5Pexist(cid1, PROP2_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+
+ /* Check the size of the second property */
+ ret = H5Pget_size(cid1, PROP2_NAME, &size);
+ CHECK_I(ret, "H5Pget_size");
+ VERIFY(size, PROP2_SIZE, "H5Pget_size");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 2, "H5Pget_nprops");
+
+ /* Insert third property into class (with no callbacks) */
+ ret = H5Pregister1(cid1, PROP3_NAME, PROP3_SIZE, PROP3_DEF_VALUE, NULL, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pregister1");
+
+ /* Check the existence of the third property */
+ ret = H5Pexist(cid1, PROP3_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+
+ /* Check the size of the third property */
+ ret = H5Pget_size(cid1, PROP3_NAME, &size);
+ CHECK_I(ret, "H5Pget_size");
+ VERIFY(size, PROP3_SIZE, "H5Pget_size");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 3, "H5Pget_nprops");
+
+ /* Unregister first property */
+ ret = H5Punregister(cid1, PROP1_NAME);
+ CHECK_I(ret, "H5Punregister");
+
+ /* Try to check the size of the first property (should fail) */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pget_size(cid1, PROP1_NAME, &size);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pget_size");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 2, "H5Pget_nprops");
+
+ /* Unregister second property */
+ ret = H5Punregister(cid1, PROP2_NAME);
+ CHECK_I(ret, "H5Punregister");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 1, "H5Pget_nprops");
+
+ /* Unregister third property */
+ ret = H5Punregister(cid1, PROP3_NAME);
+ CHECK_I(ret, "H5Punregister");
+
+ /* Check the number of properties in class */
+ ret = H5Pget_nprops(cid1, &nprops);
+ CHECK_I(ret, "H5Pget_nprops");
+ VERIFY(nprops, 0, "H5Pget_nprops");
+
+ /* Close class */
+ ret = H5Pclose_class(cid1);
+ CHECK_I(ret, "H5Pclose_class");
+} /* end test_genprop_deprec_class() */
+
+/****************************************************************
+**
+** test_genprop_deprec_list(): Test basic generic property list code.
+** Tests deprecated property list API routines.
+**
+****************************************************************/
+static void
+test_genprop_deprec_list(void)
+{
+ hid_t fid; /* File ID */
+ hid_t did; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t pid; /* Property List ID */
+ int prop1_value; /* Value for property #1 */
+ herr_t ret; /* Generic return value */
+
+ /* Create file */
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create scalar dataspace for dataset */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Create a dataset creation property list */
+ pid = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(pid, FAIL, "H5Pcreate");
+
+ /* Insert temporary property into list (with no callbacks) */
+ ret = H5Pinsert1(pid, PROP1_NAME, PROP1_SIZE, PROP1_DEF_VALUE, NULL, NULL, NULL, NULL, NULL);
+ CHECK_I(ret, "H5Pinsert1");
+
+ /* Check existence of added property */
+ ret = H5Pexist(pid, PROP1_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+
+ /* Check values of property (set with default value) */
+ ret = H5Pget(pid, PROP1_NAME, &prop1_value);
+ CHECK_I(ret, "H5Pget");
+ VERIFY(prop1_value, *PROP1_DEF_VALUE, "H5Pget");
+
+ /* Create a dataset */
+ did = H5Dcreate2(fid, "Dataset1", H5T_NATIVE_INT, sid, H5P_DEFAULT, pid, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dcreate2");
+
+ /* Check existence of added property (after using property list) */
+ ret = H5Pexist(pid, PROP1_NAME);
+ VERIFY(ret, 1, "H5Pexist");
+
+ /* Check values of property (set with default value) (after using property list) */
+ ret = H5Pget(pid, PROP1_NAME, &prop1_value);
+ CHECK_I(ret, "H5Pget");
+ VERIFY(prop1_value, *PROP1_DEF_VALUE, "H5Pget");
+
+ /* Close property list */
+ ret = H5Pclose(pid);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* end test_genprop_deprec_list() */
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+
+/****************************************************************
+**
+** test_genprop(): Main generic property testing routine.
+**
+****************************************************************/
+void
+test_genprop(void)
+{
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Generic Properties\n"));
+
+ /* These tests use the same file... */
+ test_genprop_basic_class(); /* Test basic code for creating a generic class */
+ test_genprop_basic_class_prop(); /* Test basic code for adding properties to a generic class */
+ test_genprop_class_iter(); /* Test code for iterating over properties in a generic class */
+ test_genprop_class_callback(); /* Test code for property class callbacks */
+
+ test_genprop_basic_list(); /* Test basic code for creating a generic property list */
+ test_genprop_basic_list_prop(); /* Test basic code for adding properties to a generic property list */
+ test_genprop_list_iter(); /* Test basic code for iterating over properties in a generic property list */
+ test_genprop_list_callback(); /* Test code for property list callbacks */
+
+ test_genprop_list_addprop(); /* Test adding properties to HDF5 property list */
+ test_genprop_class_addprop(); /* Test adding properties to HDF5 property class */
+
+ test_genprop_list_add_remove_prop(); /* Test adding and removing the same property several times to HDF5
+ property list */
+
+ test_genprop_equal(); /* Tests for more H5Pequal verification */
+ test_genprop_path(); /* Tests for class path verification */
+ test_genprop_refcount(); /* Tests for class reference counting */
+
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+ test_genprop_deprec_class(); /* Tests for deprecated routines */
+ test_genprop_deprec_list(); /* Tests for deprecated routines */
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+
+} /* end test_genprop() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_genprop
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Quincey Koziol
+ * June 8, 1999
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_genprop(void)
+{
+ H5Fdelete(FILENAME, H5P_DEFAULT); /* remove the test file created by the generic property tests */
+}
diff --git a/test/API/th5o.c b/test/API/th5o.c
new file mode 100644
index 0000000..916f005
--- /dev/null
+++ b/test/API/th5o.c
@@ -0,0 +1,1889 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: th5o
+ *
+ * Test public H5O functions for accessing objects
+ *
+ *************************************************************/
+
+#include "testhdf5.h"
+
+#if 0
+#include "H5Fprivate.h"
+#include "H5VLprivate.h"
+#include "H5VLnative_private.h"
+#endif
+
+#define TEST_FILENAME "th5o_file.h5"
+
+#define RANK 2
+#define DIM0 5
+#define DIM1 10
+
+#define TEST6_DIM1 100
+#define TEST6_DIM2 100
+
+/****************************************************************
+**
+** test_h5o_open(): Test H5Oopen function.
+**
+****************************************************************/
+static void
+test_h5o_open(void)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t grp, dset, dtype, dspace; /* Object identifiers */
+ char filename[1024];
+ hsize_t dims[RANK];
+ H5I_type_t id_type; /* Type of IDs returned from H5Oopen */
+ H5G_info_t ginfo; /* Group info struct */
+ H5T_class_t type_class; /* Class of the datatype */
+ herr_t ret; /* Value returned from API calls */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing H5Oopen\n"));
+
+ h5_fixname(TEST_FILENAME, H5P_DEFAULT, filename, sizeof filename);
+
+ /* Create a new HDF5 file */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create a group, dataset, and committed datatype within the file */
+ /* Create the group */
+ grp = H5Gcreate2(fid, "group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gcreate2");
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Commit the type inside the group */
+ dtype = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(dtype, FAIL, "H5Tcopy");
+ ret = H5Tcommit2(fid, "group/datatype", dtype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Create the data space for the dataset. */
+ dims[0] = DIM0;
+ dims[1] = DIM1;
+ dspace = H5Screate_simple(RANK, dims, NULL);
+ CHECK(dspace, FAIL, "H5Screate_simple");
+
+ /* Create the dataset. */
+ dset = H5Dcreate2(fid, "dataset", H5T_NATIVE_INT, dspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dcreate2");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Sclose(dspace);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Now make sure that H5Oopen can open all three types of objects */
+ grp = H5Oopen(fid, "group", H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Oopen");
+ dtype = H5Oopen(fid, "group/datatype", H5P_DEFAULT);
+ CHECK(dtype, FAIL, "H5Oopen");
+ /* Check that we can use the group as a valid location (with an absolute path) */
+ dset = H5Oopen(grp, "/dataset", H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Oopen");
+
+ /* Make sure that each is the right kind of ID */
+ id_type = H5Iget_type(grp);
+ VERIFY(id_type, H5I_GROUP, "H5Iget_type for group ID");
+ id_type = H5Iget_type(dtype);
+ VERIFY(id_type, H5I_DATATYPE, "H5Iget_type for datatype ID");
+ id_type = H5Iget_type(dset);
+ VERIFY(id_type, H5I_DATASET, "H5Iget_type for dataset ID");
+
+ /* Do something more complex with each of the IDs to make sure they "work" */
+ ret = H5Gget_info(grp, &ginfo);
+ CHECK(ret, FAIL, "H5Gget_info");
+ VERIFY(ginfo.nlinks, 1, "H5Gget_info"); /* There should be one object, the datatype */
+
+ type_class = H5Tget_class(dtype);
+ VERIFY(type_class, H5T_INTEGER, "H5Tget_class");
+
+ dspace = H5Dget_space(dset);
+ CHECK(dspace, FAIL, "H5Dget_space");
+
+ /* Close the IDs */
+ ret = H5Sclose(dspace);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Trying to open objects with bogus names should fail gracefully */
+ H5E_BEGIN_TRY
+ {
+ grp = H5Oopen(fid, "bogus_group", H5P_DEFAULT);
+ VERIFY(grp, FAIL, "H5Oopen");
+ dtype = H5Oopen(fid, "group/bogus_datatype", H5P_DEFAULT);
+ VERIFY(dtype, FAIL, "H5Oopen");
+ dset = H5Oopen(fid, "/bogus_dataset", H5P_DEFAULT);
+ VERIFY(dset, FAIL, "H5Oopen");
+ }
+ H5E_END_TRY
+
+ /* Close the file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Trying to open an object with a bogus file ID should fail */
+ H5E_BEGIN_TRY
+ {
+ dset = H5Oopen(fid, "dataset", H5P_DEFAULT);
+ VERIFY(dset, FAIL, "H5Oopen");
+ }
+ H5E_END_TRY
+} /* test_h5o_open() */
+
+/****************************************************************
+**
+** test_h5o_close(): Test H5Oclose function.
+**
+****************************************************************/
+static void
+test_h5o_close(void)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t grp, dset, dtype, dspace; /* Object identifiers */
+ char filename[1024];
+ hsize_t dims[RANK];
+ herr_t ret; /* Value returned from API calls */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing H5Oclose\n"));
+
+ h5_fixname(TEST_FILENAME, H5P_DEFAULT, filename, sizeof filename);
+
+ /* Create a new HDF5 file */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create a group, dataset, and committed datatype within the file */
+ /* Create the group and close it with H5Oclose */
+ grp = H5Gcreate2(fid, "group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gcreate2");
+ VERIFY_TYPE(H5Iget_type(grp), H5I_GROUP, H5I_type_t, "%d", "H5Iget_type");
+ ret = H5Oclose(grp);
+ CHECK(ret, FAIL, "H5Oclose");
+
+ /* Commit the type inside the group */
+ dtype = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(dtype, FAIL, "H5Tcopy");
+ ret = H5Tcommit2(fid, "group/datatype", dtype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+ ret = H5Oclose(dtype);
+ CHECK(ret, FAIL, "H5Oclose");
+
+ /* Create the data space for the dataset. */
+ dims[0] = DIM0;
+ dims[1] = DIM1;
+ dspace = H5Screate_simple(RANK, dims, NULL);
+ CHECK(dspace, FAIL, "H5Screate_simple");
+
+ /* Create the dataset. */
+ dset = H5Dcreate2(fid, "dataset", H5T_NATIVE_INT, dspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dcreate2");
+ ret = H5Oclose(dset);
+ CHECK(ret, FAIL, "H5Oclose");
+
+ /* Attempting to close the data space with H5Oclose should fail */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Oclose(dspace);
+ VERIFY(ret, FAIL, "H5Oclose");
+ }
+ H5E_END_TRY
+ /* Close the dataspace for real */
+ ret = H5Sclose(dspace);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Make sure that H5Oclose can close objects opened with H5Oopen */
+ grp = H5Oopen(fid, "group", H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Oopen");
+ dtype = H5Oopen(fid, "group/datatype", H5P_DEFAULT);
+ CHECK(dtype, FAIL, "H5Oopen");
+ dset = H5Oopen(fid, "dataset", H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Oopen");
+
+ ret = H5Oclose(grp);
+ CHECK(ret, FAIL, "H5Oclose");
+ ret = H5Oclose(dtype);
+ CHECK(ret, FAIL, "H5Oclose");
+ ret = H5Oclose(dset);
+ CHECK(ret, FAIL, "H5Oclose");
+
+ /* Make sure H5Oclose can close objects opened with H5*open */
+ grp = H5Gopen2(fid, "group", H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gopen2");
+ dtype = H5Topen2(fid, "group/datatype", H5P_DEFAULT);
+ CHECK(dtype, FAIL, "H5Topen2");
+ dset = H5Dopen2(fid, "dataset", H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dopen2");
+
+ ret = H5Oclose(grp);
+ CHECK(ret, FAIL, "H5Oclose");
+ ret = H5Oclose(dtype);
+ CHECK(ret, FAIL, "H5Oclose");
+ ret = H5Oclose(dset);
+ CHECK(ret, FAIL, "H5Oclose");
+
+ /* Close the file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_h5o_close() */
+
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+/****************************************************************
+**
+** test_h5o_open_by_addr(): Test H5Oopen_by_addr function.
+**
+****************************************************************/
+static void
+test_h5o_open_by_addr(void)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t grp, dset, dtype, dspace; /* Object identifiers */
+ char filename[1024];
+ H5L_info2_t li; /* Buffer for H5Lget_info2 */
+ haddr_t grp_addr; /* Addresses for objects */
+ haddr_t dset_addr;
+ haddr_t dtype_addr;
+ hsize_t dims[RANK];
+ H5I_type_t id_type; /* Type of IDs returned from H5Oopen */
+ H5G_info_t ginfo; /* Group info struct */
+ H5T_class_t type_class; /* Class of the datatype */
+ herr_t ret; /* Value returned from API calls */
+
+ h5_fixname(TEST_FILENAME, H5P_DEFAULT, filename, sizeof filename);
+
+ /* Create a new HDF5 file */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create a group, dataset, and committed datatype within the file */
+ /* Create the group */
+ grp = H5Gcreate2(fid, "group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gcreate2");
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Commit the type inside the group */
+ dtype = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(dtype, FAIL, "H5Tcopy");
+ ret = H5Tcommit2(fid, "group/datatype", dtype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Create the data space for the dataset. */
+ dims[0] = DIM0;
+ dims[1] = DIM1;
+ dspace = H5Screate_simple(RANK, dims, NULL);
+ CHECK(dspace, FAIL, "H5Screate_simple");
+
+ /* Create the dataset. */
+ dset = H5Dcreate2(fid, "dataset", H5T_NATIVE_INT, dspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dcreate2");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Sclose(dspace);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Get address for each object (convert each link's object token to an address) */
+ ret = H5Lget_info2(fid, "group", &li, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lget_info2");
+ ret = H5VLnative_token_to_addr(fid, li.u.token, &grp_addr);
+ CHECK(ret, FAIL, "H5VLnative_token_to_addr");
+
+ ret = H5Lget_info2(fid, "group/datatype", &li, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lget_info2");
+ ret = H5VLnative_token_to_addr(fid, li.u.token, &dtype_addr);
+ CHECK(ret, FAIL, "H5VLnative_token_to_addr");
+
+ ret = H5Lget_info2(fid, "dataset", &li, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lget_info2");
+ ret = H5VLnative_token_to_addr(fid, li.u.token, &dset_addr);
+ CHECK(ret, FAIL, "H5VLnative_token_to_addr");
+
+ /* Now make sure that H5Oopen_by_addr can open all three types of objects */
+ grp = H5Oopen_by_addr(fid, grp_addr);
+ CHECK(grp, FAIL, "H5Oopen_by_addr");
+ dtype = H5Oopen_by_addr(fid, dtype_addr);
+ CHECK(dtype, FAIL, "H5Oopen_by_addr");
+ /* Check that we can use the group ID as a valid location */
+ dset = H5Oopen_by_addr(grp, dset_addr);
+ CHECK(dset, FAIL, "H5Oopen_by_addr");
+
+ /* Make sure that each is the right kind of ID */
+ id_type = H5Iget_type(grp);
+ VERIFY(id_type, H5I_GROUP, "H5Iget_type for group ID");
+ id_type = H5Iget_type(dtype);
+ VERIFY(id_type, H5I_DATATYPE, "H5Iget_type for datatype ID");
+ id_type = H5Iget_type(dset);
+ VERIFY(id_type, H5I_DATASET, "H5Iget_type for dataset ID");
+
+ /* Do something more complex with each of the IDs to make sure they "work" */
+ ret = H5Gget_info(grp, &ginfo);
+ CHECK(ret, FAIL, "H5Gget_info");
+ VERIFY(ginfo.nlinks, 1, "H5Gget_info"); /* There should be one object, the datatype */
+
+ type_class = H5Tget_class(dtype);
+ VERIFY(type_class, H5T_INTEGER, "H5Tget_class");
+
+ dspace = H5Dget_space(dset);
+ CHECK(dspace, FAIL, "H5Dget_space");
+
+ /* Close the IDs */
+ ret = H5Sclose(dspace);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Try giving some bogus values to H5Oopen_by_addr. */
+ /* Try to open an object with a bad address */
+ grp_addr += 20;
+ H5E_BEGIN_TRY
+ {
+ grp = H5Oopen_by_addr(fid, grp_addr);
+ }
+ H5E_END_TRY
+ VERIFY(grp, FAIL, "H5Oopen_by_addr");
+
+ /* For instance, an objectno smaller than the end of the file's superblock should
+ * trigger an error */
+ grp_addr = 10;
+ H5E_BEGIN_TRY
+ {
+ grp = H5Oopen_by_addr(fid, grp_addr);
+ }
+ H5E_END_TRY
+ VERIFY(grp, FAIL, "H5Oopen_by_addr");
+
+ /* Likewise, an objectno larger than the size of the file should fail */
+ grp_addr = 0; /* NOTE(review): value immediately overwritten below */
+ grp_addr = 1000000000;
+ H5E_BEGIN_TRY
+ {
+ grp = H5Oopen_by_addr(fid, grp_addr);
+ }
+ H5E_END_TRY
+ VERIFY(grp, FAIL, "H5Oopen_by_addr");
+
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Also, trying to open an object without a valid location should fail */
+ H5E_BEGIN_TRY
+ {
+ dtype = H5Oopen_by_addr(fid, dtype_addr);
+ }
+ H5E_END_TRY
+ VERIFY(dtype, FAIL, "H5Oopen_by_addr");
+} /* test_h5o_open_by_addr() */
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+#endif
+
+/****************************************************************
+**
+** test_h5o_open_by_token(): Test H5Oopen_by_token function.
+**
+****************************************************************/
+static void
+test_h5o_open_by_token(void)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t grp, dset, dtype, dspace; /* Object identifiers */
+ char filename[1024];
+ H5L_info2_t li; /* Buffer for H5Lget_info2 */
+ hsize_t dims[RANK];
+ H5I_type_t id_type; /* Type of IDs returned from H5Oopen */
+ H5G_info_t ginfo; /* Group info struct */
+ H5T_class_t type_class; /* Class of the datatype */
+ herr_t ret; /* Value returned from API calls */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing H5Oopen_by_token\n"));
+
+ h5_fixname(TEST_FILENAME, H5P_DEFAULT, filename, sizeof filename);
+
+ /* Create a new HDF5 file */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create a group, dataset, and committed datatype within the file */
+ /* Create the group */
+ grp = H5Gcreate2(fid, "group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gcreate2");
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Commit the type inside the group */
+ dtype = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(dtype, FAIL, "H5Tcopy");
+ ret = H5Tcommit2(fid, "group/datatype", dtype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Create the data space for the dataset. */
+ dims[0] = DIM0;
+ dims[1] = DIM1;
+ dspace = H5Screate_simple(RANK, dims, NULL);
+ CHECK(dspace, FAIL, "H5Screate_simple");
+
+ /* Create the dataset. */
+ dset = H5Dcreate2(fid, "dataset", H5T_NATIVE_INT, dspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dcreate2");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Sclose(dspace);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Make sure that H5Oopen_by_token can open all three types of objects */
+ ret = H5Lget_info2(fid, "group", &li, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lget_info");
+ grp = H5Oopen_by_token(fid, li.u.token);
+ CHECK(grp, FAIL, "H5Oopen_by_token");
+
+ ret = H5Lget_info2(fid, "group/datatype", &li, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lget_info");
+ dtype = H5Oopen_by_token(fid, li.u.token);
+ CHECK(dtype, FAIL, "H5Oopen_by_token");
+
+ ret = H5Lget_info2(fid, "dataset", &li, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lget_info");
+ /* Check that we can use the group ID as a valid location */
+ dset = H5Oopen_by_token(grp, li.u.token);
+ CHECK(dset, FAIL, "H5Oopen_by_token");
+
+ /* Make sure that each is the right kind of ID */
+ id_type = H5Iget_type(grp);
+ VERIFY(id_type, H5I_GROUP, "H5Iget_type for group ID");
+ id_type = H5Iget_type(dtype);
+ VERIFY(id_type, H5I_DATATYPE, "H5Iget_type for datatype ID");
+ id_type = H5Iget_type(dset);
+ VERIFY(id_type, H5I_DATASET, "H5Iget_type for dataset ID");
+
+ /* Do something more complex with each of the IDs to make sure they "work" */
+ ret = H5Gget_info(grp, &ginfo);
+ CHECK(ret, FAIL, "H5Gget_info");
+ VERIFY(ginfo.nlinks, 1, "H5Gget_info"); /* There should be one object, the datatype */
+
+ type_class = H5Tget_class(dtype);
+ VERIFY(type_class, H5T_INTEGER, "H5Tget_class");
+
+ dspace = H5Dget_space(dset);
+ CHECK(dspace, FAIL, "H5Dget_space");
+
+ /* Close the IDs */
+ ret = H5Sclose(dspace);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Try giving some bogus values to H5Oopen_by_token */
+ /* Try opening an object using H5O_TOKEN_UNDEF (should fail) */
+ H5E_BEGIN_TRY
+ {
+ dtype = H5Oopen_by_token(fid, H5O_TOKEN_UNDEF);
+ }
+ H5E_END_TRY
+ VERIFY(dtype, FAIL, "H5Oopen_by_token");
+
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Also, trying to open an object without a valid location (should fail) */
+ H5E_BEGIN_TRY
+ {
+ dtype = H5Oopen_by_token(fid, li.u.token);
+ }
+ H5E_END_TRY
+ VERIFY(dtype, FAIL, "H5Oopen_by_token");
+
+} /* test_h5o_open_by_token() */
+
+/****************************************************************
+**
+** test_h5o_refcount(): Test H5O refcounting functions.
+**
+****************************************************************/
+static void
+test_h5o_refcount(void)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t grp, dset, dtype, dspace; /* Object identifiers */
+ char filename[1024];
+ H5O_info2_t oinfo; /* Object info struct */
+ hsize_t dims[RANK];
+ herr_t ret; /* Value returned from API calls */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing retrieval of object reference count with H5Oget_info\n"));
+
+ h5_fixname(TEST_FILENAME, H5P_DEFAULT, filename, sizeof filename);
+
+ /* Create a new HDF5 file */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create a group, dataset, and committed datatype within the file */
+ /* Create the group */
+ grp = H5Gcreate2(fid, "group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gcreate2");
+
+ /* Commit the datatype at the file's root as "datatype" */
+ dtype = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(dtype, FAIL, "H5Tcopy");
+ ret = H5Tcommit2(fid, "datatype", dtype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /* Create the data space for the dataset. */
+ dims[0] = DIM0;
+ dims[1] = DIM1;
+ dspace = H5Screate_simple(RANK, dims, NULL);
+ CHECK(dspace, FAIL, "H5Screate_simple");
+
+ /* Create the dataset. */
+ dset = H5Dcreate2(fid, "dataset", H5T_NATIVE_INT, dspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dcreate2");
+ ret = H5Sclose(dspace);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Get ref counts for each object. They should all be 1, since each object has a hard link. */
+ ret = H5Oget_info_by_name3(fid, "group", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 1, "reference count in H5Oget_info_by_name3");
+ ret = H5Oget_info_by_name3(fid, "datatype", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 1, "reference count in H5Oget_info_by_name3");
+ ret = H5Oget_info_by_name3(fid, "dataset", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 1, "reference count in H5Oget_info_by_name3");
+
+ /* Increment each object's reference count. */
+ ret = H5Oincr_refcount(grp);
+ CHECK(ret, FAIL, "H5Oincr_refcount");
+ ret = H5Oincr_refcount(dtype);
+ CHECK(ret, FAIL, "H5Oincr_refcount");
+ ret = H5Oincr_refcount(dset);
+ CHECK(ret, FAIL, "H5Oincr_refcount");
+
+ /* Get ref counts for each object. They should all be 2 now. */
+ ret = H5Oget_info_by_name3(fid, "group", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 2, "reference count in H5Oget_info_by_name3");
+ ret = H5Oget_info_by_name3(fid, "datatype", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 2, "reference count in H5Oget_info_by_name3");
+ ret = H5Oget_info_by_name3(fid, "dataset", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 2, "reference count in H5Oget_info_by_name3");
+
+ /* Decrement the reference counts and check that they decrease back to 1. */
+ ret = H5Odecr_refcount(grp);
+ CHECK(ret, FAIL, "H5Odecr_refcount");
+ ret = H5Odecr_refcount(dtype);
+ CHECK(ret, FAIL, "H5Odecr_refcount");
+ ret = H5Odecr_refcount(dset);
+ CHECK(ret, FAIL, "H5Odecr_refcount");
+
+ ret = H5Oget_info_by_name3(fid, "group", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 1, "reference count in H5Oget_info_by_name3");
+ ret = H5Oget_info_by_name3(fid, "datatype", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 1, "reference count in H5Oget_info_by_name3");
+ ret = H5Oget_info_by_name3(fid, "dataset", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 1, "reference count in H5Oget_info_by_name3");
+
+ /* Increment the reference counts and then close the file to make sure the increment is permanent */
+ ret = H5Oincr_refcount(grp);
+ CHECK(ret, FAIL, "H5Oincr_refcount");
+ ret = H5Oincr_refcount(dtype);
+ CHECK(ret, FAIL, "H5Oincr_refcount");
+ ret = H5Oincr_refcount(dset);
+ CHECK(ret, FAIL, "H5Oincr_refcount");
+
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open the file and check that the reference counts were really incremented */
+ fid = H5Fopen(filename, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ grp = H5Gopen2(fid, "group", H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gopen2");
+ dtype = H5Topen2(fid, "datatype", H5P_DEFAULT);
+ CHECK(dtype, FAIL, "H5Topen2");
+ dset = H5Dopen2(fid, "dataset", H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dopen2");
+
+ ret = H5Oget_info_by_name3(fid, "group", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 2, "reference count in H5Oget_info_by_name3");
+ ret = H5Oget_info_by_name3(fid, "datatype", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 2, "reference count in H5Oget_info_by_name3");
+ ret = H5Oget_info_by_name3(fid, "dataset", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 2, "reference count in H5Oget_info_by_name3");
+
+ /* Decrement the reference counts and close the file */
+ ret = H5Odecr_refcount(grp);
+ CHECK(ret, FAIL, "H5Odecr_refcount");
+ ret = H5Odecr_refcount(dtype);
+ CHECK(ret, FAIL, "H5Odecr_refcount");
+ ret = H5Odecr_refcount(dset);
+ CHECK(ret, FAIL, "H5Odecr_refcount");
+
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open the file and check that the reference counts were really decremented */
+ fid = H5Fopen(filename, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ grp = H5Gopen2(fid, "group", H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gopen2");
+ dtype = H5Topen2(fid, "datatype", H5P_DEFAULT);
+ CHECK(dtype, FAIL, "H5Topen2");
+ dset = H5Dopen2(fid, "dataset", H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dopen2");
+
+ ret = H5Oget_info_by_name3(fid, "group", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 1, "reference count in H5Oget_info_by_name3");
+ ret = H5Oget_info_by_name3(fid, "datatype", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 1, "reference count in H5Oget_info_by_name3");
+ ret = H5Oget_info_by_name3(fid, "dataset", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ VERIFY(oinfo.rc, 1, "reference count in H5Oget_info_by_name3");
+
+ /* Close the IDs */
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Make sure that bogus IDs return errors properly */
+ /* (all three IDs were just closed above, so every call here must fail) */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Oincr_refcount(grp);
+ VERIFY(ret, FAIL, "H5Oincr_refcount");
+ ret = H5Oincr_refcount(dtype);
+ VERIFY(ret, FAIL, "H5Oincr_refcount");
+ ret = H5Oincr_refcount(dset);
+ VERIFY(ret, FAIL, "H5Oincr_refcount");
+ ret = H5Odecr_refcount(grp);
+ VERIFY(ret, FAIL, "H5Odecr_refcount");
+ ret = H5Odecr_refcount(dtype);
+ VERIFY(ret, FAIL, "H5Odecr_refcount");
+ ret = H5Odecr_refcount(dset);
+ VERIFY(ret, FAIL, "H5Odecr_refcount");
+ }
+ H5E_END_TRY
+
+ /* Close the file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_h5o_refcount() */
+
+/****************************************************************
+**
+** test_h5o_plist(): Test object creation properties
+**
+****************************************************************/
+static void
+test_h5o_plist(void)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t grp, dset, dtype, dspace; /* Object identifiers */
+ hid_t fapl; /* File access property list */
+ hid_t gcpl, dcpl, tcpl; /* Object creation properties */
+ char filename[1024];
+ unsigned def_max_compact, def_min_dense; /* Default phase change parameters */
+ unsigned max_compact, min_dense; /* Actual phase change parameters */
+ herr_t ret; /* Value returned from API calls */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Object creation properties\n"));
+
+ /* Make a FAPL that uses the "use the latest version of the format" flag */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+
+ /* Set the "use the latest version of the format" bounds for creating objects in the file */
+ ret = H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ h5_fixname(TEST_FILENAME, fapl, filename, sizeof filename);
+
+ /* Create a new HDF5 file */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create group, dataset & named datatype creation property lists */
+ gcpl = H5Pcreate(H5P_GROUP_CREATE);
+ CHECK(gcpl, FAIL, "H5Pcreate");
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, FAIL, "H5Pcreate");
+ tcpl = H5Pcreate(H5P_DATATYPE_CREATE);
+ CHECK(tcpl, FAIL, "H5Pcreate");
+
+ /* Retrieve default attribute phase change values */
+ ret = H5Pget_attr_phase_change(gcpl, &def_max_compact, &def_min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+
+ /* Set non-default attribute phase change values on each creation property list */
+ /* NOTE(review): "def_min_dense - 1" assumes the library's default min_dense
+ * is > 0 (unsigned underflow otherwise) -- true for current defaults, but
+ * worth confirming if defaults ever change. */
+ ret = H5Pset_attr_phase_change(gcpl, def_max_compact + 1, def_min_dense - 1);
+ CHECK(ret, FAIL, "H5Pset_attr_phase_change");
+ ret = H5Pset_attr_phase_change(dcpl, def_max_compact + 1, def_min_dense - 1);
+ CHECK(ret, FAIL, "H5Pset_attr_phase_change");
+ ret = H5Pset_attr_phase_change(tcpl, def_max_compact + 1, def_min_dense - 1);
+ CHECK(ret, FAIL, "H5Pset_attr_phase_change");
+
+ /* Retrieve attribute phase change values on each creation property list and verify */
+ ret = H5Pget_attr_phase_change(gcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+ VERIFY(max_compact, (def_max_compact + 1), "H5Pget_attr_phase_change");
+ VERIFY(min_dense, (def_min_dense - 1), "H5Pget_attr_phase_change");
+ ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+ VERIFY(max_compact, (def_max_compact + 1), "H5Pget_attr_phase_change");
+ VERIFY(min_dense, (def_min_dense - 1), "H5Pget_attr_phase_change");
+ ret = H5Pget_attr_phase_change(tcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+ VERIFY(max_compact, (def_max_compact + 1), "H5Pget_attr_phase_change");
+ VERIFY(min_dense, (def_min_dense - 1), "H5Pget_attr_phase_change");
+
+ /* Create a group, dataset, and committed datatype within the file,
+ * using the respective type of creation property lists.
+ */
+
+ /* Create the group anonymously and link it in */
+ grp = H5Gcreate_anon(fid, gcpl, H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gcreate_anon");
+ ret = H5Olink(grp, fid, "group", H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Olink");
+
+ /* Commit the type inside the group anonymously and link it in */
+ dtype = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(dtype, FAIL, "H5Tcopy");
+ ret = H5Tcommit_anon(fid, dtype, tcpl, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit_anon");
+ ret = H5Olink(dtype, fid, "datatype", H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Olink");
+
+ /* Create the dataspace for the dataset. */
+ dspace = H5Screate(H5S_SCALAR);
+ CHECK(dspace, FAIL, "H5Screate");
+
+ /* Create the dataset anonymously and link it in */
+ dset = H5Dcreate_anon(fid, H5T_NATIVE_INT, dspace, dcpl, H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dcreate_anon");
+ ret = H5Olink(dset, fid, "dataset", H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Olink");
+ ret = H5Sclose(dspace);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close current creation property lists */
+ ret = H5Pclose(gcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(tcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Retrieve each object's creation property list */
+ gcpl = H5Gget_create_plist(grp);
+ CHECK(gcpl, FAIL, "H5Gget_create_plist");
+ tcpl = H5Tget_create_plist(dtype);
+ CHECK(tcpl, FAIL, "H5Tget_create_plist");
+ dcpl = H5Dget_create_plist(dset);
+ CHECK(dcpl, FAIL, "H5Dget_create_plist");
+
+ /* Retrieve attribute phase change values on each creation property list and verify */
+ ret = H5Pget_attr_phase_change(gcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+ VERIFY(max_compact, (def_max_compact + 1), "H5Pget_attr_phase_change");
+ VERIFY(min_dense, (def_min_dense - 1), "H5Pget_attr_phase_change");
+ ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+ VERIFY(max_compact, (def_max_compact + 1), "H5Pget_attr_phase_change");
+ VERIFY(min_dense, (def_min_dense - 1), "H5Pget_attr_phase_change");
+ ret = H5Pget_attr_phase_change(tcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+ VERIFY(max_compact, (def_max_compact + 1), "H5Pget_attr_phase_change");
+ VERIFY(min_dense, (def_min_dense - 1), "H5Pget_attr_phase_change");
+
+ /* Close the retrieved creation property lists, the objects, and the file */
+ ret = H5Pclose(gcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(tcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open the file and check that the object creation properties persist */
+ fid = H5Fopen(filename, H5F_ACC_RDONLY, fapl);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Re-open objects */
+ grp = H5Gopen2(fid, "group", H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gopen2");
+ dtype = H5Topen2(fid, "datatype", H5P_DEFAULT);
+ CHECK(dtype, FAIL, "H5Topen2");
+ dset = H5Dopen2(fid, "dataset", H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dopen2");
+
+ /* Retrieve each object's creation property list */
+ gcpl = H5Gget_create_plist(grp);
+ CHECK(gcpl, FAIL, "H5Gget_create_plist");
+ tcpl = H5Tget_create_plist(dtype);
+ CHECK(tcpl, FAIL, "H5Tget_create_plist");
+ dcpl = H5Dget_create_plist(dset);
+ CHECK(dcpl, FAIL, "H5Dget_create_plist");
+
+ /* Retrieve attribute phase change values on each creation property list and verify */
+ ret = H5Pget_attr_phase_change(gcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+ VERIFY(max_compact, (def_max_compact + 1), "H5Pget_attr_phase_change");
+ VERIFY(min_dense, (def_min_dense - 1), "H5Pget_attr_phase_change");
+ ret = H5Pget_attr_phase_change(dcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+ VERIFY(max_compact, (def_max_compact + 1), "H5Pget_attr_phase_change");
+ VERIFY(min_dense, (def_min_dense - 1), "H5Pget_attr_phase_change");
+ ret = H5Pget_attr_phase_change(tcpl, &max_compact, &min_dense);
+ CHECK(ret, FAIL, "H5Pget_attr_phase_change");
+ VERIFY(max_compact, (def_max_compact + 1), "H5Pget_attr_phase_change");
+ VERIFY(min_dense, (def_min_dense - 1), "H5Pget_attr_phase_change");
+
+ /* Close the retrieved creation property lists, the objects, and the file */
+ ret = H5Pclose(gcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(tcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Close the FAPL */
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+} /* test_h5o_plist() */
+
+/****************************************************************
+**
+** test_h5o_link(): Test creating link to object
+**
+****************************************************************/
+static void
+test_h5o_link(void)
+{
+ hid_t file_id = -1;
+ hid_t group_id = -1;
+ hid_t space_id = -1;
+ hid_t dset_id = -1;
+ hid_t type_id = -1;
+ hid_t fapl_id = -1;
+ hid_t lcpl_id = -1;
+ char filename[1024];
+ hsize_t dims[2] = {TEST6_DIM1, TEST6_DIM2};
+ htri_t committed; /* Whether the named datatype is committed */
+ H5F_libver_t low, high; /* File format bounds */
+ int *wdata;
+ int *rdata;
+ int i, n;
+ herr_t ret; /* Value returned from API calls */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing H5Olink\n"));
+
+ h5_fixname(TEST_FILENAME, H5P_DEFAULT, filename, sizeof filename);
+
+ /* Allocate memory buffers */
+ /* (These are treated as 2-D buffers) */
+ wdata = (int *)HDmalloc((size_t)(TEST6_DIM1 * TEST6_DIM2) * sizeof(int));
+ CHECK_PTR(wdata, "HDmalloc");
+ rdata = (int *)HDmalloc((size_t)(TEST6_DIM1 * TEST6_DIM2) * sizeof(int));
+ CHECK_PTR(rdata, "HDmalloc");
+
+ /* Initialize the raw data */
+ for (i = n = 0; i < (TEST6_DIM1 * TEST6_DIM2); i++)
+ wdata[i] = n++;
+
+ /* Create the dataspace */
+ space_id = H5Screate_simple(2, dims, NULL);
+ CHECK(space_id, FAIL, "H5Screate_simple");
+
+ /* Create LCPL with intermediate group creation flag set */
+ lcpl_id = H5Pcreate(H5P_LINK_CREATE);
+ CHECK(lcpl_id, FAIL, "H5Pcreate");
+ ret = H5Pset_create_intermediate_group(lcpl_id, TRUE);
+ CHECK(ret, FAIL, "H5Pset_create_intermediate_group");
+
+ /* Create a file access property list. It is reused (and must therefore
+ * stay open) across every iteration of the bounds loops below; it is
+ * closed together with the other remaining IDs after the loops.
+ */
+ fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl_id, FAIL, "H5Pcreate");
+
+ /* Loop through all the combinations of low/high library format bounds */
+ for (low = H5F_LIBVER_EARLIEST; low < H5F_LIBVER_NBOUNDS; low++) {
+ for (high = H5F_LIBVER_EARLIEST; high < H5F_LIBVER_NBOUNDS; high++) {
+
+ /* Set version bounds */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pset_libver_bounds(fapl_id, low, high);
+ }
+ H5E_END_TRY;
+
+ if (ret < 0) /* Invalid low/high combinations */
+ continue;
+
+ /* Create a new HDF5 file */
+ file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ CHECK(file_id, FAIL, "H5Fcreate");
+
+ /* Create and commit a datatype with no name */
+ type_id = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(type_id, FAIL, "H5Tcopy");
+ ret = H5Tcommit_anon(file_id, type_id, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit_anon");
+ committed = H5Tcommitted(type_id);
+ VERIFY(committed, TRUE, "H5Tcommitted");
+
+ /* Create a dataset with no name using the committed datatype*/
+ dset_id = H5Dcreate_anon(file_id, type_id, space_id, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dcreate_anon");
+
+ /* Verify that we can write to and read from the dataset */
+
+ /* Write the data to the dataset */
+ ret = H5Dwrite(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read the data back */
+ ret = H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Verify the data */
+ for (i = 0; i < (TEST6_DIM1 * TEST6_DIM2); i++)
+ VERIFY(wdata[i], rdata[i], "H5Dread");
+
+ /* Create a group with no name*/
+ group_id = H5Gcreate_anon(file_id, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group_id, FAIL, "H5Gcreate_anon");
+
+ /* Link nameless datatype into nameless group */
+ ret = H5Olink(type_id, group_id, "datatype", H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Olink");
+
+ /* Link nameless dataset into nameless group with intermediate group */
+ ret = H5Olink(dset_id, group_id, "inter_group/dataset", lcpl_id, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Olink");
+
+ /* Close IDs for dataset and datatype */
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Tclose(type_id);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Re-open datatype using new link */
+ type_id = H5Topen2(group_id, "datatype", H5P_DEFAULT);
+ CHECK(type_id, FAIL, "H5Topen2");
+
+ /* Link nameless group to root group and close the group ID*/
+ ret = H5Olink(group_id, file_id, "/group", H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Olink");
+ ret = H5Gclose(group_id);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Open dataset through root group and verify its data */
+ dset_id = H5Dopen2(file_id, "/group/inter_group/dataset", H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dopen2");
+
+ /* Read data from dataset */
+ ret = H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+ CHECK(ret, FAIL, "H5Dread");
+ for (i = 0; i < (TEST6_DIM1 * TEST6_DIM2); i++)
+ VERIFY(wdata[i], rdata[i], "H5Dread");
+
+ /* Close open IDs */
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Tclose(type_id);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Fclose(file_id);
+ CHECK(ret, FAIL, "H5Fclose");
+ } /* for high */
+ } /* for low */
+
+ /* Close remaining IDs, including the FAPL that was shared by all
+ * iterations of the loops above.
+ */
+ ret = H5Sclose(space_id);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Pclose(lcpl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Pclose(fapl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Release buffers */
+ HDfree(wdata);
+ HDfree(rdata);
+} /* end test_h5o_link() */
+
+#if 0
+/****************************************************************
+**
+** test_h5o_comment(): Test H5Oset(get)_comment functions.
+**
+****************************************************************/
+static void
+test_h5o_comment(void)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t grp, dset, dtype, dspace; /* Object identifiers */
+ hid_t attr_space, attr_id;
+ char filename[1024];
+ hsize_t dims[RANK];
+ hsize_t attr_dims = 1;
+ int attr_value = 5;
+ const char *file_comment = "file comment";
+ const char *grp_comment = "group comment";
+ const char *dset_comment = "dataset comment";
+ const char *dtype_comment = "datatype comment";
+ char check_comment[64];
+ ssize_t comment_len = 0;
+ ssize_t len;
+ herr_t ret; /* Value returned from API calls */
+ int ret_value;
+
+ /* Output message about test being performed (matches sibling tests) */
+ MESSAGE(5, ("Testing H5Oset_comment/H5Oget_comment\n"));
+
+ h5_fixname(TEST_FILENAME, H5P_DEFAULT, filename, sizeof filename);
+
+ /* Create a new HDF5 file */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create an attribute for the file */
+ attr_space = H5Screate_simple(1, &attr_dims, NULL);
+ CHECK(attr_space, FAIL, "H5Screate_simple");
+ attr_id = H5Acreate2(fid, "file attribute", H5T_NATIVE_INT, attr_space, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr_id, FAIL, "H5Acreate2");
+ ret = H5Awrite(attr_id, H5T_NATIVE_INT, &attr_value);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Putting a comment on the file through its attribute */
+ ret = H5Oset_comment(attr_id, file_comment);
+ CHECK(ret, FAIL, "H5Oset_comment");
+
+ ret = H5Sclose(attr_space);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Aclose(attr_id);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Create a group, dataset, and committed datatype within the file */
+ /* Create the group */
+ grp = H5Gcreate2(fid, "group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gcreate2");
+
+ /* Putting a comment on the group */
+ ret = H5Oset_comment(grp, grp_comment);
+ CHECK(ret, FAIL, "H5Oset_comment");
+
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Commit the type inside the group */
+ dtype = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(dtype, FAIL, "H5Tcopy");
+ ret = H5Tcommit2(fid, "group/datatype", dtype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /* Putting a comment on the committed data type */
+ ret = H5Oset_comment(dtype, dtype_comment);
+ CHECK(ret, FAIL, "H5Oset_comment");
+
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Create the data space for the dataset. */
+ dims[0] = DIM0;
+ dims[1] = DIM1;
+ dspace = H5Screate_simple(RANK, dims, NULL);
+ CHECK(dspace, FAIL, "H5Screate_simple");
+
+ /* Create the dataset. */
+ dset = H5Dcreate2(fid, "dataset", H5T_NATIVE_INT, dspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dcreate2");
+
+ /* Putting a comment on the dataset */
+ ret = H5Oset_comment(dset, dset_comment);
+ CHECK(ret, FAIL, "H5Oset_comment");
+
+ /* Putting a comment on the dataspace. It's supposed to fail. */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Oset_comment(dspace, "dataspace comment");
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Oset_comment");
+
+ /* Close the file */
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Sclose(dspace);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Now make sure that the comments are correct all 4 types of objects */
+ /* Open file */
+ fid = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Getting the comment on the file and verify it */
+ comment_len = H5Oget_comment(fid, NULL, (size_t)0);
+ CHECK(comment_len, FAIL, "H5Oget_comment");
+
+ len = H5Oget_comment(fid, check_comment, (size_t)comment_len + 1);
+ CHECK(len, FAIL, "H5Oget_comment");
+
+ ret_value = HDstrcmp(file_comment, check_comment);
+ VERIFY(ret_value, 0, "H5Oget_comment");
+
+ /* Open the group */
+ grp = H5Gopen2(fid, "group", H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gopen2");
+
+ /* Getting the comment on the group and verify it */
+ comment_len = H5Oget_comment(grp, NULL, (size_t)0);
+ CHECK(comment_len, FAIL, "H5Oget_comment");
+
+ len = H5Oget_comment(grp, check_comment, (size_t)comment_len + 1);
+ CHECK(len, FAIL, "H5Oget_comment");
+
+ ret_value = HDstrcmp(grp_comment, check_comment);
+ VERIFY(ret_value, 0, "H5Oget_comment");
+
+ /* Open the datatype */
+ dtype = H5Topen2(fid, "group/datatype", H5P_DEFAULT);
+ CHECK(dtype, FAIL, "H5Topen2");
+
+ /* Getting the comment on the datatype and verify it */
+ comment_len = H5Oget_comment(dtype, NULL, (size_t)0);
+ CHECK(comment_len, FAIL, "H5Oget_comment");
+
+ len = H5Oget_comment(dtype, check_comment, (size_t)comment_len + 1);
+ CHECK(len, FAIL, "H5Oget_comment");
+
+ ret_value = HDstrcmp(dtype_comment, check_comment);
+ VERIFY(ret_value, 0, "H5Oget_comment");
+
+ /* Open the dataset */
+ dset = H5Dopen2(fid, "dataset", H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dopen2");
+
+ /* Getting the comment on the dataset and verify it */
+ comment_len = H5Oget_comment(dset, NULL, (size_t)0);
+ CHECK(comment_len, FAIL, "H5Oget_comment");
+
+ /* Check the retrieval result itself ("len"), matching the pattern used
+ * for the file/group/datatype checks above. */
+ len = H5Oget_comment(dset, check_comment, (size_t)comment_len + 1);
+ CHECK(len, FAIL, "H5Oget_comment");
+
+ ret_value = HDstrcmp(dset_comment, check_comment);
+ VERIFY(ret_value, 0, "H5Oget_comment");
+
+ /* Close the IDs */
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close the file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+} /* test_h5o_comment() */
+
+/****************************************************************
+**
+** test_h5o_comment_by_name(): Test H5Oset(get)_comment_by_name functions.
+**
+****************************************************************/
+static void
+test_h5o_comment_by_name(void)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t grp, dset, dtype, dspace; /* Object identifiers */
+ hid_t attr_space, attr_id;
+ char filename[1024];
+ hsize_t dims[RANK];
+ hsize_t attr_dims = 1;
+ int attr_value = 5;
+ const char *file_comment = "file comment by name";
+ const char *grp_comment = "group comment by name";
+ const char *dset_comment = "dataset comment by name";
+ const char *dtype_comment = "datatype comment by name";
+ char check_comment[64];
+ ssize_t comment_len = 0;
+ ssize_t len;
+ herr_t ret; /* Value returned from API calls */
+ int ret_value;
+
+ h5_fixname(TEST_FILENAME, H5P_DEFAULT, filename, sizeof filename);
+
+ /* Create a new HDF5 file */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create an attribute for the file */
+ attr_space = H5Screate_simple(1, &attr_dims, NULL);
+ CHECK(attr_space, FAIL, "H5Screate_simple");
+ attr_id = H5Acreate2(fid, "file attribute", H5T_NATIVE_INT, attr_space, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr_id, FAIL, "H5Acreate2");
+ ret = H5Awrite(attr_id, H5T_NATIVE_INT, &attr_value);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Putting a comment on the file through its attribute */
+ ret = H5Oset_comment_by_name(attr_id, ".", file_comment, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oset_comment_by_name");
+
+ ret = H5Sclose(attr_space);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Aclose(attr_id);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Create a group, dataset, and committed datatype within the file */
+ /* Create the group */
+ grp = H5Gcreate2(fid, "group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gcreate2");
+
+ /* Putting a comment on the group */
+ ret = H5Oset_comment_by_name(fid, "group", grp_comment, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oset_comment_by_name");
+
+ /* Commit the type inside the group */
+ dtype = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(dtype, FAIL, "H5Tcopy");
+ ret = H5Tcommit2(fid, "group/datatype", dtype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /* Putting a comment on the committed data type */
+ ret = H5Oset_comment_by_name(grp, "datatype", dtype_comment, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oset_comment_by_name");
+
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Create the data space for the dataset. */
+ dims[0] = DIM0;
+ dims[1] = DIM1;
+ dspace = H5Screate_simple(RANK, dims, NULL);
+ CHECK(dspace, FAIL, "H5Screate_simple");
+
+ /* Create the dataset. */
+ dset = H5Dcreate2(fid, "dataset", H5T_NATIVE_INT, dspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dcreate2");
+
+ /* Putting a comment on the dataset */
+ ret = H5Oset_comment_by_name(fid, "dataset", dset_comment, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oset_comment_by_name");
+
+ /* Putting a comment on the dataspace. It's supposed to fail. */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Oset_comment_by_name(dspace, ".", "dataspace comment", H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Oset_comment");
+
+ /* Close the file */
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Sclose(dspace);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Now make sure that the comments are correct all 4 types of objects */
+ /* Open file */
+ fid = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Getting the comment on the file and verify it */
+ comment_len = H5Oget_comment_by_name(fid, ".", NULL, (size_t)0, H5P_DEFAULT);
+ CHECK(comment_len, FAIL, "H5Oget_comment_by_name");
+
+ len = H5Oget_comment_by_name(fid, ".", check_comment, (size_t)comment_len + 1, H5P_DEFAULT);
+ CHECK(len, FAIL, "H5Oget_comment_by_name");
+
+ ret_value = HDstrcmp(file_comment, check_comment);
+ VERIFY(ret_value, 0, "H5Oget_comment_by_name");
+
+ /* Open the group */
+ grp = H5Gopen2(fid, "group", H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gopen2");
+
+ /* Getting the comment on the group and verify it */
+ comment_len = H5Oget_comment_by_name(fid, "group", NULL, (size_t)0, H5P_DEFAULT);
+ CHECK(comment_len, FAIL, "H5Oget_comment_by_name");
+
+ len = H5Oget_comment_by_name(fid, "group", check_comment, (size_t)comment_len + 1, H5P_DEFAULT);
+ CHECK(len, FAIL, "H5Oget_comment_by_name");
+
+ ret_value = HDstrcmp(grp_comment, check_comment);
+ VERIFY(ret_value, 0, "H5Oget_comment_by_name");
+
+ /* Getting the comment on the datatype and verify it */
+ comment_len = H5Oget_comment_by_name(grp, "datatype", NULL, (size_t)0, H5P_DEFAULT);
+ CHECK(comment_len, FAIL, "H5Oget_comment_by_name");
+
+ len = H5Oget_comment_by_name(grp, "datatype", check_comment, (size_t)comment_len + 1, H5P_DEFAULT);
+ CHECK(len, FAIL, "H5Oget_comment");
+
+ ret_value = HDstrcmp(dtype_comment, check_comment);
+ VERIFY(ret_value, 0, "H5Oget_comment_by_name");
+
+ /* Getting the comment on the dataset and verify it */
+ comment_len = H5Oget_comment_by_name(fid, "dataset", NULL, (size_t)0, H5P_DEFAULT);
+ CHECK(comment_len, FAIL, "H5Oget_comment_by_name");
+
+ len = H5Oget_comment_by_name(fid, "dataset", check_comment, (size_t)comment_len + 1, H5P_DEFAULT);
+ CHECK(len, FAIL, "H5Oget_comment_by_name");
+
+ ret_value = HDstrcmp(dset_comment, check_comment);
+ VERIFY(ret_value, 0, "H5Oget_comment_by_name");
+
+ /* Close the IDs */
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close the file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+} /* test_h5o_comment_by_name() */
+#endif
+
+/****************************************************************
+**
+** test_h5o_getinfo_same_file(): Test that querying the object info for
+** objects in the same file will return the same file "number"
+**
+****************************************************************/
+static void
+test_h5o_getinfo_same_file(void)
+{
+ hid_t fid1, fid2; /* HDF5 File ID */
+ hid_t gid1, gid2; /* Group IDs */
+ char filename[1024];
+ H5O_info2_t oinfo1, oinfo2; /* Object info structs */
+ herr_t ret; /* Value returned from API calls */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing H5Oget_info on objects in same file\n"));
+
+ h5_fixname(TEST_FILENAME, H5P_DEFAULT, filename, sizeof filename);
+
+ /* Create a new HDF5 file */
+ fid1 = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create two groups in the file */
+ gid1 = H5Gcreate2(fid1, "group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(gid1, FAIL, "H5Gcreate2");
+ gid2 = H5Gcreate2(fid1, "group2", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(gid2, FAIL, "H5Gcreate2");
+
+ /* Reset object info */
+ HDmemset(&oinfo1, 0, sizeof(oinfo1));
+ HDmemset(&oinfo2, 0, sizeof(oinfo2));
+
+ /* Query the object info for each object, through group IDs */
+ ret = H5Oget_info3(gid1, &oinfo1, H5O_INFO_BASIC);
+ CHECK(ret, FAIL, "H5Oget_info3");
+ ret = H5Oget_info3(gid2, &oinfo2, H5O_INFO_BASIC);
+ CHECK(ret, FAIL, "H5Oget_info3");
+
+ /* Two objects that live in the same file must report the same file number */
+ VERIFY(oinfo1.fileno, oinfo2.fileno, "file number from H5Oget_info3");
+
+ /* Reset object info */
+ HDmemset(&oinfo1, 0, sizeof(oinfo1));
+ HDmemset(&oinfo2, 0, sizeof(oinfo2));
+
+ /* Query the object info for each object, by name */
+ ret = H5Oget_info_by_name3(fid1, "group1", &oinfo1, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ ret = H5Oget_info_by_name3(fid1, "group2", &oinfo2, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+
+ VERIFY(oinfo1.fileno, oinfo2.fileno, "file number from H5Oget_info3");
+
+ /* Close everything */
+ ret = H5Gclose(gid1);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Gclose(gid2);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Open file twice: the harder case — two distinct file IDs onto the same
+ * underlying file must still yield matching "fileno" values */
+ fid1 = H5Fopen(filename, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fopen");
+ fid2 = H5Fopen(filename, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid2, FAIL, "H5Fopen");
+
+ /* Open the two groups in the file (one through each file ID) */
+ gid1 = H5Gopen2(fid1, "group1", H5P_DEFAULT);
+ CHECK(gid1, FAIL, "H5Gopen2");
+ gid2 = H5Gopen2(fid2, "group2", H5P_DEFAULT);
+ CHECK(gid2, FAIL, "H5Gopen2");
+
+ /* Reset object info */
+ HDmemset(&oinfo1, 0, sizeof(oinfo1));
+ HDmemset(&oinfo2, 0, sizeof(oinfo2));
+
+ /* Query the object info for each object, through group IDs */
+ ret = H5Oget_info3(gid1, &oinfo1, H5O_INFO_BASIC);
+ CHECK(ret, FAIL, "H5Oget_info3");
+ ret = H5Oget_info3(gid2, &oinfo2, H5O_INFO_BASIC);
+ CHECK(ret, FAIL, "H5Oget_info3");
+
+ VERIFY(oinfo1.fileno, oinfo2.fileno, "file number from H5Oget_info3");
+
+ /* Reset object info */
+ HDmemset(&oinfo1, 0, sizeof(oinfo1));
+ HDmemset(&oinfo2, 0, sizeof(oinfo2));
+
+ /* Query the object info for each object, by name */
+ ret = H5Oget_info_by_name3(fid1, "group1", &oinfo1, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+ ret = H5Oget_info_by_name3(fid1, "group2", &oinfo2, H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name3");
+
+ VERIFY(oinfo1.fileno, oinfo2.fileno, "file number from H5Oget_info3");
+
+ /* Close everything */
+ ret = H5Gclose(gid1);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Gclose(gid2);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+ ret = H5Fclose(fid2);
+ CHECK(ret, FAIL, "H5Fclose");
+
+} /* test_h5o_getinfo_same_file() */
+
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+#if 0
+/****************************************************************
+**
+** test_h5o_open_by_addr_deprec(): Test H5Oopen_by_addr function.
+**
+****************************************************************/
+static void
+test_h5o_open_by_addr_deprec(void)
+{
+ hid_t fid; /* HDF5 File ID */
+ hid_t grp, dset, dtype, dspace; /* Object identifiers */
+ char filename[1024];
+ H5L_info1_t li; /* Buffer for H5Lget_info1 */
+ haddr_t grp_addr; /* Addresses for objects */
+ haddr_t dset_addr;
+ haddr_t dtype_addr;
+ hsize_t dims[RANK];
+ H5I_type_t id_type; /* Type of IDs returned from H5Oopen */
+ H5G_info_t ginfo; /* Group info struct */
+ H5T_class_t type_class; /* Class of the datatype */
+ herr_t ret; /* Value returned from API calls */
+
+ h5_fixname(TEST_FILENAME, H5P_DEFAULT, filename, sizeof filename);
+
+ /* Create a new HDF5 file */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create a group, dataset, and committed datatype within the file */
+ /* Create the group */
+ grp = H5Gcreate2(fid, "group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(grp, FAIL, "H5Gcreate2");
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Commit the type inside the group */
+ dtype = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(dtype, FAIL, "H5Tcopy");
+ ret = H5Tcommit2(fid, "group/datatype", dtype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Create the data space for the dataset. */
+ dims[0] = DIM0;
+ dims[1] = DIM1;
+ dspace = H5Screate_simple(RANK, dims, NULL);
+ CHECK(dspace, FAIL, "H5Screate_simple");
+
+ /* Create the dataset. */
+ dset = H5Dcreate2(fid, "dataset", H5T_NATIVE_INT, dspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dcreate2");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Sclose(dspace);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Get the address for each object, via the deprecated H5Lget_info1 */
+ ret = H5Lget_info1(fid, "group", &li, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lget_info1");
+ grp_addr = li.u.address;
+ ret = H5Lget_info1(fid, "group/datatype", &li, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lget_info1");
+ dtype_addr = li.u.address;
+ ret = H5Lget_info1(fid, "dataset", &li, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lget_info1");
+ dset_addr = li.u.address;
+
+ /* Now make sure that H5Oopen_by_addr can open all three types of objects */
+ grp = H5Oopen_by_addr(fid, grp_addr);
+ CHECK(grp, FAIL, "H5Oopen_by_addr");
+ dtype = H5Oopen_by_addr(fid, dtype_addr);
+ CHECK(dtype, FAIL, "H5Oopen_by_addr");
+ /* Check that we can use the group ID as a valid location */
+ dset = H5Oopen_by_addr(grp, dset_addr);
+ CHECK(dset, FAIL, "H5Oopen_by_addr");
+
+ /* Make sure that each is the right kind of ID */
+ id_type = H5Iget_type(grp);
+ VERIFY(id_type, H5I_GROUP, "H5Iget_type for group ID");
+ id_type = H5Iget_type(dtype);
+ VERIFY(id_type, H5I_DATATYPE, "H5Iget_type for datatype ID");
+ id_type = H5Iget_type(dset);
+ VERIFY(id_type, H5I_DATASET, "H5Iget_type for dataset ID");
+
+ /* Do something more complex with each of the IDs to make sure they "work" */
+ ret = H5Gget_info(grp, &ginfo);
+ CHECK(ret, FAIL, "H5Gget_info");
+ VERIFY(ginfo.nlinks, 1, "H5Gget_info"); /* There should be one object, the datatype */
+
+ type_class = H5Tget_class(dtype);
+ VERIFY(type_class, H5T_INTEGER, "H5Tget_class");
+
+ dspace = H5Dget_space(dset);
+ CHECK(dspace, FAIL, "H5Dget_space");
+
+ /* Close the IDs */
+ ret = H5Sclose(dspace);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Try giving some bogus values to H5Oopen_by_addr. */
+ /* Try to open an object with a bad address */
+ grp_addr += 20;
+ H5E_BEGIN_TRY
+ {
+ grp = H5Oopen_by_addr(fid, grp_addr);
+ }
+ H5E_END_TRY
+ VERIFY(grp, FAIL, "H5Oopen_by_addr");
+
+ /* For instance, an objectno smaller than the end of the file's superblock should
+ * trigger an error */
+ grp_addr = 10;
+ H5E_BEGIN_TRY
+ {
+ grp = H5Oopen_by_addr(fid, grp_addr);
+ }
+ H5E_END_TRY
+ VERIFY(grp, FAIL, "H5Oopen_by_addr");
+
+ /* Likewise, an objectno larger than the size of the file should fail */
+ grp_addr = 1000000000;
+ H5E_BEGIN_TRY
+ {
+ grp = H5Oopen_by_addr(fid, grp_addr);
+ }
+ H5E_END_TRY
+ VERIFY(grp, FAIL, "H5Oopen_by_addr");
+
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Also, trying to open an object without a valid location should fail;
+ * 'fid' was closed just above, so it is intentionally a stale ID here */
+ H5E_BEGIN_TRY
+ {
+ dtype = H5Oopen_by_addr(fid, dtype_addr);
+ }
+ H5E_END_TRY
+ VERIFY(dtype, FAIL, "H5Oopen_by_addr");
+} /* test_h5o_open_by_addr_deprec() */
+
+/****************************************************************
+**
+** visit_obj_cb():
+** This is the callback function invoked by H5Ovisit1() in
+** test_h5o_getinfo_visit():
+** --Verify that the object info returned to the callback
+** function is the same as H5Oget_info2().
+**
+****************************************************************/
+static int
+visit_obj_cb(hid_t group_id, const char *name, const H5O_info1_t *oinfo1, void H5_ATTR_UNUSED *_op_data)
+{
+ H5O_info1_t oinfo2; /* Object info structs */
+
+ /* Verify the object info for "group1", "group2" and the root group:
+ * re-query each object with H5Oget_info_by_name2() and compare against the
+ * info H5Ovisit1() handed to this callback.
+ * NOTE(review): the H5Oget_info_by_name2() return values below are not
+ * checked; a failure would leave oinfo2 uninitialized — consider CHECK(). */
+ if (!(HDstrcmp(name, "group1"))) {
+ H5Oget_info_by_name2(group_id, name, &oinfo2, H5O_INFO_NUM_ATTRS, H5P_DEFAULT);
+ VERIFY(oinfo1->num_attrs, oinfo2.num_attrs, "obj info from H5Ovisit1");
+ }
+ else if (!(HDstrcmp(name, "group2"))) {
+ H5Oget_info_by_name2(group_id, name, &oinfo2, H5O_INFO_HDR, H5P_DEFAULT);
+ VERIFY(oinfo1->hdr.nmesgs, oinfo2.hdr.nmesgs, "obj info from H5Ovisit1/H5Oget_info2");
+ VERIFY(oinfo1->hdr.nchunks, oinfo2.hdr.nchunks, "obj info from H5Ovisit1/H5Oget_info2");
+ }
+ else if (!(HDstrcmp(name, "."))) {
+ /* "." is the root group (the starting point of the visit) */
+ H5Oget_info_by_name2(group_id, name, &oinfo2, H5O_INFO_META_SIZE, H5P_DEFAULT);
+ VERIFY(oinfo1->meta_size.obj.index_size, oinfo2.meta_size.obj.index_size,
+ "obj info from H5Ovisit1/H5Oget_info2");
+ VERIFY(oinfo1->meta_size.obj.heap_size, oinfo2.meta_size.obj.heap_size,
+ "obj info from H5Ovisit1/H5Oget_info2");
+ }
+
+ return (H5_ITER_CONT);
+} /* end visit_obj_cb() */
+
+/****************************************************************
+**
+** test_h5o_getinfo_visit():
+** Verify that the object info returned via H5Oget_info1()
+** and H5Oget_info2() are the same.
+** Verify that the object info retrieved via H5Ovisit1() is
+** the same as H5Oget_info2().
+**
+****************************************************************/
+static void
+test_h5o_getinfo_visit(void)
+{
+ hid_t fid = -1; /* HDF5 File ID */
+ hid_t gid1 = -1, gid2 = -1; /* Group IDs */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t aid = -1; /* Attribute ID */
+ char filename[1024];
+ H5O_info1_t oinfo1, oinfo2; /* Object info structs */
+ char attrname[25]; /* Attribute name */
+ int j; /* Local index variable */
+ herr_t ret; /* Value returned from API calls */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing info returned by H5Oget_info vs H5Ovisit\n"));
+
+ h5_fixname(TEST_FILENAME, H5P_DEFAULT, filename, sizeof filename);
+
+ /* Create an HDF5 file */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create "group1" in the file */
+ gid1 = H5Gcreate2(fid, "group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(gid1, FAIL, "H5Gcreate2");
+
+ /* Create dataspace */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Attach 10 attributes to "group1" */
+ for (j = 0; j < 10; j++) {
+ /* Create the attribute name (cast matches the %u conversion; j is an int) */
+ HDsnprintf(attrname, sizeof(attrname), "attr%u", (unsigned)j);
+ /* Create the attribute */
+ aid = H5Acreate2(gid1, attrname, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(aid, FAIL, "H5Acreate2");
+ /* Close the attribute */
+ ret = H5Aclose(aid);
+ CHECK(ret, FAIL, "H5Aclose");
+ }
+
+ /* Create "group2" in the file */
+ gid2 = H5Gcreate2(fid, "group2", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(gid2, FAIL, "H5Gcreate2");
+
+ /* Reset object info */
+ HDmemset(&oinfo1, 0, sizeof(oinfo1));
+ HDmemset(&oinfo2, 0, sizeof(oinfo2));
+
+ /* Query the object info for "group1" via H5Oget_info1 and H5Oget_info2 */
+ ret = H5Oget_info1(gid1, &oinfo1);
+ CHECK(ret, FAIL, "H5Oget_info1");
+ ret = H5Oget_info2(gid1, &oinfo2, H5O_INFO_BASIC | H5O_INFO_NUM_ATTRS);
+ CHECK(ret, FAIL, "H5Oget_info2");
+
+ /* Verify the object info for "group1" is correct */
+ VERIFY(oinfo1.fileno, oinfo2.fileno, "obj info from H5Oget_info1/2");
+ VERIFY(oinfo1.num_attrs, oinfo2.num_attrs, "obj info from H5Oget_info1/2");
+
+ /* Reset object info */
+ HDmemset(&oinfo1, 0, sizeof(oinfo1));
+ HDmemset(&oinfo2, 0, sizeof(oinfo2));
+
+ /* Query the object info for "group2" via H5Oget_info_by_name1/2 */
+ ret = H5Oget_info_by_name1(fid, "group2", &oinfo1, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name1");
+ ret = H5Oget_info_by_name2(fid, "group2", &oinfo2, H5O_INFO_HDR | H5O_INFO_META_SIZE, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_name2");
+
+ /* Verify the object info for "group2" is correct */
+ VERIFY(oinfo1.hdr.nmesgs, oinfo2.hdr.nmesgs, "obj info from H5Oget_info1/2");
+ VERIFY(oinfo1.hdr.nchunks, oinfo2.hdr.nchunks, "obj info from H5Oget_info1/2");
+ VERIFY(oinfo1.meta_size.obj.index_size, oinfo2.meta_size.obj.index_size, "obj info from H5Oget_info1/2");
+ VERIFY(oinfo1.meta_size.obj.heap_size, oinfo2.meta_size.obj.heap_size, "obj info from H5Oget_info1/2");
+
+ /* Close everything (including the scalar dataspace, which the original
+ * version of this test leaked) */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Gclose(gid1);
+ CHECK(ret, FAIL, "H5Gclose");
+ ret = H5Gclose(gid2);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Verify the object info returned to the callback function is correct */
+ ret = H5Ovisit1(fid, H5_INDEX_NAME, H5_ITER_INC, visit_obj_cb, NULL);
+ CHECK(ret, FAIL, "H5Ovisit1");
+
+ /* Close the file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+} /* test_h5o_getinfo_visit() */
+#endif
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+
+/****************************************************************
+**
+** test_h5o(): Main H5O (generic object) testing routine.
+**
+****************************************************************/
+void
+test_h5o(void)
+{
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Objects\n"));
+
+ /* NOTE(review): the #if 0 sections below are disabled in this API-test
+ * build — presumably they rely on native-VOL-only features; confirm. */
+ test_h5o_open(); /* Test generic open function */
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+ test_h5o_open_by_addr(); /* Test opening objects by address */
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+#endif
+ test_h5o_open_by_token(); /* Test opening objects by token */
+ test_h5o_close(); /* Test generic close function */
+ test_h5o_refcount(); /* Test incrementing and decrementing reference count */
+ test_h5o_plist(); /* Test object creation properties */
+ test_h5o_link(); /* Test object link routine */
+#if 0
+ test_h5o_comment(); /* Test routines for comment */
+ test_h5o_comment_by_name(); /* Test routines for comment by name */
+#endif
+ test_h5o_getinfo_same_file(); /* Test info for objects in the same file */
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+#if 0
+ test_h5o_open_by_addr_deprec(); /* Test opening objects by address with H5Lget_info1 */
+ test_h5o_getinfo_visit(); /* Test object info for H5Oget_info1/2 and H5Ovisit1 */
+#endif
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+} /* test_h5o() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_h5o
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: James Laird
+ * June 3, 2006
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_h5o(void)
+{
+ char filename[1024];
+
+ /* Suppress any errors (e.g. the file may not exist) while deleting */
+ H5E_BEGIN_TRY
+ {
+ h5_fixname(TEST_FILENAME, H5P_DEFAULT, filename, sizeof filename);
+ H5Fdelete(filename, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+}
diff --git a/test/API/th5s.c b/test/API/th5s.c
new file mode 100644
index 0000000..cb1c899
--- /dev/null
+++ b/test/API/th5s.c
@@ -0,0 +1,3538 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: th5s
+ *
+ * Test the dataspace functionality
+ *
+ *************************************************************/
+
+#include "testhdf5.h"
+/* #include "H5srcdir.h" */
+
+/* #include "H5Iprivate.h" */
+/* #include "H5Pprivate.h" */
+
+#if 0
+/*
+ * This file needs to access private information from the H5S package.
+ * This file also needs to access the dataspace testing code.
+ */
+#define H5S_FRIEND /*suppress error about including H5Spkg */
+#define H5S_TESTING /*suppress warning about H5S testing funcs*/
+#include "H5Spkg.h" /* Dataspaces */
+
+/*
+ * This file needs to access private information from the H5O package.
+ * This file also needs to access the dataspace testing code.
+ */
+#define H5O_FRIEND /*suppress error about including H5Opkg */
+#define H5O_TESTING
+#include "H5Opkg.h" /* Object header */
+#endif
+
+/* File names for the files created/read by the dataspace tests */
+#define TESTFILE "th5s.h5"
+#define DATAFILE "th5s1.h5"
+#define NULLFILE "th5s2.h5"
+#define BASICFILE "th5s3.h5"
+#define ZEROFILE "th5s4.h5"
+/* Object (dataset/attribute) names used across the tests */
+#define BASICDATASET "basic_dataset"
+#define BASICDATASET1 "basic_dataset1"
+#define BASICDATASET2 "basic_dataset2"
+#define BASICDATASET3 "basic_dataset3"
+#define BASICDATASET4 "basic_dataset4"
+#define BASICATTR "basic_attribute"
+#define NULLDATASET "null_dataset"
+#define NULLATTR "null_attribute"
+#define EXTFILE_NAME "ext_file"
+
+/* 3-D dataset with fixed dimensions */
+#define SPACE1_RANK 3
+#define SPACE1_DIM1 3
+#define SPACE1_DIM2 15
+#define SPACE1_DIM3 13
+
+/* 4-D dataset with one unlimited dimension */
+#define SPACE2_RANK 4
+#define SPACE2_DIM1 0
+#define SPACE2_DIM2 15
+#define SPACE2_DIM3 13
+#define SPACE2_DIM4 23
+#define SPACE2_MAX1 H5S_UNLIMITED
+#define SPACE2_MAX2 15
+#define SPACE2_MAX3 13
+#define SPACE2_MAX4 23
+
+/* Scalar dataset with simple datatype */
+#define SPACE3_RANK 0
+unsigned space3_data = 65;
+
+/* Scalar dataset with compound datatype */
+#define SPACE4_FIELDNAME1 "c1"
+#define SPACE4_FIELDNAME2 "u"
+#define SPACE4_FIELDNAME3 "f"
+#define SPACE4_FIELDNAME4 "c2"
+/* Byte offsets of the compound fields; initialized to 0 here and
+ * presumably filled in by the tests before use — confirm against callers */
+size_t space4_field1_off = 0;
+size_t space4_field2_off = 0;
+size_t space4_field3_off = 0;
+size_t space4_field4_off = 0;
+struct space4_struct {
+ char c1;
+ unsigned u;
+ float f;
+ char c2;
+} space4_data = {'v', 987123, -3.14F, 'g'}; /* Test data for 4th dataspace */
+
+/*
+ * Testing configuration defines used by:
+ * test_h5s_encode_regular_hyper()
+ * test_h5s_encode_irregular_hyper()
+ * test_h5s_encode_points()
+ */
+#define CONFIG_8 1
+#define CONFIG_16 2
+#define CONFIG_32 3
+#define POWER8 256 /* 2^8 */
+#define POWER16 65536 /* 2^16 */
+#define POWER32 4294967296 /* 2^32 */
+
+/****************************************************************
+**
+** test_h5s_basic(): Test basic H5S (dataspace) code.
+**
+****************************************************************/
+static void
+test_h5s_basic(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hid_t dset1; /* Dataset ID */
+#ifndef NO_VALIDATE_DATASPACE
+ hid_t aid1; /* Attribute ID */
+#endif
+ int rank; /* Logical rank of dataspace */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {SPACE2_DIM1, SPACE2_DIM2, SPACE2_DIM3, SPACE2_DIM4};
+ hsize_t dims3[H5S_MAX_RANK + 1];
+ hsize_t max2[] = {SPACE2_MAX1, SPACE2_MAX2, SPACE2_MAX3, SPACE2_MAX4};
+ hsize_t tdims[4]; /* Dimension array to test with */
+ hsize_t tmax[4];
+ hssize_t n; /* Number of dataspace elements */
+#if 0
+ hbool_t driver_is_default_compatible;
+#endif
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Dataspace Manipulation\n"));
+
+ /* NOTE(review): max2 is SPACE2's 4-element max-dims array; only its first
+ * SPACE1_RANK entries are read here and all are >= dims1, so this is
+ * valid — appears to be deliberate reuse, confirm against upstream th5s.c */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, max2);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ n = H5Sget_simple_extent_npoints(sid1);
+ CHECK(n, FAIL, "H5Sget_simple_extent_npoints");
+ VERIFY(n, SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3, "H5Sget_simple_extent_npoints");
+
+ rank = H5Sget_simple_extent_ndims(sid1);
+ CHECK(rank, FAIL, "H5Sget_simple_extent_ndims");
+ VERIFY(rank, SPACE1_RANK, "H5Sget_simple_extent_ndims");
+
+ rank = H5Sget_simple_extent_dims(sid1, tdims, NULL);
+ CHECK(rank, FAIL, "H5Sget_simple_extent_dims");
+ VERIFY(HDmemcmp(tdims, dims1, SPACE1_RANK * sizeof(hsize_t)), 0, "H5Sget_simple_extent_dims");
+
+ sid2 = H5Screate_simple(SPACE2_RANK, dims2, max2);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* SPACE2_DIM1 is 0, so the 4-D space has zero elements overall */
+ n = H5Sget_simple_extent_npoints(sid2);
+ CHECK(n, FAIL, "H5Sget_simple_extent_npoints");
+ VERIFY(n, SPACE2_DIM1 * SPACE2_DIM2 * SPACE2_DIM3 * SPACE2_DIM4, "H5Sget_simple_extent_npoints");
+
+ rank = H5Sget_simple_extent_ndims(sid2);
+ CHECK(rank, FAIL, "H5Sget_simple_extent_ndims");
+ VERIFY(rank, SPACE2_RANK, "H5Sget_simple_extent_ndims");
+
+ rank = H5Sget_simple_extent_dims(sid2, tdims, tmax);
+ CHECK(rank, FAIL, "H5Sget_simple_extent_dims");
+ VERIFY(HDmemcmp(tdims, dims2, SPACE2_RANK * sizeof(hsize_t)), 0, "H5Sget_simple_extent_dims");
+ VERIFY(HDmemcmp(tmax, max2, SPACE2_RANK * sizeof(hsize_t)), 0, "H5Sget_simple_extent_dims");
+
+ /* Change max dims to be equal to the dimensions */
+ ret = H5Sset_extent_simple(sid1, SPACE1_RANK, dims1, NULL);
+ CHECK(ret, FAIL, "H5Sset_extent_simple");
+ rank = H5Sget_simple_extent_dims(sid1, tdims, tmax);
+ CHECK(rank, FAIL, "H5Sget_simple_extent_dims");
+ VERIFY(HDmemcmp(tdims, dims1, SPACE1_RANK * sizeof(hsize_t)), 0, "H5Sget_simple_extent_dims");
+ VERIFY(HDmemcmp(tmax, dims1, SPACE1_RANK * sizeof(hsize_t)), 0, "H5Sget_simple_extent_dims");
+
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /*
+ * Check to be sure we can't create a simple dataspace that has too many
+ * dimensions.
+ */
+ H5E_BEGIN_TRY
+ {
+ sid1 = H5Screate_simple(H5S_MAX_RANK + 1, dims3, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(sid1, FAIL, "H5Screate_simple");
+#if 0
+ /*
+ * Try reading a file that has been prepared that has a dataset with a
+ * higher dimensionality than what the library can handle.
+ *
+ * If this test fails and the H5S_MAX_RANK variable has changed, follow
+ * the instructions in space_overflow.c for regenerating the th5s.h5 file.
+ */
+ ret = h5_driver_is_default_vfd_compatible(H5P_DEFAULT, &driver_is_default_compatible);
+ CHECK_I(ret, "h5_driver_is_default_vfd_compatible");
+
+ if (driver_is_default_compatible) {
+ const char *testfile = H5_get_srcdir_filename(TESTFILE); /* Corrected test file name */
+
+ fid1 = H5Fopen(testfile, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK_I(fid1, "H5Fopen");
+ if (fid1 >= 0) {
+ dset1 = H5Dopen2(fid1, "dset", H5P_DEFAULT);
+ VERIFY(dset1, FAIL, "H5Dopen2");
+ ret = H5Fclose(fid1);
+ CHECK_I(ret, "H5Fclose");
+ }
+ else
+ HDprintf("***cannot open the pre-created H5S_MAX_RANK test file (%s)\n", testfile);
+ }
+#endif
+ /* Verify that incorrect dimensions don't work */
+ dims1[0] = H5S_UNLIMITED;
+ H5E_BEGIN_TRY
+ {
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(sid1, FAIL, "H5Screate_simple");
+
+ /* An unlimited current dimension must also be rejected by
+ * H5Sset_extent_simple on a freshly created H5S_SIMPLE dataspace */
+ dims1[0] = H5S_UNLIMITED;
+ sid1 = H5Screate(H5S_SIMPLE);
+ CHECK(sid1, FAIL, "H5Screate");
+
+ H5E_BEGIN_TRY
+ {
+ ret = H5Sset_extent_simple(sid1, SPACE1_RANK, dims1, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Sset_extent_simple");
+
+ ret = H5Sclose(sid1);
+ CHECK_I(ret, "H5Sclose");
+
+ /*
+ * Try writing simple dataspaces without setting their extents
+ */
+ /* Create the file */
+ fid1 = H5Fcreate(BASICFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Restore dims1[0], which was set to H5S_UNLIMITED above */
+ dims1[0] = SPACE1_DIM1;
+
+ /* sid1 is H5S_SIMPLE with no extent set; sid2 is a complete 1-D space */
+ sid1 = H5Screate(H5S_SIMPLE);
+ CHECK(sid1, FAIL, "H5Screate");
+ sid2 = H5Screate_simple(1, dims1, dims1);
+ CHECK(sid2, FAIL, "H5Screate");
+#ifndef NO_VALIDATE_DATASPACE
+ /* This dataset's space has no extent; it should not be created */
+ H5E_BEGIN_TRY
+ {
+ dset1 = H5Dcreate2(fid1, BASICDATASET, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY
+ VERIFY(dset1, FAIL, "H5Dcreate2");
+#endif
+ /* This dataset's space is complete, so creation should succeed */
+ dset1 = H5Dcreate2(fid1, BASICDATASET2, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset1, FAIL, "H5Dcreate2");
+
+ /* Try some writes with the bad dataspace (sid1) */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dwrite(dset1, H5T_NATIVE_INT, sid1, H5S_ALL, H5P_DEFAULT, &n);
+ }
+ H5E_END_TRY
+ VERIFY(ret, FAIL, "H5Dwrite");
+#ifndef NO_VALIDATE_DATASPACE
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, sid1, H5P_DEFAULT, &n);
+ }
+ H5E_END_TRY
+ VERIFY(ret, FAIL, "H5Dwrite");
+
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dwrite(dset1, H5T_NATIVE_INT, sid1, sid1, H5P_DEFAULT, &n);
+ }
+ H5E_END_TRY
+ VERIFY(ret, FAIL, "H5Dwrite");
+#endif
+ /* Try to iterate using the bad dataspace */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Diterate(&n, H5T_NATIVE_INT, sid1, NULL, NULL);
+ }
+ H5E_END_TRY
+ VERIFY(ret, FAIL, "H5Diterate");
+
+ /* Try to fill using the bad dataspace */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dfill(NULL, H5T_NATIVE_INT, &n, H5T_NATIVE_INT, sid1);
+ }
+ H5E_END_TRY
+ VERIFY(ret, FAIL, "H5Dfill");
+#ifndef NO_VALIDATE_DATASPACE
+ /* Now use the bad dataspace as the space for an attribute */
+ H5E_BEGIN_TRY
+ {
+ aid1 = H5Acreate2(dset1, BASICATTR, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY
+ VERIFY(aid1, FAIL, "H5Acreate2");
+#endif
+ /* Make sure that dataspace reads using the bad dataspace fail */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dread(dset1, H5T_NATIVE_INT, sid1, H5S_ALL, H5P_DEFAULT, &n);
+ }
+ H5E_END_TRY
+ VERIFY(ret, FAIL, "H5Dread");
+#ifndef NO_VALIDATE_DATASPACE
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, sid1, H5P_DEFAULT, &n);
+ }
+ H5E_END_TRY
+ VERIFY(ret, FAIL, "H5Dread");
+
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dread(dset1, H5T_NATIVE_INT, sid1, sid1, H5P_DEFAULT, &n);
+ }
+ H5E_END_TRY
+ VERIFY(ret, FAIL, "H5Dread");
+#endif
+ /* Clean up */
+ ret = H5Dclose(dset1);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_h5s_basic() */
+
+/****************************************************************
+**
+** test_h5s_null(): Test NULL dataspace
+**
+****************************************************************/
+static void
+test_h5s_null(void)
+{
+ hid_t fid; /* File ID */
+ hid_t sid; /* Dataspace IDs */
+ hid_t dset_sid, dset_sid2; /* Dataspace IDs */
+ hid_t attr_sid; /* Dataspace IDs */
+ hid_t did; /* Dataset ID */
+ hid_t attr; /*Attribute ID */
+ H5S_class_t stype; /* dataspace type */
+ hssize_t nelem; /* Number of elements */
+ unsigned uval = 2; /* Buffer for writing to dataset */
+ int val = 1; /* Buffer for writing to attribute */
+ H5S_sel_type sel_type; /* Type of selection currently */
+ hsize_t dims[1] = {10}; /* Dimensions for converting null dataspace to simple */
+ H5S_class_t space_type; /* Type of dataspace */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Null Dataspace\n"));
+
+ /* Create the file */
+ fid = H5Fcreate(NULLFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ sid = H5Screate(H5S_NULL);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Check that the null dataspace actually has 0 elements */
+ nelem = H5Sget_simple_extent_npoints(sid);
+ VERIFY(nelem, 0, "H5Sget_simple_extent_npoints");
+
+ /* Check that the dataspace was created with an "all" selection */
+ sel_type = H5Sget_select_type(sid);
+ VERIFY(sel_type, H5S_SEL_ALL, "H5Sget_select_type");
+
+ /* Check that the null dataspace has 0 elements selected */
+ nelem = H5Sget_select_npoints(sid);
+ VERIFY(nelem, 0, "H5Sget_select_npoints");
+
+ /* Change to "none" selection */
+ ret = H5Sselect_none(sid);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ /* Check that the null dataspace has 0 elements selected */
+ nelem = H5Sget_select_npoints(sid);
+ VERIFY(nelem, 0, "H5Sget_select_npoints");
+
+ /* Check to be sure we can't set a hyperslab selection on a null dataspace */
+ H5E_BEGIN_TRY
+ {
+ hsize_t start[1] = {0};
+ hsize_t count[1] = {0};
+
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL, count, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Check to be sure we can't set a point selection on a null dataspace */
+ H5E_BEGIN_TRY
+ {
+ hsize_t coord[1][1]; /* Coordinates for point selection */
+
+ coord[0][0] = 0;
+ ret = H5Sselect_elements(sid, H5S_SELECT_SET, (size_t)1, (const hsize_t *)coord);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Sselect_elements");
+
+ /* Create first dataset */
+ did = H5Dcreate2(fid, NULLDATASET, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dcreate2");
+
+ /* Write "nothing" to the dataset */
+ ret = H5Dwrite(did, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &uval);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Write "nothing" to the dataset (with type conversion :-) */
+ ret = H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &val);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Try reading from the dataset (make certain our buffer is unmodified) */
+ ret = H5Dread(did, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &uval);
+ CHECK(ret, FAIL, "H5Dread");
+ VERIFY(uval, 2, "H5Dread");
+
+ /* Try reading from the dataset (with type conversion :-) (make certain our buffer is unmodified) */
+ ret = H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &val);
+ CHECK(ret, FAIL, "H5Dread");
+ VERIFY(val, 1, "H5Dread");
+
+ /* Create an attribute for the group */
+ attr = H5Acreate2(did, NULLATTR, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Acreate2");
+
+ /* Write "nothing" to the attribute */
+ ret = H5Awrite(attr, H5T_NATIVE_INT, &val);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Write "nothing" to the attribute (with type conversion :-) */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, &uval);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Try reading from the attribute (make certain our buffer is unmodified) */
+ ret = H5Aread(attr, H5T_NATIVE_INT, &val);
+ CHECK(ret, FAIL, "H5Aread");
+ VERIFY(val, 1, "H5Aread");
+
+ /* Try reading from the attribute (with type conversion :-) (make certain our buffer is unmodified) */
+ ret = H5Aread(attr, H5T_NATIVE_UINT, &uval);
+ CHECK(ret, FAIL, "H5Aread");
+ VERIFY(uval, 2, "H5Aread");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close the dataset */
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Verify that we've got the right kind of dataspace */
+ space_type = H5Sget_simple_extent_type(sid);
+ VERIFY(space_type, H5S_NULL, "H5Sget_simple_extent_type");
+
+ /* Convert the null dataspace to a simple dataspace */
+ ret = H5Sset_extent_simple(sid, 1, dims, NULL);
+ CHECK(ret, FAIL, "H5Sset_extent_simple");
+
+ /* Verify that we've got the right kind of dataspace now */
+ space_type = H5Sget_simple_extent_type(sid);
+ VERIFY(space_type, H5S_SIMPLE, "H5Sget_simple_extent_type");
+
+ /* Close the dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close the file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /*============================================
+ * Reopen the file to check the dataspace
+ *============================================
+ */
+ fid = H5Fopen(NULLFILE, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Reopen the dataset */
+ did = H5Dopen2(fid, NULLDATASET, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dopen2");
+
+ /* Get the space of the dataset */
+ dset_sid = H5Dget_space(did);
+ CHECK(dset_sid, FAIL, "H5Dget_space");
+
+ /* Query the NULL dataspace */
+ dset_sid2 = H5Scopy(dset_sid);
+ CHECK(dset_sid2, FAIL, "H5Scopy");
+
+ /* Verify the class type of dataspace */
+ stype = H5Sget_simple_extent_type(dset_sid2);
+ VERIFY(stype, H5S_NULL, "H5Sget_simple_extent_type");
+
+ /* Verify there is zero element in the dataspace */
+ ret = (herr_t)H5Sget_simple_extent_npoints(dset_sid2);
+ VERIFY(ret, 0, "H5Sget_simple_extent_npoints");
+
+ /* Try reading from the dataset (make certain our buffer is unmodified) */
+ ret = H5Dread(did, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &uval);
+ CHECK(ret, FAIL, "H5Dread");
+ VERIFY(uval, 2, "H5Dread");
+
+ /* Close the dataspace */
+ ret = H5Sclose(dset_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(dset_sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Open the attribute for the dataset */
+ attr = H5Aopen(did, NULLATTR, H5P_DEFAULT);
+ CHECK(attr, FAIL, "H5Aopen");
+
+ /* Get the space of the dataset */
+ attr_sid = H5Aget_space(attr);
+ CHECK(attr_sid, FAIL, "H5Aget_space");
+
+ /* Verify the class type of dataspace */
+ stype = H5Sget_simple_extent_type(attr_sid);
+ VERIFY(stype, H5S_NULL, "H5Sget_simple_extent_type");
+
+ /* Verify there is zero element in the dataspace */
+ ret = (herr_t)H5Sget_simple_extent_npoints(attr_sid);
+ VERIFY(ret, 0, "H5Sget_simple_extent_npoints");
+
+ /* Close the dataspace */
+ ret = H5Sclose(attr_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Try reading from the attribute (make certain our buffer is unmodified) */
+ ret = H5Aread(attr, H5T_NATIVE_INT, &val);
+ CHECK(ret, FAIL, "H5Aread");
+ VERIFY(val, 1, "H5Aread");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close the dataset */
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close the file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* end test_h5s_null() */
+
+/****************************************************************
+**
+** test_h5s_zero_dim(): Test the code for dataspace with zero dimension size
+**
+****************************************************************/
+static void
+test_h5s_zero_dim(void)
+{
+    /* Verify that dataspaces with a zero-sized dimension behave correctly:
+     * they can back contiguous, chunked, compact, and external datasets as
+     * well as attributes; reads/writes against them transfer no data (read
+     * buffers must stay unmodified); and such dataspaces can later be
+     * extended to a normal size and shrunk back to zero.  The whole sequence
+     * is repeated for each space-allocation time (early/late/incremental).
+     */
+    hid_t            fid1;           /* HDF5 file ID */
+    hid_t            sid1, attr_sid; /* Dataspace IDs */
+    hid_t            sid_chunk;      /* Dataspace ID for chunked dataset */
+    hid_t            dset1;          /* Dataset ID */
+    hid_t            plist_id;       /* Dataset creation property list */
+    hid_t            attr;           /* Attribute ID */
+    int              rank;           /* Logical rank of dataspace */
+    hsize_t          dims1[]       = {0, SPACE1_DIM2, SPACE1_DIM3};
+    hsize_t          max_dims[]    = {SPACE1_DIM1 + 1, SPACE1_DIM2, SPACE1_DIM3};
+    hsize_t          extend_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+    hsize_t          chunk_dims[]  = {SPACE1_DIM1, SPACE1_DIM2 / 3, SPACE1_DIM3};
+    hsize_t          tdims[SPACE1_RANK]; /* Dimension array to test with */
+    int              wdata[SPACE1_DIM2][SPACE1_DIM3];
+    int              rdata[SPACE1_DIM2][SPACE1_DIM3];
+    short            wdata_short[SPACE1_DIM2][SPACE1_DIM3];
+    short            rdata_short[SPACE1_DIM2][SPACE1_DIM3];
+    int              wdata_real[SPACE1_DIM1][SPACE1_DIM2][SPACE1_DIM3];
+    int              rdata_real[SPACE1_DIM1][SPACE1_DIM2][SPACE1_DIM3];
+#ifndef NO_CHECK_SELECTION_BOUNDS
+    int              val     = 3;
+    hsize_t          start[] = {0, 0, 0};
+    hsize_t          count[] = {3, 15, 13};
+    hsize_t          coord[1][3]; /* Coordinates for point selection */
+#endif
+    hssize_t         nelem;      /* Number of elements */
+    H5S_sel_type     sel_type;   /* Type of selection currently */
+    H5S_class_t      stype;      /* Dataspace type */
+    H5D_alloc_time_t alloc_time; /* Space allocation time */
+    herr_t           ret;        /* Generic return value */
+    unsigned int     i, j, k;
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Dataspace with zero dimension size\n"));
+
+    /* Initialize the write buffers with known data and the read buffers with
+     * the sentinel value 7, so an unmodified read buffer can be detected. */
+    for (i = 0; i < SPACE1_DIM2; i++)
+        for (j = 0; j < SPACE1_DIM3; j++) {
+            wdata[i][j]       = (int)(i + j);
+            rdata[i][j]       = 7;
+            wdata_short[i][j] = (short)(i + j);
+            rdata_short[i][j] = 7;
+        }
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        for (j = 0; j < SPACE1_DIM2; j++)
+            for (k = 0; k < SPACE1_DIM3; k++)
+                wdata_real[i][j][k] = (int)(i + j + k);
+
+    /* Test with different space allocation times */
+    for (alloc_time = H5D_ALLOC_TIME_EARLY; alloc_time <= H5D_ALLOC_TIME_INCR; alloc_time++) {
+
+        /* Make sure we can create the space with the dimension size 0 (starting from v1.8.7).
+         * The dimension doesn't need to be unlimited. */
+        dims1[0] = 0;
+        dims1[1] = SPACE1_DIM2;
+        dims1[2] = SPACE1_DIM3;
+        sid1     = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+        CHECK(sid1, FAIL, "H5Screate_simple");
+
+        ret = H5Sclose(sid1);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        sid1 = H5Screate(H5S_SIMPLE);
+        CHECK(sid1, FAIL, "H5Screate");
+
+        /* SID1 has the 1st dimension size as zero.  The maximal dimension will be
+         * the same as the dimension because of the NULL passed in. */
+        ret = H5Sset_extent_simple(sid1, SPACE1_RANK, dims1, NULL);
+        CHECK(ret, FAIL, "H5Sset_extent_simple");
+
+        /* Check that the dataspace actually has 0 elements */
+        nelem = H5Sget_simple_extent_npoints(sid1);
+        VERIFY(nelem, 0, "H5Sget_simple_extent_npoints");
+
+        /* Check that the dataspace was created with an "all" selection */
+        sel_type = H5Sget_select_type(sid1);
+        VERIFY(sel_type, H5S_SEL_ALL, "H5Sget_select_type");
+
+        /* Check that the dataspace has 0 elements selected */
+        nelem = H5Sget_select_npoints(sid1);
+        VERIFY(nelem, 0, "H5Sget_select_npoints");
+
+        /* Change to "none" selection */
+        ret = H5Sselect_none(sid1);
+        CHECK(ret, FAIL, "H5Sselect_none");
+
+        /* Check that the dataspace has 0 elements selected */
+        nelem = H5Sget_select_npoints(sid1);
+        VERIFY(nelem, 0, "H5Sget_select_npoints");
+
+        /* Try to select all dataspace */
+        ret = H5Sselect_all(sid1);
+        CHECK(ret, FAIL, "H5Sselect_all");
+
+        /* Check that the dataspace has 0 elements selected */
+        nelem = H5Sget_select_npoints(sid1);
+        VERIFY(nelem, 0, "H5Sget_select_npoints");
+
+        /* Create the dataspace for chunked dataset with the first dimension size as zero.
+         * The maximal dimensions are bigger than the dimensions for later expansion. */
+        sid_chunk = H5Screate_simple(SPACE1_RANK, dims1, max_dims);
+        CHECK(sid_chunk, FAIL, "H5Screate_simple");
+
+        /*============================================
+         * Make sure we can use 0-dimension to create
+         * contiguous, chunked, compact, and external
+         * datasets, and also attribute.
+         *============================================
+         */
+        fid1 = H5Fcreate(ZEROFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(fid1, FAIL, "H5Fcreate");
+
+        /*===================== Contiguous dataset =======================*/
+        plist_id = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(plist_id, FAIL, "H5Pcreate");
+
+        ret = H5Pset_alloc_time(plist_id, alloc_time);
+        CHECK(ret, FAIL, "H5Pset_alloc_time");
+
+        dset1 = H5Dcreate2(fid1, BASICDATASET, H5T_NATIVE_INT, sid1, H5P_DEFAULT, plist_id, H5P_DEFAULT);
+        CHECK(dset1, FAIL, "H5Dcreate2");
+
+        ret = H5Pclose(plist_id);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        /* Write "nothing" to the dataset */
+        ret = H5Dwrite(dset1, H5T_NATIVE_INT, sid1, H5S_ALL, H5P_DEFAULT, wdata);
+        CHECK(ret, FAIL, "H5Dwrite");
+
+        ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
+        CHECK(ret, FAIL, "H5Fflush");
+
+        /* Try reading from the dataset (make certain our buffer is unmodified) */
+        ret = H5Dread(dset1, H5T_NATIVE_INT, sid1, H5S_ALL, H5P_DEFAULT, rdata);
+        CHECK(ret, FAIL, "H5Dread");
+
+        /* Check results */
+        for (i = 0; i < SPACE1_DIM2; i++) {
+            for (j = 0; j < SPACE1_DIM3; j++) {
+                if (rdata[i][j] != 7) {
+                    H5_FAILED();
+                    HDprintf("element [%d][%d] is %d but should have been 7\n", i, j, rdata[i][j]);
+                }
+            }
+        }
+
+        /* Write "nothing" to the dataset (with type conversion :-) */
+        ret = H5Dwrite(dset1, H5T_NATIVE_SHORT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata_short);
+        CHECK(ret, FAIL, "H5Dwrite");
+
+        ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
+        CHECK(ret, FAIL, "H5Fflush");
+
+        /* Try reading from the dataset (with type conversion :-) (make certain our buffer is
+         * unmodified).  The memory datatype must be H5T_NATIVE_SHORT to describe the short
+         * read buffer; H5T_NATIVE_INT would mis-describe the buffer's element size. */
+        ret = H5Dread(dset1, H5T_NATIVE_SHORT, sid1, H5S_ALL, H5P_DEFAULT, rdata_short);
+        CHECK(ret, FAIL, "H5Dread");
+
+        /* Check results */
+        for (i = 0; i < SPACE1_DIM2; i++) {
+            for (j = 0; j < SPACE1_DIM3; j++) {
+                if (rdata_short[i][j] != 7) {
+                    H5_FAILED();
+                    HDprintf("element [%d][%d] is %d but should have been 7\n", i, j, rdata_short[i][j]);
+                }
+            }
+        }
+#ifndef NO_CHECK_SELECTION_BOUNDS
+        /* Select a hyperslab beyond its current dimension sizes, then try to write
+         * the data.  It should fail. */
+        ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL);
+        CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+        H5E_BEGIN_TRY
+        {
+            ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, sid1, H5P_DEFAULT, wdata);
+        }
+        H5E_END_TRY;
+        VERIFY(ret, FAIL, "H5Dwrite");
+#endif
+        /* Change to "none" selection */
+        ret = H5Sselect_none(sid1);
+        CHECK(ret, FAIL, "H5Sselect_none");
+#ifndef NO_CHECK_SELECTION_BOUNDS
+        /* Select a point beyond the dimension size, then try to write the data.
+         * It should fail. */
+        coord[0][0] = 2;
+        coord[0][1] = 5;
+        coord[0][2] = 3;
+        ret         = H5Sselect_elements(sid1, H5S_SELECT_SET, (size_t)1, (const hsize_t *)coord);
+        CHECK(ret, FAIL, "H5Sselect_elements");
+
+        H5E_BEGIN_TRY
+        {
+            ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, sid1, H5P_DEFAULT, &val);
+        }
+        H5E_END_TRY;
+        VERIFY(ret, FAIL, "H5Dwrite");
+#endif
+        /* Restore the selection to all */
+        ret = H5Sselect_all(sid1);
+        CHECK(ret, FAIL, "H5Sselect_all");
+
+        ret = H5Dclose(dset1);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /*=================== Chunked dataset ====================*/
+        plist_id = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(plist_id, FAIL, "H5Pcreate");
+
+        ret = H5Pset_chunk(plist_id, SPACE1_RANK, chunk_dims);
+        CHECK(ret, FAIL, "H5Pset_chunk");
+
+        /* ret = H5Pset_alloc_time(plist_id, alloc_time); */
+        /* CHECK(ret, FAIL, "H5Pset_alloc_time"); */
+
+        dset1 =
+            H5Dcreate2(fid1, BASICDATASET1, H5T_NATIVE_INT, sid_chunk, H5P_DEFAULT, plist_id, H5P_DEFAULT);
+        CHECK(dset1, FAIL, "H5Dcreate2");
+
+        /* Write "nothing" to the dataset */
+        ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+        CHECK(ret, FAIL, "H5Dwrite");
+
+        ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
+        CHECK(ret, FAIL, "H5Fflush");
+
+        /* Try reading from the dataset (make certain our buffer is unmodified) */
+        ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+        CHECK(ret, FAIL, "H5Dread");
+
+        /* Check results */
+        for (i = 0; i < SPACE1_DIM2; i++)
+            for (j = 0; j < SPACE1_DIM3; j++) {
+                if (rdata[i][j] != 7) {
+                    H5_FAILED();
+                    HDprintf("element [%d][%d] is %d but should have been 7\n", i, j, rdata[i][j]);
+                }
+            }
+
+        /* Now extend the dataset to SPACE1_DIM1*SPACE1_DIM2*SPACE1_DIM3 and make sure
+         * we can write data to it */
+        extend_dims[0] = SPACE1_DIM1;
+        ret            = H5Dset_extent(dset1, extend_dims);
+        CHECK(ret, FAIL, "H5Dset_extent");
+
+        ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata_real);
+        CHECK(ret, FAIL, "H5Dwrite");
+
+        ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
+        CHECK(ret, FAIL, "H5Fflush");
+
+        ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata_real);
+        CHECK(ret, FAIL, "H5Dread");
+
+        /* Check results */
+        for (i = 0; i < SPACE1_DIM1; i++) {
+            for (j = 0; j < SPACE1_DIM2; j++) {
+                for (k = 0; k < SPACE1_DIM3; k++) {
+                    if (rdata_real[i][j][k] != wdata_real[i][j][k]) {
+                        H5_FAILED();
+                        HDprintf("element [%d][%d][%d] is %d but should have been %d\n", i, j, k,
+                                 rdata_real[i][j][k], wdata_real[i][j][k]);
+                    }
+                }
+            }
+        }
+
+        /* Now shrink the first dimension size of the dataset to 0 and make sure no data is in it */
+        extend_dims[0] = 0;
+        ret            = H5Dset_extent(dset1, extend_dims);
+        CHECK(ret, FAIL, "H5Dset_extent");
+
+        ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
+        CHECK(ret, FAIL, "H5Fflush");
+
+        /* Try reading from the dataset (make certain our buffer is unmodified) */
+        ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+        CHECK(ret, FAIL, "H5Dread");
+
+        /* Check results */
+        for (i = 0; i < SPACE1_DIM2; i++)
+            for (j = 0; j < SPACE1_DIM3; j++) {
+                if (rdata[i][j] != 7) {
+                    H5_FAILED();
+                    HDprintf("element [%d][%d] is %d but should have been 7\n", i, j, rdata[i][j]);
+                }
+            }
+#ifndef NO_CHECK_SELECTION_BOUNDS
+        /* Now extend the first dimension size of the dataset to SPACE1_DIM1*3 past the maximal size.
+         * It is supposed to fail. */
+        extend_dims[0] = SPACE1_DIM1 * 3;
+        H5E_BEGIN_TRY
+        {
+            ret = H5Dset_extent(dset1, extend_dims);
+        }
+        H5E_END_TRY;
+        VERIFY(ret, FAIL, "H5Dset_extent");
+#endif
+        ret = H5Pclose(plist_id);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        ret = H5Dclose(dset1);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /*=================== Compact dataset =====================*/
+        plist_id = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(plist_id, FAIL, "H5Pcreate");
+
+        ret = H5Pset_layout(plist_id, H5D_COMPACT);
+        CHECK(ret, FAIL, "H5Pset_layout");
+
+        /* Don't set the allocation time for compact storage datasets (must be early) */
+
+        dset1 = H5Dcreate2(fid1, BASICDATASET2, H5T_NATIVE_INT, sid1, H5P_DEFAULT, plist_id, H5P_DEFAULT);
+        CHECK(dset1, FAIL, "H5Dcreate2");
+
+        /* Write "nothing" to the dataset */
+        ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+        CHECK(ret, FAIL, "H5Dwrite");
+
+        ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
+        CHECK(ret, FAIL, "H5Fflush");
+
+        /* Try reading from the dataset (make certain our buffer is unmodified) */
+        ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+        CHECK(ret, FAIL, "H5Dread");
+
+        /* Check results */
+        for (i = 0; i < SPACE1_DIM2; i++)
+            for (j = 0; j < SPACE1_DIM3; j++) {
+                if (rdata[i][j] != 7) {
+                    H5_FAILED();
+                    HDprintf("element [%d][%d] is %d but should have been 7\n", i, j, rdata[i][j]);
+                }
+            }
+
+        ret = H5Pclose(plist_id);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        ret = H5Dclose(dset1);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /*=========== Contiguous dataset with external storage ============*/
+        plist_id = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(plist_id, FAIL, "H5Pcreate");
+
+        /* Change the DCPL for contiguous layout with external storage.  The size of the reserved
+         * space in the external file is the size of the dataset (zero because one dimension size is zero).
+         * There's no need to clean up the external file since the library doesn't create it
+         * until the data is written to it. */
+        ret = H5Pset_external(plist_id, EXTFILE_NAME, (off_t)0, (hsize_t)0);
+        CHECK(ret, FAIL, "H5Pset_external");
+
+        ret = H5Pset_alloc_time(plist_id, alloc_time);
+        CHECK(ret, FAIL, "H5Pset_alloc_time");
+
+        dset1 = H5Dcreate2(fid1, BASICDATASET3, H5T_NATIVE_INT, sid1, H5P_DEFAULT, plist_id, H5P_DEFAULT);
+        CHECK(dset1, FAIL, "H5Dcreate2");
+
+        /* Write "nothing" to the dataset */
+        ret = H5Dwrite(dset1, H5T_NATIVE_INT, sid1, H5S_ALL, H5P_DEFAULT, wdata);
+        CHECK(ret, FAIL, "H5Dwrite");
+
+        ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
+        CHECK(ret, FAIL, "H5Fflush");
+
+        /* Try reading from the dataset (make certain our buffer is unmodified) */
+        ret = H5Dread(dset1, H5T_NATIVE_INT, sid1, H5S_ALL, H5P_DEFAULT, rdata);
+        CHECK(ret, FAIL, "H5Dread");
+
+        /* Check results */
+        for (i = 0; i < SPACE1_DIM2; i++) {
+            for (j = 0; j < SPACE1_DIM3; j++) {
+                if (rdata[i][j] != 7) {
+                    H5_FAILED();
+                    HDprintf("element [%d][%d] is %d but should have been 7\n", i, j, rdata[i][j]);
+                }
+            }
+        }
+
+        ret = H5Pclose(plist_id);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        ret = H5Dclose(dset1);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /*=============== Create an attribute for the file ================*/
+        attr = H5Acreate2(fid1, NULLATTR, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(attr, FAIL, "H5Acreate2");
+
+        /* Write "nothing" to the attribute */
+        ret = H5Awrite(attr, H5T_NATIVE_INT, wdata);
+        CHECK(ret, FAIL, "H5Awrite");
+
+        ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
+        CHECK(ret, FAIL, "H5Fflush");
+
+        /* Try reading from the attribute (make certain our buffer is unmodified) */
+        ret = H5Aread(attr, H5T_NATIVE_INT, rdata);
+        CHECK(ret, FAIL, "H5Aread");
+
+        /* Check results */
+        for (i = 0; i < SPACE1_DIM2; i++) {
+            for (j = 0; j < SPACE1_DIM3; j++) {
+                if (rdata[i][j] != 7) {
+                    H5_FAILED();
+                    HDprintf("element [%d][%d] is %d but should have been 7\n", i, j, rdata[i][j]);
+                }
+            }
+        }
+
+        /* Write "nothing" to the attribute (with type conversion :-) */
+        ret = H5Awrite(attr, H5T_NATIVE_SHORT, wdata_short);
+        CHECK(ret, FAIL, "H5Awrite");
+
+        ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
+        CHECK(ret, FAIL, "H5Fflush");
+
+        /* Try reading from the attribute (with type conversion :-) (make certain our buffer is unmodified) */
+        ret = H5Aread(attr, H5T_NATIVE_SHORT, rdata_short);
+        CHECK(ret, FAIL, "H5Aread");
+
+        /* Check results */
+        for (i = 0; i < SPACE1_DIM2; i++) {
+            for (j = 0; j < SPACE1_DIM3; j++) {
+                if (rdata_short[i][j] != 7) {
+                    H5_FAILED();
+                    HDprintf("element [%d][%d] is %d but should have been 7\n", i, j, rdata_short[i][j]);
+                }
+            }
+        }
+
+        /* Close attribute */
+        ret = H5Aclose(attr);
+        CHECK(ret, FAIL, "H5Aclose");
+
+        /*===============================================================
+         * Extend the dimension to make it a normal dataspace (3x15x13).
+         * Verify that data can be written to and read from the chunked
+         * dataset now.
+         *===============================================================
+         */
+        dims1[0] = SPACE1_DIM1;
+        ret      = H5Sset_extent_simple(sid_chunk, SPACE1_RANK, dims1, max_dims);
+        CHECK(ret, FAIL, "H5Sset_extent_simple");
+
+        nelem = H5Sget_simple_extent_npoints(sid_chunk);
+        CHECK(nelem, FAIL, "H5Sget_simple_extent_npoints");
+        VERIFY(nelem, SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3, "H5Sget_simple_extent_npoints");
+
+        rank = H5Sget_simple_extent_ndims(sid_chunk);
+        CHECK(rank, FAIL, "H5Sget_simple_extent_ndims");
+        VERIFY(rank, SPACE1_RANK, "H5Sget_simple_extent_ndims");
+
+        rank = H5Sget_simple_extent_dims(sid_chunk, tdims, NULL);
+        CHECK(rank, FAIL, "H5Sget_simple_extent_dims");
+        VERIFY(HDmemcmp(tdims, dims1, SPACE1_RANK * sizeof(hsize_t)), 0, "H5Sget_simple_extent_dims");
+
+        /* Set it to chunked dataset */
+        plist_id = H5Pcreate(H5P_DATASET_CREATE);
+        CHECK(plist_id, FAIL, "H5Pcreate");
+
+        ret = H5Pset_chunk(plist_id, SPACE1_RANK, chunk_dims);
+        CHECK(ret, FAIL, "H5Pset_chunk");
+
+        ret = H5Pset_alloc_time(plist_id, alloc_time);
+        CHECK(ret, FAIL, "H5Pset_alloc_time");
+
+        dset1 =
+            H5Dcreate2(fid1, BASICDATASET4, H5T_NATIVE_INT, sid_chunk, H5P_DEFAULT, plist_id, H5P_DEFAULT);
+        CHECK(dset1, FAIL, "H5Dcreate2");
+
+        ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata_real);
+        CHECK(ret, FAIL, "H5Dwrite");
+
+        ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
+        CHECK(ret, FAIL, "H5Fflush");
+
+        ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata_real);
+        CHECK(ret, FAIL, "H5Dread");
+
+        /* Check results */
+        for (i = 0; i < SPACE1_DIM1; i++) {
+            for (j = 0; j < SPACE1_DIM2; j++) {
+                for (k = 0; k < SPACE1_DIM3; k++) {
+                    if (rdata_real[i][j][k] != wdata_real[i][j][k]) {
+                        H5_FAILED();
+                        HDprintf("element [%d][%d][%d] is %d but should have been %d\n", i, j, k,
+                                 rdata_real[i][j][k], wdata_real[i][j][k]);
+                    }
+                }
+            }
+        }
+
+        ret = H5Pclose(plist_id);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        ret = H5Dclose(dset1);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* Change the dimensions to make them zero size again (0x0x0).  Verify that
+         * no element is in the dataspace. */
+        dims1[0] = dims1[1] = dims1[2] = 0;
+        ret                            = H5Sset_extent_simple(sid_chunk, SPACE1_RANK, dims1, NULL);
+        CHECK(ret, FAIL, "H5Sset_extent_simple");
+
+        /* Check that the dataspace actually has 0 elements */
+        nelem = H5Sget_simple_extent_npoints(sid_chunk);
+        VERIFY(nelem, 0, "H5Sget_simple_extent_npoints");
+
+        /* Check that the dataspace was created with an "all" selection */
+        sel_type = H5Sget_select_type(sid_chunk);
+        VERIFY(sel_type, H5S_SEL_ALL, "H5Sget_select_type");
+
+        /* Check that the dataspace has 0 elements selected */
+        nelem = H5Sget_select_npoints(sid_chunk);
+        VERIFY(nelem, 0, "H5Sget_select_npoints");
+
+        /* Change to "none" selection */
+        ret = H5Sselect_none(sid_chunk);
+        CHECK(ret, FAIL, "H5Sselect_none");
+
+        /* Check that the dataspace has 0 elements selected */
+        nelem = H5Sget_select_npoints(sid_chunk);
+        VERIFY(nelem, 0, "H5Sget_select_npoints");
+
+        ret = H5Sclose(sid_chunk);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        ret = H5Sclose(sid1);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        ret = H5Fclose(fid1);
+        CHECK(ret, FAIL, "H5Fclose");
+
+        /*============================================
+         * Reopen the file to check the dataspace
+         *============================================
+         */
+        fid1 = H5Fopen(ZEROFILE, H5F_ACC_RDONLY, H5P_DEFAULT);
+        CHECK(fid1, FAIL, "H5Fopen");
+
+        /* Reopen the chunked dataset */
+        dset1 = H5Dopen2(fid1, BASICDATASET1, H5P_DEFAULT);
+        CHECK(dset1, FAIL, "H5Dopen2");
+
+        /* Get the space of the dataset and query it */
+        sid1 = H5Dget_space(dset1);
+        CHECK(sid1, FAIL, "H5Dget_space");
+
+        /* Verify the class type of dataspace */
+        stype = H5Sget_simple_extent_type(sid1);
+        VERIFY(stype, H5S_SIMPLE, "H5Sget_simple_extent_type");
+
+        /* Verify there is zero element in the dataspace */
+        nelem = H5Sget_simple_extent_npoints(sid1);
+        VERIFY(nelem, 0, "H5Sget_simple_extent_npoints");
+
+        /* Verify the dimension sizes are correct */
+        rank = H5Sget_simple_extent_dims(sid1, tdims, NULL);
+        CHECK(rank, FAIL, "H5Sget_simple_extent_dims");
+        VERIFY(tdims[0], 0, "H5Sget_simple_extent_dims");
+        VERIFY(tdims[1], SPACE1_DIM2, "H5Sget_simple_extent_dims");
+        VERIFY(tdims[2], SPACE1_DIM3, "H5Sget_simple_extent_dims");
+
+        /* Try reading from the dataset (make certain our buffer is unmodified) */
+        ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+        CHECK(ret, FAIL, "H5Dread");
+
+        /* Check results */
+        for (i = 0; i < SPACE1_DIM2; i++) {
+            for (j = 0; j < SPACE1_DIM3; j++) {
+                if (rdata[i][j] != 7) {
+                    H5_FAILED();
+                    HDprintf("element [%d][%d] is %d but should have been 7\n", i, j, rdata[i][j]);
+                }
+            }
+        }
+
+        /* Close the dataset and its dataspace */
+        ret = H5Dclose(dset1);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        ret = H5Sclose(sid1);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        /* Open the attribute for the file */
+        attr = H5Aopen(fid1, NULLATTR, H5P_DEFAULT);
+        CHECK(attr, FAIL, "H5Aopen");
+
+        /* Get the space of the dataset */
+        attr_sid = H5Aget_space(attr);
+        CHECK(attr_sid, FAIL, "H5Aget_space");
+
+        /* Verify the class type of dataspace */
+        stype = H5Sget_simple_extent_type(attr_sid);
+        VERIFY(stype, H5S_SIMPLE, "H5Sget_simple_extent_type");
+
+        /* Verify there is zero element in the dataspace */
+        nelem = H5Sget_simple_extent_npoints(attr_sid);
+        VERIFY(nelem, 0, "H5Sget_simple_extent_npoints");
+
+        /* Try reading from the attribute (make certain our buffer is unmodified) */
+        ret = H5Aread(attr, H5T_NATIVE_SHORT, rdata_short);
+        CHECK(ret, FAIL, "H5Aread");
+
+        /* Check results */
+        for (i = 0; i < SPACE1_DIM2; i++) {
+            for (j = 0; j < SPACE1_DIM3; j++) {
+                if (rdata_short[i][j] != 7) {
+                    H5_FAILED();
+                    HDprintf("element [%d][%d] is %d but should have been 7\n", i, j, rdata_short[i][j]);
+                }
+            }
+        }
+
+        /* Close attribute */
+        ret = H5Aclose(attr);
+        CHECK(ret, FAIL, "H5Aclose");
+
+        /* Close the dataspace */
+        ret = H5Sclose(attr_sid);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        ret = H5Fclose(fid1);
+        CHECK(ret, FAIL, "H5Fclose");
+    } /* end for */
+} /* test_h5s_zero_dim() */
+
+/****************************************************************
+**
+** test_h5s_encode(): Test H5S (dataspace) encoding and decoding.
+**
+** Note: See "RFC: H5Sencode/H5Sdecode Format Change".
+**
+****************************************************************/
+static void
+test_h5s_encode(H5F_libver_t low, H5F_libver_t high)
+{
+    /* Exercise H5Sencode2/H5Sdecode round-trips for three dataspace classes
+     * (simple with a hyperslab selection, null, and scalar), using a file
+     * access property list whose library-version bounds are set to the given
+     * low/high values.  Each encode is done twice: once with a NULL buffer to
+     * obtain the required size, then again into an allocated buffer.
+     */
+    hid_t          sid1, sid2, sid3; /* Dataspace IDs */
+    hid_t          decoded_sid1, decoded_sid2, decoded_sid3;
+    int            rank;      /* Logical rank of dataspace */
+    hid_t          fapl = -1; /* File access property list ID */
+    hsize_t        dims1[]   = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+    size_t         sbuf_size = 0, null_size = 0, scalar_size = 0;
+    unsigned char *sbuf = NULL, *null_sbuf = NULL, *scalar_buf = NULL;
+    hsize_t        tdims[4]; /* Dimension array to test with */
+    hssize_t       n;        /* Number of dataspace elements */
+    hsize_t        start[]  = {0, 0, 0};
+    hsize_t        stride[] = {2, 5, 3};
+    hsize_t        count[]  = {2, 2, 2};
+    hsize_t        block[]  = {1, 3, 1};
+    H5S_sel_type   sel_type;
+    H5S_class_t    space_type;
+    hssize_t       nblocks;
+    hid_t          ret_id; /* Generic hid_t return value */
+    herr_t         ret;    /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Dataspace Encoding and Decoding\n"));
+
+    /*-------------------------------------------------------------------------
+     * Test encoding and decoding of simple dataspace and hyperslab selection.
+     *-------------------------------------------------------------------------
+     */
+
+    /* Create the file access property list */
+    fapl = H5Pcreate(H5P_FILE_ACCESS);
+    CHECK(fapl, FAIL, "H5Pcreate");
+
+    /* Set low/high bounds in the fapl */
+    ret = H5Pset_libver_bounds(fapl, low, high);
+    CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+    /* Create the dataspace */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Set the hyperslab selection */
+    ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* First encode call with a NULL buffer only queries the needed size */
+    ret = H5Sencode2(sid1, NULL, &sbuf_size, fapl);
+    CHECK(ret, FAIL, "H5Sencode2");
+
+    if (sbuf_size > 0) {
+        sbuf = (unsigned char *)HDcalloc((size_t)1, sbuf_size);
+        CHECK_PTR(sbuf, "HDcalloc");
+    }
+
+    /* Try decoding bogus buffer (still all zeroes) -- must fail */
+    H5E_BEGIN_TRY
+    {
+        ret_id = H5Sdecode(sbuf);
+    }
+    H5E_END_TRY;
+    VERIFY(ret_id, FAIL, "H5Sdecode");
+
+    /* Encode the simple dataspace in a buffer with the fapl setting */
+    ret = H5Sencode2(sid1, sbuf, &sbuf_size, fapl);
+    CHECK(ret, FAIL, "H5Sencode2");
+
+    /* Decode from the dataspace buffer and return an object handle */
+    decoded_sid1 = H5Sdecode(sbuf);
+    CHECK(decoded_sid1, FAIL, "H5Sdecode");
+
+    /* Verify the decoded dataspace */
+    n = H5Sget_simple_extent_npoints(decoded_sid1);
+    CHECK(n, FAIL, "H5Sget_simple_extent_npoints");
+    VERIFY(n, SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3, "H5Sget_simple_extent_npoints");
+
+    /* Retrieve and verify the dataspace rank */
+    rank = H5Sget_simple_extent_ndims(decoded_sid1);
+    CHECK(rank, FAIL, "H5Sget_simple_extent_ndims");
+    VERIFY(rank, SPACE1_RANK, "H5Sget_simple_extent_ndims");
+
+    /* Retrieve and verify the dataspace dimensions */
+    rank = H5Sget_simple_extent_dims(decoded_sid1, tdims, NULL);
+    CHECK(rank, FAIL, "H5Sget_simple_extent_dims");
+    VERIFY(HDmemcmp(tdims, dims1, SPACE1_RANK * sizeof(hsize_t)), 0, "H5Sget_simple_extent_dims");
+
+    /* Verify the type of dataspace selection */
+    sel_type = H5Sget_select_type(decoded_sid1);
+    VERIFY(sel_type, H5S_SEL_HYPERSLABS, "H5Sget_select_type");
+
+    /* Verify the number of hyperslab blocks */
+    nblocks = H5Sget_select_hyper_nblocks(decoded_sid1);
+    VERIFY(nblocks, 2 * 2 * 2, "H5Sget_select_hyper_nblocks");
+
+    /* Close the dataspaces */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Sclose(decoded_sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /*-------------------------------------------------------------------------
+     * Test encoding and decoding of null dataspace.
+     *-------------------------------------------------------------------------
+     */
+    sid2 = H5Screate(H5S_NULL);
+    CHECK(sid2, FAIL, "H5Screate");
+
+    /* Query the buffer size needed to encode the null dataspace */
+    ret = H5Sencode2(sid2, NULL, &null_size, fapl);
+    CHECK(ret, FAIL, "H5Sencode2");
+
+    if (null_size > 0) {
+        null_sbuf = (unsigned char *)HDcalloc((size_t)1, null_size);
+        CHECK_PTR(null_sbuf, "HDcalloc");
+    }
+
+    /* Encode the null dataspace in the buffer */
+    ret = H5Sencode2(sid2, null_sbuf, &null_size, fapl);
+    CHECK(ret, FAIL, "H5Sencode2");
+
+    /* Decode from the dataspace buffer and return an object handle */
+    decoded_sid2 = H5Sdecode(null_sbuf);
+    CHECK(decoded_sid2, FAIL, "H5Sdecode");
+
+    /* Verify the decoded dataspace type */
+    space_type = H5Sget_simple_extent_type(decoded_sid2);
+    VERIFY(space_type, H5S_NULL, "H5Sget_simple_extent_type");
+
+    /* Close the dataspaces */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Sclose(decoded_sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /*-------------------------------------------------------------------------
+     * Test encoding and decoding of scalar dataspace.
+     *-------------------------------------------------------------------------
+     */
+    /* Create scalar dataspace */
+    sid3 = H5Screate(H5S_SCALAR);
+    CHECK(sid3, FAIL, "H5Screate");
+
+    /* Query the buffer size needed to encode the scalar dataspace */
+    ret = H5Sencode2(sid3, NULL, &scalar_size, fapl);
+    CHECK(ret, FAIL, "H5Sencode2");
+
+    if (scalar_size > 0) {
+        scalar_buf = (unsigned char *)HDcalloc((size_t)1, scalar_size);
+        CHECK_PTR(scalar_buf, "HDcalloc");
+    }
+
+    /* Encode the scalar dataspace in the buffer */
+    ret = H5Sencode2(sid3, scalar_buf, &scalar_size, fapl);
+    CHECK(ret, FAIL, "H5Sencode2");
+
+    /* Decode from the dataspace buffer and return an object handle */
+    decoded_sid3 = H5Sdecode(scalar_buf);
+    CHECK(decoded_sid3, FAIL, "H5Sdecode");
+
+    /* Verify extent type */
+    space_type = H5Sget_simple_extent_type(decoded_sid3);
+    VERIFY(space_type, H5S_SCALAR, "H5Sget_simple_extent_type");
+
+    /* Verify decoded dataspace: a scalar space has exactly one element... */
+    n = H5Sget_simple_extent_npoints(decoded_sid3);
+    CHECK(n, FAIL, "H5Sget_simple_extent_npoints");
+    VERIFY(n, 1, "H5Sget_simple_extent_npoints");
+
+    /* ...and rank zero */
+    rank = H5Sget_simple_extent_ndims(decoded_sid3);
+    CHECK(rank, FAIL, "H5Sget_simple_extent_ndims");
+    VERIFY(rank, 0, "H5Sget_simple_extent_ndims");
+
+    /* Close the dataspaces */
+    ret = H5Sclose(sid3);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Sclose(decoded_sid3);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close the file access property list */
+    ret = H5Pclose(fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Release resources */
+    if (sbuf)
+        HDfree(sbuf);
+    if (null_sbuf)
+        HDfree(null_sbuf);
+    if (scalar_buf)
+        HDfree(scalar_buf);
+} /* test_h5s_encode() */
+
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+
+/****************************************************************
+**
+** test_h5s_encode1(): Test H5S (dataspace) encoding and decoding
+** using the deprecated H5Sencode1() API.
+**
+****************************************************************/
+static void
+test_h5s_encode1(void)
+{
+ hid_t sid1, sid2, sid3; /* Dataspace ID */
+ hid_t decoded_sid1, decoded_sid2, decoded_sid3;
+ int rank; /* Logical rank of dataspace */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ size_t sbuf_size = 0, null_size = 0, scalar_size = 0;
+ unsigned char *sbuf = NULL, *null_sbuf = NULL, *scalar_buf = NULL;
+ hsize_t tdims[4]; /* Dimension array to test with */
+ hssize_t n; /* Number of dataspace elements */
+ hsize_t start[] = {0, 0, 0};
+ hsize_t stride[] = {2, 5, 3};
+ hsize_t count[] = {2, 2, 2};
+ hsize_t block[] = {1, 3, 1};
+ H5S_sel_type sel_type;
+ H5S_class_t space_type;
+ hssize_t nblocks;
+ hid_t ret_id; /* Generic hid_t return value */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Dataspace Encoding (H5Sencode1) and Decoding\n"));
+
+ /*-------------------------------------------------------------------------
+ * Test encoding and decoding of simple dataspace and hyperslab selection.
+ *-------------------------------------------------------------------------
+ */
+ /* Create the dataspace */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Set the hyperslab selection */
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Query the buffer size needed to encode the simple dataspace
+ * (H5Sencode1 takes no fapl; it always uses the default format) */
+ ret = H5Sencode1(sid1, NULL, &sbuf_size);
+ CHECK(ret, FAIL, "H5Sencode1");
+
+ if (sbuf_size > 0) {
+ sbuf = (unsigned char *)HDcalloc((size_t)1, sbuf_size);
+ CHECK_PTR(sbuf, "HDcalloc");
+ }
+
+ /* Try decoding bogus buffer (all zeros) -- must fail cleanly */
+ H5E_BEGIN_TRY
+ {
+ ret_id = H5Sdecode(sbuf);
+ }
+ H5E_END_TRY;
+ VERIFY(ret_id, FAIL, "H5Sdecode");
+
+ /* Encode the simple dataspace in the buffer */
+ ret = H5Sencode1(sid1, sbuf, &sbuf_size);
+ CHECK(ret, FAIL, "H5Sencode1");
+
+ /* Decode from the dataspace buffer and return an object handle */
+ decoded_sid1 = H5Sdecode(sbuf);
+ CHECK(decoded_sid1, FAIL, "H5Sdecode");
+
+ /* Verify the decoded dataspace */
+ n = H5Sget_simple_extent_npoints(decoded_sid1);
+ CHECK(n, FAIL, "H5Sget_simple_extent_npoints");
+ VERIFY(n, SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3, "H5Sget_simple_extent_npoints");
+
+ /* Retrieve and verify the dataspace rank */
+ rank = H5Sget_simple_extent_ndims(decoded_sid1);
+ CHECK(rank, FAIL, "H5Sget_simple_extent_ndims");
+ VERIFY(rank, SPACE1_RANK, "H5Sget_simple_extent_ndims");
+
+ /* Retrieve and verify the dataspace dimensions */
+ rank = H5Sget_simple_extent_dims(decoded_sid1, tdims, NULL);
+ CHECK(rank, FAIL, "H5Sget_simple_extent_dims");
+ VERIFY(HDmemcmp(tdims, dims1, SPACE1_RANK * sizeof(hsize_t)), 0, "H5Sget_simple_extent_dims");
+
+ /* Verify the type of dataspace selection */
+ sel_type = H5Sget_select_type(decoded_sid1);
+ VERIFY(sel_type, H5S_SEL_HYPERSLABS, "H5Sget_select_type");
+
+ /* Verify the number of hyperslab blocks */
+ nblocks = H5Sget_select_hyper_nblocks(decoded_sid1);
+ VERIFY(nblocks, 2 * 2 * 2, "H5Sget_select_hyper_nblocks");
+
+ /* Close the dataspaces */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(decoded_sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /*-------------------------------------------------------------------------
+ * Test encoding and decoding of null dataspace.
+ *-------------------------------------------------------------------------
+ */
+ sid2 = H5Screate(H5S_NULL);
+ CHECK(sid2, FAIL, "H5Screate");
+
+ /* Query the buffer size needed to encode the null dataspace */
+ ret = H5Sencode1(sid2, NULL, &null_size);
+ CHECK(ret, FAIL, "H5Sencode1");
+
+ if (null_size > 0) {
+ null_sbuf = (unsigned char *)HDcalloc((size_t)1, null_size);
+ CHECK_PTR(null_sbuf, "HDcalloc");
+ }
+
+ /* Encode the null dataspace in the buffer */
+ ret = H5Sencode1(sid2, null_sbuf, &null_size);
+ CHECK(ret, FAIL, "H5Sencode1");
+
+ /* Decode from the dataspace buffer and return an object handle */
+ decoded_sid2 = H5Sdecode(null_sbuf);
+ CHECK(decoded_sid2, FAIL, "H5Sdecode");
+
+ /* Verify the decoded dataspace type */
+ space_type = H5Sget_simple_extent_type(decoded_sid2);
+ VERIFY(space_type, H5S_NULL, "H5Sget_simple_extent_type");
+
+ /* Close the dataspaces */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(decoded_sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /*-------------------------------------------------------------------------
+ * Test encoding and decoding of scalar dataspace.
+ *-------------------------------------------------------------------------
+ */
+ /* Create scalar dataspace */
+ sid3 = H5Screate(H5S_SCALAR);
+ CHECK(sid3, FAIL, "H5Screate");
+
+ /* Query the buffer size needed to encode the scalar dataspace */
+ ret = H5Sencode1(sid3, NULL, &scalar_size);
+ CHECK(ret, FAIL, "H5Sencode1");
+
+ if (scalar_size > 0) {
+ scalar_buf = (unsigned char *)HDcalloc((size_t)1, scalar_size);
+ CHECK_PTR(scalar_buf, "HDcalloc");
+ }
+
+ /* Encode the scalar dataspace in the buffer */
+ ret = H5Sencode1(sid3, scalar_buf, &scalar_size);
+ CHECK(ret, FAIL, "H5Sencode1");
+
+ /* Decode from the dataspace buffer and return an object handle */
+ decoded_sid3 = H5Sdecode(scalar_buf);
+ CHECK(decoded_sid3, FAIL, "H5Sdecode");
+
+ /* Verify extent type */
+ space_type = H5Sget_simple_extent_type(decoded_sid3);
+ VERIFY(space_type, H5S_SCALAR, "H5Sget_simple_extent_type");
+
+ /* Verify decoded dataspace */
+ n = H5Sget_simple_extent_npoints(decoded_sid3);
+ CHECK(n, FAIL, "H5Sget_simple_extent_npoints");
+ VERIFY(n, 1, "H5Sget_simple_extent_npoints");
+
+ /* Retrieve and verify the dataspace rank (scalar => rank 0) */
+ rank = H5Sget_simple_extent_ndims(decoded_sid3);
+ CHECK(rank, FAIL, "H5Sget_simple_extent_ndims");
+ VERIFY(rank, 0, "H5Sget_simple_extent_ndims");
+
+ /* Close the dataspaces */
+ ret = H5Sclose(sid3);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(decoded_sid3);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Release resources */
+ if (sbuf)
+ HDfree(sbuf);
+ if (null_sbuf)
+ HDfree(null_sbuf);
+ if (scalar_buf)
+ HDfree(scalar_buf);
+} /* test_h5s_encode1() */
+
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+
+/****************************************************************
+**
+** test_h5s_check_encoding():
+** This is the helper routine to verify that H5Sencode2()
+** works as specified in the RFC for the library format setting
+** in the file access property list.
+** See "RFC: H5Sencode/H5Sdecode Format Change".
+**
+** This routine is used by:
+** test_h5s_encode_regular_hyper()
+** test_h5s_encode_irregular_hyper()
+** test_h5s_encode_points()
+**
+****************************************************************/
+static herr_t
+test_h5s_check_encoding(hid_t in_fapl, hid_t in_sid, uint32_t expected_version, uint8_t expected_enc_size,
+                        hbool_t expected_to_fail)
+{
+ char *buf = NULL; /* Pointer to the encoded buffer */
+ size_t buf_size; /* Size of the encoded buffer */
+ hid_t d_sid = -1; /* The decoded dataspace ID */
+ htri_t check;
+ hsize_t in_low_bounds[1]; /* The low bounds for the selection for in_sid */
+ hsize_t in_high_bounds[1]; /* The high bounds for the selection for in_sid */
+ hsize_t d_low_bounds[1]; /* The low bounds for the selection for d_sid */
+ hsize_t d_high_bounds[1]; /* The high bounds for the selection for d_sid */
+ herr_t ret; /* Return value */
+
+ /* Get buffer size for encoding with the format setting in in_fapl;
+ * suppress the error stack since failure is expected for some configs */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Sencode2(in_sid, NULL, &buf_size, in_fapl);
+ }
+ H5E_END_TRY
+
+ if (expected_to_fail) {
+ VERIFY(ret, FAIL, "H5Sencode2");
+ }
+ else {
+
+ CHECK(ret, FAIL, "H5Sencode2");
+
+ /* Allocate the buffer for encoding */
+ buf = (char *)HDmalloc(buf_size);
+ CHECK_PTR(buf, "HDmalloc");
+
+ /* Encode according to the setting in in_fapl */
+ ret = H5Sencode2(in_sid, buf, &buf_size, in_fapl);
+ CHECK(ret, FAIL, "H5Sencode2");
+
+ /* Decode the buffer */
+ d_sid = H5Sdecode(buf);
+ CHECK(d_sid, FAIL, "H5Sdecode");
+
+ /* Verify the number of selected points for in_sid and d_sid */
+ VERIFY(H5Sget_select_npoints(in_sid), H5Sget_select_npoints(d_sid), "Compare npoints");
+
+ /* Verify if the two dataspace selections (in_sid, d_sid) are the same shape */
+ check = H5Sselect_shape_same(in_sid, d_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare the starting/ending coordinates of the bounding box for in_sid and d_sid */
+ ret = H5Sget_select_bounds(in_sid, in_low_bounds, in_high_bounds);
+ CHECK(ret, FAIL, "H5Sget_select_bounds");
+ ret = H5Sget_select_bounds(d_sid, d_low_bounds, d_high_bounds);
+ CHECK(ret, FAIL, "H5Sget_select_bounds");
+ VERIFY(in_low_bounds[0], d_low_bounds[0], "Compare selection low bounds");
+ VERIFY(in_high_bounds[0], d_high_bounds[0], "Compare selection high bounds");
+
+ /*
+ * See "RFC: H5Sencode/H5Sdecode Format Change" for the verification of:
+ * H5S_SEL_POINTS:
+ * --the expected version for point selection info
+ * --the expected encoded size (version 2 points selection info)
+ * H5S_SEL_HYPERSLABS:
+ * --the expected version for hyperslab selection info
+ * --the expected encoded size (version 3 hyperslab selection info)
+ */
+
+ if (H5Sget_select_type(in_sid) == H5S_SEL_POINTS) {
+
+ /* Verify the version (byte 35 of the encoded stream) */
+ VERIFY((uint32_t)buf[35], expected_version, "Version for point selection");
+
+ /* Verify the encoded size for version 2 */
+ if (expected_version == 2)
+ VERIFY((uint8_t)buf[39], expected_enc_size, "Encoded size of point selection info");
+ }
+
+ if (H5Sget_select_type(in_sid) == H5S_SEL_HYPERSLABS) {
+
+ /* Verify the version (byte 35 of the encoded stream) */
+ VERIFY((uint32_t)buf[35], expected_version, "Version for hyperslab selection info");
+
+ /* Verify the encoded size for version 3 */
+ if (expected_version == 3)
+ VERIFY((uint8_t)buf[40], expected_enc_size, "Encoded size of selection info");
+
+ } /* hyperslab selection */
+
+ ret = H5Sclose(d_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ if (buf)
+ HDfree(buf);
+ }
+
+ return (0);
+
+} /* test_h5s_check_encoding */
+
+/****************************************************************
+**
+** test_h5s_encode_regular_hyper():
+** This test verifies that H5Sencode2() works as specified in
+** the RFC for regular hyperslabs.
+** See "RFC: H5Sencode/H5Sdecode Format Change".
+**
+****************************************************************/
+static void
+test_h5s_encode_regular_hyper(H5F_libver_t low, H5F_libver_t high)
+{
+ hid_t fapl = -1; /* File access property list ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hsize_t numparticles = 8388608; /* Used to calculate dimension size */
+ unsigned num_dsets = 513; /* Used to calculate dimension size */
+ hsize_t total_particles = numparticles * num_dsets;
+ hsize_t vdsdims[1] = {total_particles}; /* Dimension size */
+ hsize_t start, stride, count, block; /* Selection info */
+ unsigned config; /* Testing configuration */
+ unsigned unlim; /* H5S_UNLIMITED setting or not */
+ herr_t ret; /* Generic return value */
+ uint32_t expected_version = 0; /* Expected version for selection info */
+ uint8_t expected_enc_size = 0; /* Expected encoded size for selection info */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Dataspace encoding of regular hyperslabs\n"));
+
+ /* Create the file access property list */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+
+ /* Set the low/high bounds in the fapl; this drives which encoding
+ * version H5Sencode2() is allowed to produce */
+ ret = H5Pset_libver_bounds(fapl, low, high);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ /* Create the dataspace */
+ sid = H5Screate_simple(1, vdsdims, NULL);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ /* Testing with each configuration (offsets needing 16-bit vs 32-bit encoding) */
+ for (config = CONFIG_16; config <= CONFIG_32; config++) {
+ hbool_t expected_to_fail = FALSE;
+
+ /* Testing with unlimited or not */
+ for (unlim = 0; unlim <= 1; unlim++) {
+ start = 0;
+ count = unlim ? H5S_UNLIMITED : 2;
+
+ /* Old-format files (<= v18) cannot encode unlimited or 32-bit selections */
+ if ((high <= H5F_LIBVER_V18) && (unlim || config == CONFIG_32))
+ expected_to_fail = TRUE;
+
+ if (low >= H5F_LIBVER_V112)
+ expected_version = 3;
+ else if (config == CONFIG_16 && !unlim)
+ expected_version = 1;
+ else
+ expected_version = 2;
+
+ /* test 1: block small relative to stride */
+ switch (config) {
+ case CONFIG_16:
+ stride = POWER16 - 1;
+ block = 4;
+ expected_enc_size = (uint8_t)(expected_version == 3 ? 2 : 4);
+ break;
+ case CONFIG_32:
+ stride = POWER32 - 1;
+ block = 4;
+ expected_enc_size = (uint8_t)(expected_version == 3 ? 4 : 8);
+
+ break;
+ default:
+ HDassert(0);
+ break;
+ } /* end switch */
+
+ /* Set the hyperslab selection */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, &start, &stride, &count, &block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify the version and encoded size expected for this configuration */
+ ret = test_h5s_check_encoding(fapl, sid, expected_version, expected_enc_size, expected_to_fail);
+ CHECK(ret, FAIL, "test_h5s_check_encoding");
+
+ /* test 2: block just below the power-of-two boundary */
+ switch (config) {
+ case CONFIG_16:
+ stride = POWER16 - 1;
+ block = POWER16 - 2;
+ expected_enc_size = (uint8_t)(expected_version == 3 ? 2 : 4);
+ break;
+ case CONFIG_32:
+ stride = POWER32 - 1;
+ block = POWER32 - 2;
+ expected_enc_size = (uint8_t)(expected_version == 3 ? 4 : 8);
+ break;
+ default:
+ HDassert(0);
+ break;
+ } /* end switch */
+
+ /* Set the hyperslab selection */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, &start, &stride, &count, &block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify the version and encoded size for this configuration */
+ ret = test_h5s_check_encoding(fapl, sid, expected_version, expected_enc_size, expected_to_fail);
+ CHECK(ret, FAIL, "test_h5s_check_encoding");
+
+ /* test 3: block == stride (selection extent crosses the boundary,
+ * forcing the larger encoded size regardless of version) */
+ switch (config) {
+ case CONFIG_16:
+ stride = POWER16 - 1;
+ block = POWER16 - 1;
+ expected_enc_size = 4;
+ break;
+ case CONFIG_32:
+ stride = POWER32 - 1;
+ block = POWER32 - 1;
+ expected_enc_size = 8;
+ break;
+ default:
+ HDassert(0);
+ break;
+ }
+
+ /* Set the hyperslab selection */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, &start, &stride, &count, &block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify the version and encoded size expected for this configuration */
+ ret = test_h5s_check_encoding(fapl, sid, expected_version, expected_enc_size, expected_to_fail);
+ CHECK(ret, FAIL, "test_h5s_check_encoding");
+
+ /* test 4: stride exactly at the power-of-two boundary */
+ switch (config) {
+ case CONFIG_16:
+ stride = POWER16;
+ block = POWER16 - 2;
+ expected_enc_size = 4;
+ break;
+ case CONFIG_32:
+ stride = POWER32;
+ block = POWER32 - 2;
+ expected_enc_size = 8;
+ break;
+ default:
+ HDassert(0);
+ break;
+ } /* end switch */
+
+ /* Set the hyperslab selection */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, &start, &stride, &count, &block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify the version and encoded size expected for this configuration */
+ ret = test_h5s_check_encoding(fapl, sid, expected_version, expected_enc_size, expected_to_fail);
+ CHECK(ret, FAIL, "test_h5s_check_encoding");
+
+ /* test 5: stride at the boundary, minimal block */
+ switch (config) {
+ case CONFIG_16:
+ stride = POWER16;
+ block = 1;
+ expected_enc_size = 4;
+ break;
+ case CONFIG_32:
+ stride = POWER32;
+ block = 1;
+ expected_enc_size = 8;
+ break;
+ default:
+ HDassert(0);
+ break;
+ }
+
+ /* Set the hyperslab selection */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, &start, &stride, &count, &block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify the version and encoded size expected for this configuration */
+ ret = test_h5s_check_encoding(fapl, sid, expected_version, expected_enc_size, expected_to_fail);
+ CHECK(ret, FAIL, "test_h5s_check_encoding");
+
+ } /* for unlim */
+ } /* for config */
+
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+} /* test_h5s_encode_regular_hyper() */
+
+/****************************************************************
+**
+** test_h5s_encode_irregular_hyper():
+** This test verifies that H5Sencode2() works as specified in
+** the RFC for irregular hyperslabs.
+** See "RFC: H5Sencode/H5Sdecode Format Change".
+**
+****************************************************************/
+static void
+test_h5s_encode_irregular_hyper(H5F_libver_t low, H5F_libver_t high)
+{
+ hid_t fapl = -1; /* File access property list ID */
+ hid_t sid; /* Dataspace ID */
+ hsize_t numparticles = 8388608; /* Used to calculate dimension size */
+ unsigned num_dsets = 513; /* Used to calculate dimension size */
+ hsize_t total_particles = numparticles * num_dsets;
+ hsize_t vdsdims[1] = {total_particles}; /* Dimension size */
+ hsize_t start, stride, count, block; /* Selection info */
+ htri_t is_regular; /* Is this a regular hyperslab */
+ unsigned config; /* Testing configuration */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Dataspace encoding of irregular hyperslabs\n"));
+
+ /* Create the file access property list */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+
+ /* Set the low/high bounds in the fapl */
+ ret = H5Pset_libver_bounds(fapl, low, high);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ /* Create the dataspace */
+ sid = H5Screate_simple(1, vdsdims, NULL);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ /* Testing with each configuration (8-, 16- and 32-bit offsets) */
+ for (config = CONFIG_8; config <= CONFIG_32; config++) {
+ hbool_t expected_to_fail = FALSE; /* Whether H5Sencode2 is expected to fail */
+ uint32_t expected_version = 0; /* Expected version for selection info */
+ uint32_t expected_enc_size = 0; /* Expected encoded size for selection info */
+
+ start = 0;
+ count = 2;
+ block = 4;
+
+ /* H5Sencode2 is expected to fail for library v110 and below
+ when the selection exceeds the 32 bits integer limit */
+ if (high <= H5F_LIBVER_V110 && config == CONFIG_32)
+ expected_to_fail = TRUE;
+
+ if (low >= H5F_LIBVER_V112 || config == CONFIG_32)
+ expected_version = 3;
+ else
+ expected_version = 1;
+
+ switch (config) {
+ case CONFIG_8:
+ stride = POWER8 - 2;
+ break;
+
+ case CONFIG_16:
+ stride = POWER16 - 2;
+ break;
+
+ case CONFIG_32:
+ stride = POWER32 - 2;
+ break;
+
+ default:
+ HDassert(0);
+ break;
+ }
+
+ /* Set the first (regular) hyperslab selection */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, &start, &stride, &count, &block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start = 8;
+ count = 5;
+ block = 2;
+
+ switch (config) {
+ case CONFIG_8:
+ stride = POWER8;
+ expected_enc_size = expected_version == 3 ? 2 : 4;
+ break;
+
+ case CONFIG_16:
+ stride = POWER16;
+ expected_enc_size = 4;
+ break;
+
+ case CONFIG_32:
+ stride = POWER32;
+ expected_enc_size = 8;
+ break;
+
+ default:
+ HDassert(0);
+ break;
+ }
+
+ /* OR in a second hyperslab, making the overall selection irregular */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_OR, &start, &stride, &count, &block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Should be irregular hyperslab */
+ is_regular = H5Sis_regular_hyperslab(sid);
+ VERIFY(is_regular, FALSE, "H5Sis_regular_hyperslab");
+
+ /* Verify the version and encoded size expected for the configuration */
+ HDassert(expected_enc_size <= 255);
+ ret = test_h5s_check_encoding(fapl, sid, expected_version, (uint8_t)expected_enc_size,
+                               expected_to_fail);
+ CHECK(ret, FAIL, "test_h5s_check_encoding");
+
+ } /* for config */
+
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close the file access property list (previously leaked) */
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+} /* test_h5s_encode_irregular_hyper() */
+
+/****************************************************************
+**
+** test_h5s_encode_points():
+** This test verifies that H5Sencode2() works as specified in
+** the RFC for point selection.
+** See "RFC: H5Sencode/H5Sdecode Format Change".
+**
+****************************************************************/
+static void
+test_h5s_encode_points(H5F_libver_t low, H5F_libver_t high)
+{
+ hid_t fapl = -1; /* File access property list ID */
+ hid_t sid; /* Dataspace ID */
+ hsize_t numparticles = 8388608; /* Used to calculate dimension size */
+ unsigned num_dsets = 513; /* used to calculate dimension size */
+ hsize_t total_particles = numparticles * num_dsets;
+ hsize_t vdsdims[1] = {total_particles}; /* Dimension size */
+ hsize_t coord[4]; /* The point coordinates */
+ herr_t ret; /* Generic return value */
+ hbool_t expected_to_fail = FALSE; /* Expected to fail or not */
+ uint32_t expected_version = 0; /* Expected version for selection info */
+ uint8_t expected_enc_size = 0; /* Expected encoded size of selection info */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Dataspace encoding of points selection\n"));
+
+ /* Create the file access property list */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+
+ /* Set the low/high bounds in the fapl */
+ ret = H5Pset_libver_bounds(fapl, low, high);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ /* Create the dataspace */
+ sid = H5Screate_simple(1, vdsdims, NULL);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ /* test 1: coordinates fit in 32 bits */
+ coord[0] = 5;
+ coord[1] = 15;
+ coord[2] = POWER16;
+ coord[3] = 19;
+ ret = H5Sselect_elements(sid, H5S_SELECT_SET, (size_t)4, coord);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ expected_to_fail = FALSE;
+ expected_enc_size = 4;
+ expected_version = 1;
+
+ if (low >= H5F_LIBVER_V112)
+ expected_version = 2;
+
+ /* Verify the version and encoded size expected for the configuration */
+ ret = test_h5s_check_encoding(fapl, sid, expected_version, expected_enc_size, expected_to_fail);
+ CHECK(ret, FAIL, "test_h5s_check_encoding");
+
+ /* test 2: largest coordinate still within 32 bits */
+ coord[0] = 5;
+ coord[1] = 15;
+ coord[2] = POWER32 - 1;
+ coord[3] = 19;
+ ret = H5Sselect_elements(sid, H5S_SELECT_SET, (size_t)4, coord);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Expected result same as test 1 */
+ ret = test_h5s_check_encoding(fapl, sid, expected_version, expected_enc_size, expected_to_fail);
+ CHECK(ret, FAIL, "test_h5s_check_encoding");
+
+ /* test 3: coordinate exceeds 32 bits -- old formats must fail */
+ if (high <= H5F_LIBVER_V110)
+ expected_to_fail = TRUE;
+
+ if (high >= H5F_LIBVER_V112) {
+ expected_version = 2;
+ expected_enc_size = 8;
+ }
+
+ coord[0] = 5;
+ coord[1] = 15;
+ coord[2] = POWER32 + 1;
+ coord[3] = 19;
+ ret = H5Sselect_elements(sid, H5S_SELECT_SET, (size_t)4, coord);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Verify the version and encoded size expected for the configuration */
+ ret = test_h5s_check_encoding(fapl, sid, expected_version, expected_enc_size, expected_to_fail);
+ CHECK(ret, FAIL, "test_h5s_check_encoding");
+
+ /* Close the dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close the file access property list (previously leaked) */
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+} /* test_h5s_encode_points() */
+
+/****************************************************************
+**
+** test_h5s_encode_length():
+** Test to verify HDFFV-10271 is fixed.
+** Verify that version 2 hyperslab encoding length is correct.
+**
+** See "RFC: H5Sencode/H5Sdecode Format Change" for the
+** description of the encoding format.
+**
+****************************************************************/
+static void
+test_h5s_encode_length(void)
+{
+ hid_t sid; /* Dataspace ID */
+ hid_t decoded_sid; /* Dataspace ID from H5Sdecode */
+ size_t sbuf_size = 0; /* Buffer size for H5Sencode2/1 */
+ unsigned char *sbuf = NULL; /* Buffer for H5Sencode2/1 */
+ hsize_t dims[1] = {500}; /* Dimension size */
+ hsize_t start, count, block, stride; /* Hyperslab selection specifications */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Version 2 Hyperslab Encoding Length is correct\n"));
+
+ /* Create dataspace */
+ sid = H5Screate_simple(1, dims, NULL);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ /* Setting H5S_UNLIMITED in count will use version 2 for hyperslab encoding */
+ start = 0;
+ stride = 10;
+ block = 4;
+ count = H5S_UNLIMITED;
+
+ /* Set hyperslab selection */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, &start, &stride, &count, &block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Query the buffer size needed to encode the dataspace */
+ ret = H5Sencode2(sid, NULL, &sbuf_size, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Sencode");
+
+ /* Allocate the buffer */
+ if (sbuf_size > 0) {
+ sbuf = (unsigned char *)HDcalloc((size_t)1, sbuf_size);
+ CHECK_PTR(sbuf, "H5Sencode2");
+ }
+
+ /* Encode the dataspace */
+ ret = H5Sencode2(sid, sbuf, &sbuf_size, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Sencode")
+
+ /* Verify the encoded bytes: version (offset 35) must be 2 and the
+ * selection-info length (offset 40) must be 36 -- HDFFV-10271 */
+ VERIFY((uint32_t)sbuf[40], 36, "Length for encoding version 2");
+ VERIFY((uint32_t)sbuf[35], 2, "Hyperslab encoding version is 2");
+
+ /* Decode from the dataspace buffer and return an object handle */
+ decoded_sid = H5Sdecode(sbuf);
+ CHECK(decoded_sid, FAIL, "H5Sdecode");
+
+ /* Verify that the original and the decoded dataspace are equal */
+ VERIFY(H5Sget_select_npoints(sid), H5Sget_select_npoints(decoded_sid), "Compare npoints");
+
+ /* Close the decoded dataspace */
+ ret = H5Sclose(decoded_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Free the buffer */
+ if (sbuf)
+ HDfree(sbuf);
+
+ /* Close the original dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+} /* test_h5s_encode_length() */
+
+/****************************************************************
+**
+** test_h5s_scalar_write(): Test scalar H5S (dataspace) writing code.
+**
+****************************************************************/
+static void
+test_h5s_scalar_write(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ int rank; /* Logical rank of dataspace */
+ hsize_t tdims[4]; /* Dimension array to test with */
+ hssize_t n; /* Number of dataspace elements */
+ H5S_class_t ext_type; /* Extent type */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Scalar Dataspace Manipulation during Writing\n"));
+
+ /* Create file */
+ fid1 = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Verify a non-zero rank fails with a NULL dimension. */
+ H5E_BEGIN_TRY
+ {
+ sid1 = H5Screate_simple(SPACE1_RANK, NULL, NULL);
+ }
+ H5E_END_TRY
+ VERIFY(sid1, FAIL, "H5Screate_simple");
+
+ /* Create scalar dataspace (SPACE3_RANK is 0, so NULL dims are valid) */
+ sid1 = H5Screate_simple(SPACE3_RANK, NULL, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Retrieve the number of elements in the dataspace selection */
+ n = H5Sget_simple_extent_npoints(sid1);
+ CHECK(n, FAIL, "H5Sget_simple_extent_npoints");
+ VERIFY(n, 1, "H5Sget_simple_extent_npoints");
+
+ /* Get the dataspace rank */
+ rank = H5Sget_simple_extent_ndims(sid1);
+ CHECK(rank, FAIL, "H5Sget_simple_extent_ndims");
+ VERIFY(rank, SPACE3_RANK, "H5Sget_simple_extent_ndims");
+
+ /* Get the dataspace dimension sizes (rank 0 for a scalar dataspace) */
+ rank = H5Sget_simple_extent_dims(sid1, tdims, NULL);
+ VERIFY(rank, 0, "H5Sget_simple_extent_dims");
+
+ /* Verify extent type */
+ ext_type = H5Sget_simple_extent_type(sid1);
+ VERIFY(ext_type, H5S_SCALAR, "H5Sget_simple_extent_type");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, "Dataset1", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write the single scalar element to the dataset */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &space3_data);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close scalar dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_h5s_scalar_write() */
+
+/****************************************************************
+**
+** test_h5s_scalar_read(): Test scalar H5S (dataspace) reading code.
+**
+****************************************************************/
+static void
+test_h5s_scalar_read(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ int rank; /* Logical rank of dataspace */
+ hsize_t tdims[4]; /* Dimension array to test with */
+ hssize_t n; /* Number of dataspace elements */
+ unsigned rdata; /* Scalar data read in */
+ herr_t ret; /* Generic return value */
+ H5S_class_t ext_type; /* Extent type */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Scalar Dataspace Manipulation during Reading\n"));
+
+ /* Open the file written by test_h5s_scalar_write() */
+ fid1 = H5Fopen(DATAFILE, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fopen");
+
+ /* Open the existing dataset */
+ dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+
+ /* Retrieve the dataset's dataspace */
+ sid1 = H5Dget_space(dataset);
+ CHECK(sid1, FAIL, "H5Dget_space");
+
+ /* A scalar dataspace holds exactly one element */
+ n = H5Sget_simple_extent_npoints(sid1);
+ CHECK(n, FAIL, "H5Sget_simple_extent_npoints");
+ VERIFY(n, 1, "H5Sget_simple_extent_npoints");
+
+ rank = H5Sget_simple_extent_ndims(sid1);
+ CHECK(rank, FAIL, "H5Sget_simple_extent_ndims");
+ VERIFY(rank, SPACE3_RANK, "H5Sget_simple_extent_ndims");
+
+ rank = H5Sget_simple_extent_dims(sid1, tdims, NULL);
+ VERIFY(rank, 0, "H5Sget_simple_extent_dims");
+
+ /* Verify extent type */
+ ext_type = H5Sget_simple_extent_type(sid1);
+ VERIFY(ext_type, H5S_SCALAR, "H5Sget_simple_extent_type");
+
+ /* Read back the scalar value and compare against what was written */
+ ret = H5Dread(dataset, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rdata);
+ CHECK(ret, FAIL, "H5Dread");
+ VERIFY(rdata, space3_data, "H5Dread");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close scalar dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_h5s_scalar_read() */
+
+/****************************************************************
+**
+** test_h5s_compound_scalar_write(): Test scalar H5S (dataspace) writing for
+** compound datatypes.
+**
+****************************************************************/
+static void
+test_h5s_compound_scalar_write(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t tid1; /* Compound datatype ID */
+ hid_t sid1; /* Dataspace ID */
+ int rank; /* Logical rank of dataspace */
+ hsize_t tdims[4]; /* Dimension array to test with */
+ hssize_t n; /* Number of dataspace elements */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Scalar Dataspace Manipulation for Writing Compound Datatypes\n"));
+
+ /* Create file */
+ fid1 = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create the compound datatype mirroring struct space4_struct;
+ * the field offsets are saved in file-scope globals for the read test */
+ tid1 = H5Tcreate(H5T_COMPOUND, sizeof(struct space4_struct));
+ CHECK(tid1, FAIL, "H5Tcreate");
+ space4_field1_off = HOFFSET(struct space4_struct, c1);
+ ret = H5Tinsert(tid1, SPACE4_FIELDNAME1, space4_field1_off, H5T_NATIVE_SCHAR);
+ CHECK(ret, FAIL, "H5Tinsert");
+ space4_field2_off = HOFFSET(struct space4_struct, u);
+ ret = H5Tinsert(tid1, SPACE4_FIELDNAME2, space4_field2_off, H5T_NATIVE_UINT);
+ CHECK(ret, FAIL, "H5Tinsert");
+ space4_field3_off = HOFFSET(struct space4_struct, f);
+ ret = H5Tinsert(tid1, SPACE4_FIELDNAME3, space4_field3_off, H5T_NATIVE_FLOAT);
+ CHECK(ret, FAIL, "H5Tinsert");
+ space4_field4_off = HOFFSET(struct space4_struct, c2);
+ ret = H5Tinsert(tid1, SPACE4_FIELDNAME4, space4_field4_off, H5T_NATIVE_SCHAR);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ /* Create scalar dataspace */
+ sid1 = H5Screate_simple(SPACE3_RANK, NULL, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* A scalar dataspace holds exactly one element */
+ n = H5Sget_simple_extent_npoints(sid1);
+ CHECK(n, FAIL, "H5Sget_simple_extent_npoints");
+ VERIFY(n, 1, "H5Sget_simple_extent_npoints");
+
+ rank = H5Sget_simple_extent_ndims(sid1);
+ CHECK(rank, FAIL, "H5Sget_simple_extent_ndims");
+ VERIFY(rank, SPACE3_RANK, "H5Sget_simple_extent_ndims");
+
+ rank = H5Sget_simple_extent_dims(sid1, tdims, NULL);
+ VERIFY(rank, 0, "H5Sget_simple_extent_dims");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, "Dataset1", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write the single compound element to the dataset */
+ ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, &space4_data);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close compound datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close scalar dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_h5s_compound_scalar_write() */
+
+/****************************************************************
+**
+** test_h5s_compound_scalar_read(): Test scalar H5S (dataspace) reading for
+** compound datatypes.  Reads back the element written by
+** test_h5s_compound_scalar_write() and compares it to space4_data.
+**
+****************************************************************/
+static void
+test_h5s_compound_scalar_read(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t type; /* Datatype */
+ int rank; /* Logical rank of dataspace */
+ hsize_t tdims[4]; /* Dimension array to test with */
+ hssize_t n; /* Number of dataspace elements */
+ struct space4_struct rdata; /* Scalar data read in */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Scalar Dataspace Manipulation for Reading Compound Datatypes\n"));
+
+ /* Open the file created by the corresponding write test */
+ fid1 = H5Fopen(DATAFILE, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fopen");
+
+ /* Open the dataset containing the scalar compound element */
+ dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+
+ sid1 = H5Dget_space(dataset);
+ CHECK(sid1, FAIL, "H5Dget_space");
+
+ /* A scalar dataspace holds exactly one element */
+ n = H5Sget_simple_extent_npoints(sid1);
+ CHECK(n, FAIL, "H5Sget_simple_extent_npoints");
+ VERIFY(n, 1, "H5Sget_simple_extent_npoints");
+
+ rank = H5Sget_simple_extent_ndims(sid1);
+ CHECK(rank, FAIL, "H5Sget_simple_extent_ndims");
+ VERIFY(rank, SPACE3_RANK, "H5Sget_simple_extent_ndims");
+
+ /* H5Sget_simple_extent_dims() returns the rank, which is 0 for a scalar */
+ rank = H5Sget_simple_extent_dims(sid1, tdims, NULL);
+ VERIFY(rank, 0, "H5Sget_simple_extent_dims");
+
+ type = H5Dget_type(dataset);
+ CHECK(type, FAIL, "H5Dget_type");
+
+ /* Read the element back and verify it matches what was written */
+ ret = H5Dread(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rdata);
+ CHECK(ret, FAIL, "H5Dread");
+ if (HDmemcmp(&space4_data, &rdata, sizeof(struct space4_struct)) != 0) {
+ HDprintf("scalar data different: space4_data.c1=%c, read_data4.c1=%c\n", space4_data.c1, rdata.c1);
+ HDprintf("scalar data different: space4_data.u=%u, read_data4.u=%u\n", space4_data.u, rdata.u);
+ HDprintf("scalar data different: space4_data.f=%f, read_data4.f=%f\n", (double)space4_data.f,
+ (double)rdata.f);
+ /* Was printing rdata.c2 for a "%c1" message - pass the matching field */
+ TestErrPrintf("scalar data different: space4_data.c1=%c, read_data4.c1=%c\n", space4_data.c1,
+ rdata.c1);
+ } /* end if */
+
+ /* Close datatype */
+ ret = H5Tclose(type);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close scalar dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* end test_h5s_compound_scalar_read() */
+
+/* Data array sizes for chunk test */
+#define CHUNK_DATA_NX 50000
+#define CHUNK_DATA_NY 3
+
+/****************************************************************
+**
+** test_h5s_chunk(): Exercise chunked I/O, testing when data conversion
+** is necessary and the entire chunk read in doesn't fit into the
+** conversion buffer
+**
+****************************************************************/
+static void
+test_h5s_chunk(void)
+{
+ herr_t status;
+ hid_t fileID, dsetID;
+ hid_t plist_id;
+ hid_t space_id;
+ hsize_t dims[2];
+ hsize_t csize[2];
+ double **chunk_data_dbl = NULL;
+ double *chunk_data_dbl_data = NULL;
+ float **chunk_data_flt = NULL;
+ float *chunk_data_flt_data = NULL;
+ int i, j;
+
+ /* Allocate memory: contiguous data buffers plus row-pointer arrays for
+ * convenient 2-D [i][j] addressing into them */
+ chunk_data_dbl_data = (double *)HDcalloc(CHUNK_DATA_NX * CHUNK_DATA_NY, sizeof(double));
+ CHECK_PTR(chunk_data_dbl_data, "HDcalloc");
+ chunk_data_dbl = (double **)HDcalloc(CHUNK_DATA_NX, sizeof(chunk_data_dbl_data));
+ CHECK_PTR(chunk_data_dbl, "HDcalloc");
+ for (i = 0; i < CHUNK_DATA_NX; i++)
+ chunk_data_dbl[i] = chunk_data_dbl_data + (i * CHUNK_DATA_NY);
+
+ chunk_data_flt_data = (float *)HDcalloc(CHUNK_DATA_NX * CHUNK_DATA_NY, sizeof(float));
+ CHECK_PTR(chunk_data_flt_data, "HDcalloc");
+ chunk_data_flt = (float **)HDcalloc(CHUNK_DATA_NX, sizeof(chunk_data_flt_data));
+ CHECK_PTR(chunk_data_flt, "HDcalloc");
+ for (i = 0; i < CHUNK_DATA_NX; i++)
+ chunk_data_flt[i] = chunk_data_flt_data + (i * CHUNK_DATA_NY);
+
+ fileID = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fileID, FAIL, "H5Fcreate");
+
+ plist_id = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(plist_id, FAIL, "H5Pcreate");
+
+ /* One chunk covering the whole dataset, so a chunk read is larger than
+ * the default type-conversion buffer */
+ csize[0] = CHUNK_DATA_NX;
+ csize[1] = CHUNK_DATA_NY;
+ status = H5Pset_chunk(plist_id, 2, csize);
+ CHECK(status, FAIL, "H5Pset_chunk");
+
+ /* Create the dataspace */
+ dims[0] = CHUNK_DATA_NX;
+ dims[1] = CHUNK_DATA_NY;
+ space_id = H5Screate_simple(2, dims, NULL);
+ CHECK(space_id, FAIL, "H5Screate_simple");
+
+ dsetID = H5Dcreate2(fileID, "coords", H5T_NATIVE_FLOAT, space_id, H5P_DEFAULT, plist_id, H5P_DEFAULT);
+ CHECK(dsetID, FAIL, "H5Dcreate2");
+
+ /* Initialize float array */
+ for (i = 0; i < CHUNK_DATA_NX; i++)
+ for (j = 0; j < CHUNK_DATA_NY; j++)
+ chunk_data_flt[i][j] = (float)(i + 1) * 2.5F - (float)j * 100.3F;
+
+ status = H5Dwrite(dsetID, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, chunk_data_flt_data);
+ CHECK(status, FAIL, "H5Dwrite");
+
+ status = H5Pclose(plist_id);
+ CHECK(status, FAIL, "H5Pclose");
+ status = H5Sclose(space_id);
+ CHECK(status, FAIL, "H5Sclose");
+ status = H5Dclose(dsetID);
+ CHECK(status, FAIL, "H5Dclose");
+ status = H5Fclose(fileID);
+ CHECK(status, FAIL, "H5Fclose");
+
+ /* Reset/initialize the data arrays to read in */
+ HDmemset(chunk_data_dbl_data, 0, sizeof(double) * CHUNK_DATA_NX * CHUNK_DATA_NY);
+ HDmemset(chunk_data_flt_data, 0, sizeof(float) * CHUNK_DATA_NX * CHUNK_DATA_NY);
+
+ fileID = H5Fopen(DATAFILE, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(fileID, FAIL, "H5Fopen");
+ dsetID = H5Dopen2(fileID, "coords", H5P_DEFAULT);
+ CHECK(dsetID, FAIL, "H5Dopen2");
+
+ /* Read back as double (forces float->double conversion) and as float */
+ status = H5Dread(dsetID, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, chunk_data_dbl_data);
+ CHECK(status, FAIL, "H5Dread");
+ status = H5Dread(dsetID, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, chunk_data_flt_data);
+ CHECK(status, FAIL, "H5Dread");
+
+ status = H5Dclose(dsetID);
+ CHECK(status, FAIL, "H5Dclose");
+ status = H5Fclose(fileID);
+ CHECK(status, FAIL, "H5Fclose");
+
+ for (i = 0; i < CHUNK_DATA_NX; i++) {
+ for (j = 0; j < CHUNK_DATA_NY; j++) {
+ /* Check if the two values are within 0.001% range. */
+ if (!H5_DBL_REL_EQUAL(chunk_data_dbl[i][j], (double)chunk_data_flt[i][j], 0.00001))
+ TestErrPrintf("%u: chunk_data_dbl[%d][%d]=%e, chunk_data_flt[%d][%d]=%e\n",
+ (unsigned)__LINE__, i, j, chunk_data_dbl[i][j], i, j,
+ (double)chunk_data_flt[i][j]);
+ } /* end for */
+ } /* end for */
+
+ HDfree(chunk_data_dbl);
+ HDfree(chunk_data_dbl_data);
+ HDfree(chunk_data_flt);
+ HDfree(chunk_data_flt_data);
+} /* test_h5s_chunk() */
+
+/****************************************************************
+**
+** test_h5s_extent_equal(): Exercise extent comparison code
+**
+****************************************************************/
+static void
+test_h5s_extent_equal(void)
+{
+ hid_t null_space; /* Null dataspace */
+ hid_t scalar_space; /* Scalar dataspace */
+ hid_t d1_space1, d1_space2, d1_space3, d1_space4; /* 1-D dataspaces */
+ hid_t d2_space1, d2_space2, d2_space3, d2_space4; /* 2-D dataspaces */
+ hid_t d3_space1, d3_space2, d3_space3, d3_space4; /* 3-D dataspaces */
+ hsize_t d1_dims1[1] = {10}, /* 1-D dimensions */
+ d1_dims2[1] = {20}, d1_dims3[1] = {H5S_UNLIMITED};
+ hsize_t d2_dims1[2] = {10, 10}, /* 2-D dimensions */
+ d2_dims2[2] = {20, 20}, d2_dims3[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
+ hsize_t d3_dims1[3] = {10, 10, 10}, /* 3-D dimensions */
+ d3_dims2[3] = {20, 20, 20}, d3_dims3[3] = {H5S_UNLIMITED, H5S_UNLIMITED, H5S_UNLIMITED};
+ htri_t ext_equal; /* Whether two dataspace extents are equal */
+ herr_t ret; /* Generic error return */
+
+ /* Create dataspaces */
+ null_space = H5Screate(H5S_NULL);
+ CHECK(null_space, FAIL, "H5Screate");
+
+ scalar_space = H5Screate(H5S_SCALAR);
+ CHECK(scalar_space, FAIL, "H5Screate");
+
+ d1_space1 = H5Screate_simple(1, d1_dims1, NULL);
+ CHECK(d1_space1, FAIL, "H5Screate");
+ d1_space2 = H5Screate_simple(1, d1_dims2, NULL);
+ CHECK(d1_space2, FAIL, "H5Screate");
+ d1_space3 = H5Screate_simple(1, d1_dims1, d1_dims2);
+ CHECK(d1_space3, FAIL, "H5Screate");
+ d1_space4 = H5Screate_simple(1, d1_dims1, d1_dims3);
+ CHECK(d1_space4, FAIL, "H5Screate");
+
+ d2_space1 = H5Screate_simple(2, d2_dims1, NULL);
+ CHECK(d2_space1, FAIL, "H5Screate");
+ d2_space2 = H5Screate_simple(2, d2_dims2, NULL);
+ CHECK(d2_space2, FAIL, "H5Screate");
+ d2_space3 = H5Screate_simple(2, d2_dims1, d2_dims2);
+ CHECK(d2_space3, FAIL, "H5Screate");
+ d2_space4 = H5Screate_simple(2, d2_dims1, d2_dims3);
+ CHECK(d2_space4, FAIL, "H5Screate");
+
+ d3_space1 = H5Screate_simple(3, d3_dims1, NULL);
+ CHECK(d3_space1, FAIL, "H5Screate");
+ d3_space2 = H5Screate_simple(3, d3_dims2, NULL);
+ CHECK(d3_space2, FAIL, "H5Screate");
+ d3_space3 = H5Screate_simple(3, d3_dims1, d3_dims2);
+ CHECK(d3_space3, FAIL, "H5Screate");
+ d3_space4 = H5Screate_simple(3, d3_dims1, d3_dims3);
+ CHECK(d3_space4, FAIL, "H5Screate");
+
+ /* Compare all dataspace combinations; only comparing a dataspace
+ * against itself should report TRUE */
+
+ /* Compare null dataspace against all others, including itself */
+ ext_equal = H5Sextent_equal(null_space, null_space);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(null_space, scalar_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(null_space, d1_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(null_space, d1_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(null_space, d1_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(null_space, d1_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(null_space, d2_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(null_space, d2_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(null_space, d2_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(null_space, d2_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(null_space, d3_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(null_space, d3_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(null_space, d3_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(null_space, d3_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+
+ /* Compare scalar dataspace against all others, including itself */
+ ext_equal = H5Sextent_equal(scalar_space, null_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(scalar_space, scalar_space);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(scalar_space, d1_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(scalar_space, d1_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(scalar_space, d1_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(scalar_space, d1_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(scalar_space, d2_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(scalar_space, d2_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(scalar_space, d2_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(scalar_space, d2_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(scalar_space, d3_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(scalar_space, d3_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(scalar_space, d3_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(scalar_space, d3_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+
+ /* Compare small 1-D dataspace w/no max. dims against all others, including itself */
+ ext_equal = H5Sextent_equal(d1_space1, null_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space1, scalar_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space1, d1_space1);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space1, d1_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space1, d1_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space1, d1_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space1, d2_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space1, d2_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space1, d2_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space1, d2_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space1, d3_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space1, d3_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space1, d3_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space1, d3_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+
+ /* Compare larger 1-D dataspace w/no max. dims against all others, including itself */
+ ext_equal = H5Sextent_equal(d1_space2, null_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space2, scalar_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space2, d1_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space2, d1_space2);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space2, d1_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space2, d1_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space2, d2_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space2, d2_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space2, d2_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space2, d2_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space2, d3_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space2, d3_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space2, d3_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space2, d3_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+
+ /* Compare small 1-D dataspace w/fixed max. dims against all others, including itself */
+ ext_equal = H5Sextent_equal(d1_space3, null_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space3, scalar_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space3, d1_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space3, d1_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space3, d1_space3);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space3, d1_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space3, d2_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space3, d2_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space3, d2_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space3, d2_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space3, d3_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space3, d3_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space3, d3_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space3, d3_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+
+ /* Compare small 1-D dataspace w/unlimited max. dims against all others, including itself */
+ ext_equal = H5Sextent_equal(d1_space4, null_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space4, scalar_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space4, d1_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space4, d1_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space4, d1_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space4, d1_space4);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space4, d2_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space4, d2_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space4, d2_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space4, d2_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space4, d3_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space4, d3_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space4, d3_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d1_space4, d3_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+
+ /* Compare small 2-D dataspace w/no max. dims against all others, including itself */
+ ext_equal = H5Sextent_equal(d2_space1, null_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space1, scalar_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space1, d1_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space1, d1_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space1, d1_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space1, d1_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space1, d2_space1);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space1, d2_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space1, d2_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space1, d2_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space1, d3_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space1, d3_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space1, d3_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space1, d3_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+
+ /* Compare larger 2-D dataspace w/no max. dims against all others, including itself */
+ ext_equal = H5Sextent_equal(d2_space2, null_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space2, scalar_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space2, d1_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space2, d1_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space2, d1_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space2, d1_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space2, d2_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space2, d2_space2);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space2, d2_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space2, d2_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space2, d3_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space2, d3_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space2, d3_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space2, d3_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+
+ /* Compare small 2-D dataspace w/fixed max. dims against all others, including itself */
+ ext_equal = H5Sextent_equal(d2_space3, null_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space3, scalar_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space3, d1_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space3, d1_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space3, d1_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space3, d1_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space3, d2_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space3, d2_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space3, d2_space3);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space3, d2_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space3, d3_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space3, d3_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space3, d3_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space3, d3_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+
+ /* Compare small 2-D dataspace w/unlimited max. dims against all others, including itself */
+ ext_equal = H5Sextent_equal(d2_space4, null_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space4, scalar_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space4, d1_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space4, d1_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space4, d1_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space4, d1_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space4, d2_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space4, d2_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space4, d2_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space4, d2_space4);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space4, d3_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space4, d3_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space4, d3_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d2_space4, d3_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+
+ /* Compare small 3-D dataspace w/no max. dims against all others, including itself */
+ ext_equal = H5Sextent_equal(d3_space1, null_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space1, scalar_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space1, d1_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space1, d1_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space1, d1_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space1, d1_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space1, d2_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space1, d2_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space1, d2_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space1, d2_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space1, d3_space1);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space1, d3_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space1, d3_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space1, d3_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+
+ /* Compare larger 3-D dataspace w/no max. dims against all others, including itself */
+ ext_equal = H5Sextent_equal(d3_space2, null_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space2, scalar_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space2, d1_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space2, d1_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space2, d1_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space2, d1_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space2, d2_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space2, d2_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space2, d2_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space2, d2_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space2, d3_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space2, d3_space2);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space2, d3_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space2, d3_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+
+ /* Compare small 3-D dataspace w/fixed max. dims against all others, including itself */
+ ext_equal = H5Sextent_equal(d3_space3, null_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space3, scalar_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space3, d1_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space3, d1_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space3, d1_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space3, d1_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space3, d2_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space3, d2_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space3, d2_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space3, d2_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space3, d3_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space3, d3_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space3, d3_space3);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space3, d3_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+
+ /* Compare small 3-D dataspace w/unlimited max. dims against all others, including itself */
+ ext_equal = H5Sextent_equal(d3_space4, null_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space4, scalar_space);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space4, d1_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space4, d1_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space4, d1_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space4, d1_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space4, d2_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space4, d2_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space4, d2_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space4, d2_space4);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space4, d3_space1);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space4, d3_space2);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space4, d3_space3);
+ VERIFY(ext_equal, FALSE, "H5Sextent_equal");
+ ext_equal = H5Sextent_equal(d3_space4, d3_space4);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+
+ /* Close dataspaces */
+ ret = H5Sclose(null_space);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(scalar_space);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(d1_space1);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(d1_space2);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(d1_space3);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(d1_space4);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(d2_space1);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(d2_space2);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(d2_space3);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(d2_space4);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(d3_space1);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(d3_space2);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(d3_space3);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(d3_space4);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* test_h5s_extent_equal() */
+
+/****************************************************************
+**
+** test_h5s_extent_copy(): Exercise extent copy code
+**
+****************************************************************/
+static void
+test_h5s_extent_copy(void)
+{
+ hid_t spaces[14] = {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}; /* Array of all dataspaces */
+ hid_t tmp_space = -1;
+ hsize_t d1_dims1[1] = {10}, /* 1-D dimensions */
+ d1_dims2[1] = {20}, d1_dims3[1] = {H5S_UNLIMITED};
+ hsize_t d2_dims1[2] = {10, 10}, /* 2-D dimensions */
+ d2_dims2[2] = {20, 20}, d2_dims3[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
+ hsize_t d3_dims1[3] = {10, 10, 10}, /* 3-D dimensions */
+ d3_dims2[3] = {20, 20, 20}, d3_dims3[3] = {H5S_UNLIMITED, H5S_UNLIMITED, H5S_UNLIMITED};
+ hsize_t npoints[14]; /* Expected number of points in selection for each element in spaces */
+ hssize_t npoints_ret; /* Number of points returned by H5Sget_select_npoints() */
+ htri_t ext_equal; /* Whether two dataspace extents are equal */
+ const unsigned num_spaces = sizeof(spaces) / sizeof(spaces[0]);
+ unsigned i, j;
+ herr_t ret; /* Generic error return */
+
+ /* Create dataspaces */
+ spaces[0] = H5Screate(H5S_NULL);
+ CHECK(spaces[0], FAIL, "H5Screate");
+ npoints[0] = (hsize_t)0;
+
+ spaces[1] = H5Screate(H5S_SCALAR);
+ CHECK(spaces[1], FAIL, "H5Screate");
+ npoints[1] = (hsize_t)1;
+
+ spaces[2] = H5Screate_simple(1, d1_dims1, NULL);
+ CHECK(spaces[2], FAIL, "H5Screate");
+ npoints[2] = d1_dims1[0];
+ spaces[3] = H5Screate_simple(1, d1_dims2, NULL);
+ CHECK(spaces[3], FAIL, "H5Screate");
+ npoints[3] = d1_dims2[0];
+ spaces[4] = H5Screate_simple(1, d1_dims1, d1_dims2);
+ CHECK(spaces[4], FAIL, "H5Screate");
+ npoints[4] = d1_dims1[0];
+ spaces[5] = H5Screate_simple(1, d1_dims1, d1_dims3);
+ CHECK(spaces[5], FAIL, "H5Screate");
+ npoints[5] = d1_dims1[0];
+
+ spaces[6] = H5Screate_simple(2, d2_dims1, NULL);
+ CHECK(spaces[6], FAIL, "H5Screate");
+ npoints[6] = d2_dims1[0] * d2_dims1[1];
+ spaces[7] = H5Screate_simple(2, d2_dims2, NULL);
+ CHECK(spaces[7], FAIL, "H5Screate");
+ npoints[7] = d2_dims2[0] * d2_dims2[1];
+ spaces[8] = H5Screate_simple(2, d2_dims1, d2_dims2);
+ CHECK(spaces[8], FAIL, "H5Screate");
+ npoints[8] = d2_dims1[0] * d2_dims1[1];
+ spaces[9] = H5Screate_simple(2, d2_dims1, d2_dims3);
+ CHECK(spaces[9], FAIL, "H5Screate");
+ npoints[9] = d2_dims1[0] * d2_dims1[1];
+
+ spaces[10] = H5Screate_simple(3, d3_dims1, NULL);
+ CHECK(spaces[10], FAIL, "H5Screate");
+ npoints[10] = d3_dims1[0] * d3_dims1[1] * d3_dims1[2];
+ spaces[11] = H5Screate_simple(3, d3_dims2, NULL);
+ CHECK(spaces[11], FAIL, "H5Screate");
+ npoints[11] = d3_dims2[0] * d3_dims2[1] * d3_dims2[2];
+ spaces[12] = H5Screate_simple(3, d3_dims1, d3_dims2);
+ CHECK(spaces[12], FAIL, "H5Screate");
+ npoints[12] = d3_dims1[0] * d3_dims1[1] * d3_dims1[2];
+ spaces[13] = H5Screate_simple(3, d3_dims1, d3_dims3);
+ CHECK(spaces[13], FAIL, "H5Screate");
+ npoints[13] = d3_dims1[0] * d3_dims1[1] * d3_dims1[2];
+
+ tmp_space = H5Screate(H5S_NULL);
+ CHECK(tmp_space, FAIL, "H5Screate");
+
+ /* Copy between all dataspace combinations. Note there are a few
+ * duplicates. */
+ for (i = 0; i < num_spaces; i++)
+ for (j = i; j < num_spaces; j++) {
+ /* Copy from i to j, unless the inner loop just restarted, in which
+ * case i and j are the same, so the second call to H5Sextent_copy()
+ * will test copying from i/j to i/j */
+ ret = H5Sextent_copy(tmp_space, spaces[j]);
+ CHECK(ret, FAIL, "H5Sextent_copy");
+
+ /* Verify that the extents are equal */
+ ext_equal = H5Sextent_equal(tmp_space, spaces[j]);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+
+ /* Verify that the correct number of elements is selected */
+ npoints_ret = H5Sget_select_npoints(tmp_space);
+ VERIFY((hsize_t)npoints_ret, npoints[j], "H5Sget_select_npoints");
+
+ /* Copy from j to i */
+ ret = H5Sextent_copy(tmp_space, spaces[i]);
+ CHECK(ret, FAIL, "H5Sextent_copy");
+
+ /* Verify that the extents are equal */
+ ext_equal = H5Sextent_equal(tmp_space, spaces[i]);
+ VERIFY(ext_equal, TRUE, "H5Sextent_equal");
+
+ /* Verify that the correct number of elements is selected */
+ npoints_ret = H5Sget_select_npoints(tmp_space);
+ VERIFY((hsize_t)npoints_ret, npoints[i], "H5Sget_select_npoints");
+ } /* end for */
+
+ /* Close dataspaces */
+ for (i = 0; i < num_spaces; i++) {
+ ret = H5Sclose(spaces[i]);
+ CHECK(ret, FAIL, "H5Sclose");
+ spaces[i] = -1;
+ } /* end for */
+
+ ret = H5Sclose(tmp_space);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* test_h5s_extent_copy() */
+
+/****************************************************************
+**
+** test_h5s_bug1(): Test Creating dataspace with H5Screate then
+* setting extent with H5Sextent_copy.
+**
+****************************************************************/
+static void
+test_h5s_bug1(void)
+{
+ hid_t space1; /* Dataspace to copy extent to */
+ hid_t space2; /* Scalar dataspace */
+ hsize_t dims[2] = {10, 10}; /* Dimensions */
+ hsize_t start[2] = {0, 0}; /* Hyperslab start */
+ htri_t select_valid; /* Whether the dataspace selection is valid */
+ herr_t ret; /* Generic error return */
+
+ /* Create dataspaces */
+ space1 = H5Screate(H5S_SIMPLE);
+ CHECK(space1, FAIL, "H5Screate");
+ space2 = H5Screate_simple(2, dims, NULL);
+ CHECK(space2, FAIL, "H5Screate");
+
+ /* Copy extent to space1 */
+ ret = H5Sextent_copy(space1, space2);
+ CHECK(ret, FAIL, "H5Sextent_copy");
+
+ /* Select hyperslab in space1 containing entire extent */
+ ret = H5Sselect_hyperslab(space1, H5S_SELECT_SET, start, NULL, dims, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Check that space1's selection is valid */
+ select_valid = H5Sselect_valid(space1);
+ CHECK(select_valid, FAIL, "H5Sselect_valid");
+ VERIFY(select_valid, TRUE, "H5Sselect_valid result");
+
+ /* Close dataspaces */
+ ret = H5Sclose(space1);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(space2);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* test_h5s_bug1() */
+
+/****************************************************************
+**
+** test_h5s_bug2(): Test combining hyperslabs in a way that used
+** to trip up H5S__hyper_update_diminfo()
+**
+****************************************************************/
+static void
+test_h5s_bug2(void)
+{
+ hid_t space; /* Dataspace to copy extent to */
+ hsize_t dims[2] = {1, 5}; /* Dimensions */
+ hsize_t start[2] = {0, 0}; /* Hyperslab start */
+ hsize_t count[2] = {1, 1}; /* Hyperslab start */
+ htri_t select_valid; /* Whether the dataspace selection is valid */
+ hssize_t elements_selected; /* Number of elements selected */
+ herr_t ret; /* Generic error return */
+
+ /* Create dataspace */
+ space = H5Screate_simple(2, dims, NULL);
+ CHECK(space, FAIL, "H5Screate");
+
+ /* Select hyperslab in space containing first element */
+ ret = H5Sselect_hyperslab(space, H5S_SELECT_SET, start, NULL, count, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Add hyperslab in space containing last element */
+ start[1] = 4;
+ ret = H5Sselect_hyperslab(space, H5S_SELECT_OR, start, NULL, count, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Add hyperslab in space containing the first 3 elements */
+ start[1] = 0;
+ count[1] = 3;
+ ret = H5Sselect_hyperslab(space, H5S_SELECT_OR, start, NULL, count, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Check that space's selection is valid */
+ select_valid = H5Sselect_valid(space);
+ CHECK(select_valid, FAIL, "H5Sselect_valid");
+ VERIFY(select_valid, TRUE, "H5Sselect_valid result");
+
+ /* Check that 4 elements are selected */
+ elements_selected = H5Sget_select_npoints(space);
+ CHECK(elements_selected, FAIL, "H5Sselect_valid");
+ VERIFY(elements_selected, 4, "H5Sselect_valid result");
+
+ /* Close dataspaces */
+ ret = H5Sclose(space);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* test_h5s_bug2() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_versionbounds
+ *
+ * Purpose: Tests version bounds with dataspace.
+ *
+ * Description:
+ * This function creates a file with lower bounds then later
+ * reopens it with higher bounds to show that the dataspace
+ * version is upgraded appropriately.
+ *
+ * Return: Success: 0
+ * Failure: number of errors
+ *
+ *-------------------------------------------------------------------------
+ */
+#define VERBFNAME "tverbounds_dspace.h5"
+#define BASIC_DSET "Basic Dataset"
+#define LATEST_DSET "Latest Dataset"
+static void
+test_versionbounds(void)
+{
+ hid_t file = -1; /* File ID */
+ hid_t space = -1; /* Dataspace ID */
+ hid_t dset = -1; /* Dataset ID */
+ hid_t fapl = -1; /* File access property list ID */
+ hid_t dset_space = -1; /* Retrieved dataset's dataspace ID */
+ hsize_t dim[1]; /* Dataset dimensions */
+ H5F_libver_t low, high; /* File format bounds */
+#if 0
+ H5S_t *spacep = NULL; /* Pointer to internal dataspace */
+#endif
+ herr_t ret = 0; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Version Bounds\n"));
+
+ /* Create a file access property list */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+
+ /* Create dataspace */
+ dim[0] = 10;
+ space = H5Screate_simple(1, dim, NULL);
+ CHECK(space, FAIL, "H5Screate");
+#if 0
+ /* Its version should be H5O_SDSPACE_VERSION_1 */
+ spacep = (H5S_t *)H5I_object(space);
+ CHECK_PTR(spacep, "H5I_object");
+ VERIFY(spacep->extent.version, H5O_SDSPACE_VERSION_1, "basic dataspace version bound");
+#endif
+
+ /* Set high bound to V18 */
+ low = H5F_LIBVER_EARLIEST;
+ high = H5F_LIBVER_V18;
+ ret = H5Pset_libver_bounds(fapl, low, high);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ /* Create the file */
+ file = H5Fcreate(VERBFNAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(file, FAIL, "H5Fcreate");
+
+ /* Create a basic dataset */
+ dset = H5Dcreate2(file, BASIC_DSET, H5T_NATIVE_INT, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ if (dset > 0) /* dataset created successfully */
+ {
+ /* Get the internal dataspace pointer */
+ dset_space = H5Dget_space(dset);
+ CHECK(dset_space, FAIL, "H5Dget_space");
+#if 0
+ spacep = (H5S_t *)H5I_object(dset_space);
+ CHECK_PTR(spacep, "H5I_object");
+
+ /* Dataspace version should remain as H5O_SDSPACE_VERSION_1 */
+ VERIFY(spacep->extent.version, H5O_SDSPACE_VERSION_1, "basic dataspace version bound");
+#endif
+ /* Close dataspace */
+ ret = H5Sclose(dset_space);
+ CHECK(ret, FAIL, "H5Sclose");
+ }
+
+ /* Close basic dataset and the file */
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Set low and high bounds to latest to trigger the increment of the
+ dataspace version */
+ low = H5F_LIBVER_LATEST;
+ high = H5F_LIBVER_LATEST;
+ ret = H5Pset_libver_bounds(fapl, low, high);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ /* Reopen the file with new version bounds, LATEST/LATEST */
+ file = H5Fopen(VERBFNAME, H5F_ACC_RDWR, fapl);
+
+ /* Create another dataset using the same dspace as the previous dataset */
+ dset = H5Dcreate2(file, LATEST_DSET, H5T_NATIVE_INT, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset, FAIL, "H5Dcreate2");
+
+ /* Dataset created successfully. Verify that dataspace version has been
+ upgraded per the low bound */
+
+ /* Get the internal dataspace pointer */
+ dset_space = H5Dget_space(dset);
+ CHECK(dset_space, FAIL, "H5Dget_space");
+#if 0
+ spacep = (H5S_t *)H5I_object(dset_space);
+ CHECK_PTR(spacep, "H5I_object");
+
+ /* Verify the dataspace version */
+ VERIFY(spacep->extent.version, H5O_sdspace_ver_bounds[low], "upgraded dataspace version");
+#endif
+ /* Close everything */
+ ret = H5Sclose(dset_space);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Dclose(dset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Sclose(space);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* end test_versionbounds() */
+
+/****************************************************************
+**
+** test_h5s(): Main H5S (dataspace) testing routine.
+**
+****************************************************************/
+void
+test_h5s(void)
+{
+ H5F_libver_t low, high; /* Low and high bounds */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Dataspaces\n"));
+
+ test_h5s_basic(); /* Test basic H5S code */
+ test_h5s_null(); /* Test Null dataspace H5S code */
+ test_h5s_zero_dim(); /* Test dataspace with zero dimension size */
+#if 0
+ /* Loop through all the combinations of low/high version bounds */
+ for (low = H5F_LIBVER_EARLIEST; low < H5F_LIBVER_NBOUNDS; low++) {
+ for (high = H5F_LIBVER_EARLIEST; high < H5F_LIBVER_NBOUNDS; high++) {
+
+ /* Invalid combinations, just continue */
+ if (high == H5F_LIBVER_EARLIEST || high < low)
+ continue;
+#else
+ low = H5F_LIBVER_LATEST;
+ high = H5F_LIBVER_LATEST;
+#endif
+ test_h5s_encode(low, high); /* Test encoding and decoding */
+ test_h5s_encode_regular_hyper(low, high); /* Test encoding regular hyperslabs */
+ test_h5s_encode_irregular_hyper(low, high); /* Test encoding irregular hyperslabs */
+ test_h5s_encode_points(low, high); /* Test encoding points */
+#if 0
+ } /* end high bound */
+ } /* end low bound */
+#endif
+ test_h5s_encode_length(); /* Test version 2 hyperslab encoding length is correct */
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+ test_h5s_encode1(); /* Test operations with old API routine (H5Sencode1) */
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+
+ test_h5s_scalar_write(); /* Test scalar H5S writing code */
+ test_h5s_scalar_read(); /* Test scalar H5S reading code */
+
+ test_h5s_compound_scalar_write(); /* Test compound datatype scalar H5S writing code */
+ test_h5s_compound_scalar_read(); /* Test compound datatype scalar H5S reading code */
+
+ /* This test was added later to exercise a bug in chunked I/O */
+ test_h5s_chunk(); /* Exercise bug fix for chunked I/O */
+
+ test_h5s_extent_equal(); /* Test extent comparison code */
+ test_h5s_extent_copy(); /* Test extent copy code */
+ test_h5s_bug1(); /* Test bug in offset initialization */
+ test_h5s_bug2(); /* Test bug found in H5S__hyper_update_diminfo() */
+ test_versionbounds(); /* Test version bounds with dataspace */
+} /* test_h5s() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_h5s
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Albert Cheng
+ * July 2, 1998
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_h5s(void)
+{
+ H5Fdelete(DATAFILE, H5P_DEFAULT);
+ H5Fdelete(NULLFILE, H5P_DEFAULT);
+ H5Fdelete(BASICFILE, H5P_DEFAULT);
+ H5Fdelete(ZEROFILE, H5P_DEFAULT);
+ H5Fdelete(VERBFNAME, H5P_DEFAULT);
+}
diff --git a/test/API/tid.c b/test/API/tid.c
new file mode 100644
index 0000000..2dd8851
--- /dev/null
+++ b/test/API/tid.c
@@ -0,0 +1,1413 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/* Test user-created identifiers (hid_t's) and identifier types. */
+
+#include "testhdf5.h"
+
+#if 0
+/* Include H5Ipkg.h to calculate max number of groups */
+#define H5I_FRIEND /*suppress error about including H5Ipkg */
+#include "H5Ipkg.h"
+#endif
+
+/*
+ * Number of bits to use for ID Type in each ID. Increase if more types
+ * are needed (though this will decrease the number of available IDs per
+ * type). This is the only number that must be changed since all other bit
+ * field sizes and masks are calculated from TYPE_BITS.
+ */
+#define TYPE_BITS 7
+#define TYPE_MASK (((hid_t)1 << TYPE_BITS) - 1)
+
+#define H5I_MAX_NUM_TYPES TYPE_MASK
+
+static herr_t
+free_wrapper(void *p, void H5_ATTR_UNUSED **_ctx)
+{
+ HDfree(p);
+ return SUCCEED;
+}
+
+/* Test basic functionality of registering and deleting types and IDs */
+static int
+basic_id_test(void)
+{
+ H5I_type_t myType = H5I_BADID;
+ hid_t arrayID = H5I_INVALID_HID;
+ void *testObj = NULL;
+ void *testPtr = NULL;
+ char nameString[10];
+ hid_t testID;
+ ssize_t testSize = -1;
+ herr_t err;
+ int num_ref;
+ hsize_t num_members;
+
+ /* Try to register an ID with fictitious types */
+ H5E_BEGIN_TRY
+ arrayID = H5Iregister((H5I_type_t)420, testObj);
+ H5E_END_TRY
+
+ VERIFY(arrayID, H5I_INVALID_HID, "H5Iregister");
+ if (arrayID != H5I_INVALID_HID)
+ goto out;
+
+ H5E_BEGIN_TRY
+ arrayID = H5Iregister((H5I_type_t)-1, testObj);
+ H5E_END_TRY
+
+ VERIFY(arrayID, H5I_INVALID_HID, "H5Iregister");
+ if (arrayID != H5I_INVALID_HID)
+ goto out;
+
+ /* Try to access IDs with fictitious types */
+ H5E_BEGIN_TRY
+ testPtr = H5Iobject_verify((hid_t)100, (H5I_type_t)0);
+ H5E_END_TRY
+
+ CHECK_PTR_NULL(testPtr, "H5Iobject_verify");
+ if (testPtr != NULL)
+ goto out;
+
+ H5E_BEGIN_TRY
+ testPtr = H5Iobject_verify((hid_t)700, (H5I_type_t)700);
+ H5E_END_TRY
+
+ CHECK_PTR_NULL(testPtr, "H5Iobject_verify");
+ if (testPtr != NULL)
+ goto out;
+
+ /* Register a type */
+ myType = H5Iregister_type((size_t)64, 0, free_wrapper);
+
+ CHECK(myType, H5I_BADID, "H5Iregister_type");
+ if (myType == H5I_BADID)
+ goto out;
+
+ /* Register an ID and retrieve the object it points to.
+ * Once the ID has been registered, testObj will be freed when
+ * its ID type is destroyed.
+ */
+ testObj = HDmalloc(7 * sizeof(int));
+ arrayID = H5Iregister(myType, testObj);
+
+ CHECK(arrayID, H5I_INVALID_HID, "H5Iregister");
+ if (arrayID == H5I_INVALID_HID) {
+ HDfree(testObj);
+ goto out;
+ }
+
+ testPtr = (int *)H5Iobject_verify(arrayID, myType);
+
+ CHECK_PTR_EQ(testPtr, testObj, "H5Iobject_verify");
+ if (testPtr != testObj)
+ goto out;
+
+ /* Ensure that H5Iget_file_id and H5Iget_name() fail, since this
+ * is an hid_t for the wrong kind of object
+ */
+ H5E_BEGIN_TRY
+ testID = H5Iget_file_id(arrayID);
+ H5E_END_TRY
+
+ VERIFY(testID, H5I_INVALID_HID, "H5Iget_file_id");
+ if (testID != H5I_INVALID_HID)
+ goto out;
+
+ H5E_BEGIN_TRY
+ testSize = H5Iget_name(arrayID, nameString, (size_t)9);
+ H5E_END_TRY
+
+ VERIFY(testSize, -1, "H5Iget_name");
+ if (testSize != -1)
+ goto out;
+
+ /* Make sure H5Iremove_verify catches objects of the wrong type */
+ H5E_BEGIN_TRY
+ testPtr = (int *)H5Iremove_verify(arrayID, (H5I_type_t)0);
+ H5E_END_TRY
+
+ CHECK_PTR_NULL(testPtr, "H5Iremove_verify");
+ if (testPtr != NULL)
+ goto out;
+
+ H5E_BEGIN_TRY
+ testPtr = (int *)H5Iremove_verify(arrayID, (H5I_type_t)((int)myType - 1));
+ H5E_END_TRY
+
+ CHECK_PTR_NULL(testPtr, "H5Iremove_verify");
+ if (testPtr != NULL)
+ goto out;
+
+ /* Remove an ID and make sure we can't access it */
+ testPtr = (int *)H5Iremove_verify(arrayID, myType);
+
+ CHECK_PTR(testPtr, "H5Iremove_verify");
+ if (testPtr == NULL)
+ goto out;
+
+ H5E_BEGIN_TRY
+ testPtr = (int *)H5Iobject_verify(arrayID, myType);
+ H5E_END_TRY
+
+ CHECK_PTR_NULL(testPtr, "H5Iobject_verify");
+ if (testPtr != NULL)
+ goto out;
+
+ /* Delete the type and make sure we can't access objects within it */
+ arrayID = H5Iregister(myType, testObj);
+
+ err = H5Idestroy_type(myType);
+ VERIFY(err, 0, "H5Idestroy_type");
+ if (err != 0)
+ goto out;
+ VERIFY(H5Itype_exists(myType), 0, "H5Itype_exists");
+ if (H5Itype_exists(myType) != 0)
+ goto out;
+
+ H5E_BEGIN_TRY
+ VERIFY(H5Inmembers(myType, NULL), -1, "H5Inmembers");
+ if (H5Inmembers(myType, NULL) != -1)
+ goto out;
+ H5E_END_TRY
+
+ /* Register another type and another object in that type */
+ myType = H5Iregister_type((size_t)64, 0, free_wrapper);
+
+ CHECK(myType, H5I_BADID, "H5Iregister_type");
+ if (myType == H5I_BADID)
+ goto out;
+
+ /* The memory that testObj pointed to should already have been
+ * freed when the previous type was destroyed. Allocate new
+ * memory for it.
+ */
+ testObj = HDmalloc(7 * sizeof(int));
+ arrayID = H5Iregister(myType, testObj);
+
+ CHECK(arrayID, H5I_INVALID_HID, "H5Iregister");
+ if (arrayID == H5I_INVALID_HID) {
+ HDfree(testObj);
+ goto out;
+ }
+
+ err = H5Inmembers(myType, &num_members);
+ CHECK(err, -1, "H5Inmembers");
+ if (err < 0)
+ goto out;
+ VERIFY(num_members, 1, "H5Inmembers");
+ if (num_members != 1)
+ goto out;
+
+ /* Increment references to type and ensure that dec_type_ref
+ * doesn't destroy the type
+ */
+ num_ref = H5Iinc_type_ref(myType);
+ VERIFY(num_ref, 2, "H5Iinc_type_ref");
+ if (num_ref != 2)
+ goto out;
+ num_ref = H5Idec_type_ref(myType);
+ VERIFY(num_ref, 1, "H5Idec_type_ref");
+ if (num_ref != 1)
+ goto out;
+ err = H5Inmembers(myType, &num_members);
+ CHECK(err, -1, "H5Inmembers");
+ if (err < 0)
+ goto out;
+ VERIFY(num_members, 1, "H5Inmembers");
+ if (num_members != 1)
+ goto out;
+
+ /* This call to dec_type_ref should destroy the type */
+ num_ref = H5Idec_type_ref(myType);
+ VERIFY(num_ref, 0, "H5Idec_type_ref");
+ if (num_ref != 0)
+ goto out;
+ VERIFY(H5Itype_exists(myType), 0, "H5Itype_exists");
+ if (H5Itype_exists(myType) != 0)
+ goto out;
+
+ H5E_BEGIN_TRY
+ err = H5Inmembers(myType, &num_members);
+ if (err >= 0)
+ goto out;
+ H5E_END_TRY
+
+ return 0;
+
+out:
+ /* Clean up type if it has been allocated and free memory used
+ * by testObj
+ */
+ if (myType >= 0)
+ H5Idestroy_type(myType);
+
+ return -1;
+}
+
+/* A dummy search function for the next test */
+static int
+test_search_func(void H5_ATTR_UNUSED *ptr1, hid_t H5_ATTR_UNUSED id, void H5_ATTR_UNUSED *ptr2)
+{
+ return 0;
+}
+
+/* Ensure that public functions cannot access "predefined" ID types */
+static int
+id_predefined_test(void)
+{
+ void *testObj;
+ hid_t testID;
+ hid_t typeID = H5I_INVALID_HID;
+ void *testPtr;
+ herr_t testErr;
+
+ testObj = HDmalloc(sizeof(int));
+
+ /*
+ * Attempt to perform public functions on various library types
+ */
+
+ H5E_BEGIN_TRY
+ testID = H5Iregister(H5I_FILE, testObj);
+ H5E_END_TRY
+
+ VERIFY(testID, H5I_INVALID_HID, "H5Iregister");
+ if (testID != H5I_INVALID_HID)
+ goto out;
+
+ H5E_BEGIN_TRY
+ testPtr = H5Isearch(H5I_GENPROP_LST, test_search_func, testObj);
+ H5E_END_TRY
+
+ CHECK_PTR_NULL(testPtr, "H5Isearch");
+ if (testPtr != NULL)
+ goto out;
+
+ H5E_BEGIN_TRY
+ testErr = H5Inmembers(H5I_ERROR_STACK, NULL);
+ H5E_END_TRY
+
+ VERIFY(testErr, -1, "H5Inmembers");
+ if (testErr != -1)
+ goto out;
+
+ H5E_BEGIN_TRY
+ testErr = H5Iclear_type(H5I_FILE, 0);
+ H5E_END_TRY
+
+ VERIFY((testErr >= 0), 0, "H5Iclear_type");
+ if (testErr >= 0)
+ goto out;
+
+ H5E_BEGIN_TRY
+ testErr = H5Idestroy_type(H5I_DATASET);
+ H5E_END_TRY
+
+ VERIFY((testErr >= 0), 0, "H5Idestroy_type");
+ if (testErr >= 0)
+ goto out;
+
+ H5E_BEGIN_TRY
+ testErr = H5Itype_exists(H5I_GROUP);
+ H5E_END_TRY
+
+ VERIFY(testErr, -1, "H5Itype_exists");
+ if (testErr != -1)
+ goto out;
+
+ H5E_BEGIN_TRY
+ testErr = H5Itype_exists(H5I_ATTR);
+ H5E_END_TRY
+
+ VERIFY(testErr, -1, "H5Itype_exists");
+ if (testErr != -1)
+ goto out;
+
+ /*
+ * Create a datatype ID and try to perform illegal functions on it
+ */
+
+ typeID = H5Tcreate(H5T_OPAQUE, (size_t)42);
+ CHECK(typeID, H5I_INVALID_HID, "H5Tcreate");
+ if (typeID == H5I_INVALID_HID)
+ goto out;
+
+ H5E_BEGIN_TRY
+ testPtr = H5Iremove_verify(typeID, H5I_DATATYPE);
+ H5E_END_TRY
+
+ CHECK_PTR_NULL(testPtr, "H5Iremove_verify");
+ if (testPtr != NULL)
+ goto out;
+
+ H5E_BEGIN_TRY
+ testPtr = H5Iobject_verify(typeID, H5I_DATATYPE);
+ H5E_END_TRY
+
+ CHECK_PTR_NULL(testPtr, "H5Iobject_verify");
+ if (testPtr != NULL)
+ goto out;
+
+ H5Tclose(typeID);
+
+ /* testObj was never registered as an atom, so it will not be
+ * automatically freed. */
+ HDfree(testObj);
+ return 0;
+
+out:
+ if (typeID != H5I_INVALID_HID)
+ H5Tclose(typeID);
+ if (testObj != NULL)
+ HDfree(testObj);
+
+ return -1;
+}
+
+/* Test the H5Iis_valid function */
+static int
+test_is_valid(void)
+{
+ hid_t dtype; /* datatype id */
+#if 0
+ int64_t nmembs1; /* number of type memnbers */
+ int64_t nmembs2;
+#endif
+ htri_t tri_ret; /* htri_t return value */
+#if 0
+ herr_t ret; /* return value */
+#endif
+
+ /* Create a datatype id */
+ dtype = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(dtype, FAIL, "H5Tcopy");
+ if (dtype < 0)
+ goto out;
+
+ /* Check that the ID is valid */
+ tri_ret = H5Iis_valid(dtype);
+ VERIFY(tri_ret, TRUE, "H5Iis_valid");
+ if (tri_ret != TRUE)
+ goto out;
+#if 0 /* Cannot call internal APIs and cannot call public H5Inmembers on library types */
+ /* Artificially manipulate the reference counts so app_count is 0, and dtype
+ * appears to be an internal id. This takes advantage of the fact that
+ * H5Ipkg is included.
+ */
+ ret = H5I_inc_ref(dtype, FALSE);
+ CHECK(ret, FAIL, "H5I_inc_ref");
+ if (ret < 0)
+ goto out;
+ ret = H5I_dec_app_ref(dtype);
+ CHECK(ret, FAIL, "H5I_dec_ref");
+ if (ret < 0)
+ goto out;
+
+ /* Check that dtype is invalid */
+ tri_ret = H5Iis_valid(dtype);
+ VERIFY(tri_ret, FALSE, "H5Iis_valid");
+ if (tri_ret != FALSE)
+ goto out;
+
+ /* Close dtype and verify that it has been closed */
+ nmembs1 = H5I_nmembers(H5I_DATATYPE);
+ CHECK(nmembs1, FAIL, "H5I_nmembers");
+ if (nmembs1 < 0)
+ goto out;
+ ret = H5I_dec_ref(dtype);
+ CHECK(ret, FAIL, "H5I_dec_ref");
+ if (ret < 0)
+ goto out;
+ nmembs2 = H5I_nmembers(H5I_DATATYPE);
+ VERIFY(nmembs2, nmembs1 - 1, "H5I_nmembers");
+ if (nmembs2 != nmembs1 - 1)
+ goto out;
+
+ /* Check that dtype is invalid */
+ tri_ret = H5Iis_valid(dtype);
+ VERIFY(tri_ret, FALSE, "H5Iis_valid");
+ if (tri_ret != FALSE)
+ goto out;
+#endif
+ /* Check that an id of -1 is invalid */
+ tri_ret = H5Iis_valid((hid_t)-1);
+ VERIFY(tri_ret, FALSE, "H4Iis_valid");
+ if (tri_ret != FALSE)
+ goto out;
+
+ return 0;
+
+out:
+ /* Don't attempt to close dtype as we don't know the exact state of the
+ * reference counts. Every state in this function will be automatically
+ * closed at library exit anyways, as internal count is never > 1.
+ */
+ return -1;
+}
+
+/* Test the H5Iget_type function */
+static int
+test_get_type(void)
+{
+ hid_t dtype; /* datatype id */
+ H5I_type_t type_ret; /* return value */
+
+ /* Create a datatype id */
+ dtype = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(dtype, FAIL, "H5Tcopy");
+ if (dtype < 0)
+ goto out;
+
+ /* Check that the ID is correct */
+ type_ret = H5Iget_type(dtype);
+ VERIFY(type_ret, H5I_DATATYPE, "H5Iget_type");
+ if (type_ret == H5I_BADID)
+ goto out;
+
+ /* Check that the ID is correct */
+ type_ret = H5Iget_type((hid_t)H5T_STRING);
+ VERIFY(type_ret, H5I_BADID, "H5Iget_type");
+ if (type_ret != H5I_BADID)
+ goto out;
+
+ /* Check that the ID is correct */
+ type_ret = H5Iget_type((hid_t)-1);
+ VERIFY(type_ret, H5I_BADID, "H5Iget_type");
+ if (type_ret != H5I_BADID)
+ goto out;
+
+ H5Tclose(dtype);
+
+ return 0;
+
+out:
+ if (dtype != H5I_INVALID_HID)
+ H5Tclose(dtype);
+
+ return -1;
+}
+
+/* Test boundary cases with lots of types */
+
+/* Type IDs range from H5I_NTYPES to H5I_MAX_NUM_TYPES. The system will assign */
+/* IDs in sequential order until H5I_MAX_NUM_TYPES IDs have been given out, at which */
+/* point it will search for type IDs that were allocated but have since been */
+/* deleted. */
+/* This test will allocate IDs up to H5I_MAX_NUM_TYPES, ensure that IDs wrap around */
+/* to low values successfully, ensure that an error is thrown when all possible */
+/* type IDs are taken, then ensure that deleting types frees up their IDs. */
+/* Note that this test depends on the implementation of IDs, so may break */
+/* if the implementation changes. */
+/* Also note that if someone else registered a user-defined type and forgot to */
+/* destroy it, this test will mysteriously fail (because it will expect there to */
+/* be one more "free" type ID than there is). */
+/* H5I_NTYPES is defined in h5public.h, H5I_MAX_NUM_TYPES is defined in h5pkg.h */
+static int
+test_id_type_list(void)
+{
+ H5I_type_t startType; /* The first type ID we were assigned in this test */
+ H5I_type_t currentType;
+ H5I_type_t testType;
+ int i; /* Just a counter variable */
+
+ startType = H5Iregister_type((size_t)8, 0, free_wrapper);
+ CHECK(startType, H5I_BADID, "H5Iregister_type");
+ if (startType == H5I_BADID)
+ goto out;
+
+ /* Sanity check */
+ if ((int)startType >= H5I_MAX_NUM_TYPES || startType < H5I_NTYPES) {
+ /* Error condition, throw an error */
+ ERROR("H5Iregister_type");
+ goto out;
+ }
+ /* Create types up to H5I_MAX_NUM_TYPES */
+ for (i = startType + 1; i < H5I_MAX_NUM_TYPES; i++) {
+ currentType = H5Iregister_type((size_t)8, 0, free_wrapper);
+ CHECK(currentType, H5I_BADID, "H5Iregister_type");
+ if (currentType == H5I_BADID)
+ goto out;
+ }
+
+ /* Wrap around to low type ID numbers */
+ for (i = H5I_NTYPES; i < startType; i++) {
+ currentType = H5Iregister_type((size_t)8, 0, free_wrapper);
+ CHECK(currentType, H5I_BADID, "H5Iregister_type");
+ if (currentType == H5I_BADID)
+ goto out;
+ }
+
+ /* There should be no room at the inn for a new ID type*/
+ H5E_BEGIN_TRY
+ testType = H5Iregister_type((size_t)8, 0, free_wrapper);
+ H5E_END_TRY
+
+ VERIFY(testType, H5I_BADID, "H5Iregister_type");
+ if (testType != H5I_BADID)
+ goto out;
+
+ /* Now delete a type and try to insert again */
+ H5Idestroy_type(H5I_NTYPES);
+ testType = H5Iregister_type((size_t)8, 0, free_wrapper);
+
+ VERIFY(testType, H5I_NTYPES, "H5Iregister_type");
+ if (testType != H5I_NTYPES)
+ goto out;
+
+ /* Cleanup. Destroy all types. */
+ for (i = H5I_NTYPES; i < H5I_MAX_NUM_TYPES; i++)
+ H5Idestroy_type((H5I_type_t)i);
+
+ return 0;
+
+out:
+ /* Cleanup. For simplicity, just destroy all types and ignore errors. */
+ H5E_BEGIN_TRY
+ for (i = H5I_NTYPES; i < H5I_MAX_NUM_TYPES; i++)
+ H5Idestroy_type((H5I_type_t)i);
+ H5E_END_TRY
+ return -1;
+}
+
+/* Test removing ids in callback for H5Iclear_type */
+
+/* There was a rare bug where, if an id free callback being called by
+ * H5I_clear_type() removed another id in that type, a segfault could occur.
+ * This test tests for that error (and freeing ids "out of order" within
+ * H5Iclear_type() in general).
+ *
+ * NB: RCT = "remove clear type"
+ */
+
+/* Macro definitions */
+#define RCT_MAX_NOBJS 25 /* Maximum number of objects in the list */
+#define RCT_MIN_NOBJS 5
+#define RCT_NITER 50 /* Number of times we cycle through object creation and deletion */
+
+/* Structure to hold the master list of objects */
+typedef struct rct_obj_list_t {
+
+ /* Pointer to the objects */
+ struct rct_obj_t *objects;
+
+ /* The number of objects in the list */
+ long count;
+
+ /* The number of objects in the list that have not been freed */
+ long remaining;
+} rct_obj_list_t;
+
+/* Structure for an object */
+typedef struct rct_obj_t {
+ /* The ID for this object */
+ hid_t id;
+
+ /* The number of times this object has been freed */
+ int nfrees;
+
+ /* Whether we are currently freeing this object directly
+ * through H5Idec_ref().
+ */
+ hbool_t freeing;
+
+ /* Pointer to the master list of all objects */
+ rct_obj_list_t *list;
+} rct_obj_t;
+
/* Free callback passed to H5Iclear_type()
 *
 * When invoked on a closing object, frees a random unfreed ID in the
 * master list of objects. This deliberately frees IDs "out of order"
 * while H5Iclear_type() is iterating, to exercise the library's handling
 * of that case.
 *
 * Parameters:
 *      _obj - the rct_obj_t being closed (cast from void *)
 *      _ctx - free-callback context (unused)
 *
 * Returns 0 on success, -1 on failure.
 */
static herr_t
rct_free_cb(void *_obj, void H5_ATTR_UNUSED **_ctx)
{
    rct_obj_t *obj = (rct_obj_t *)_obj;
    long remove_nth;
    long i;
    herr_t ret;

    /* Mark this object as freed */
    obj->nfrees++;

    /* Decrement the number of objects in the list that have not been freed */
    obj->list->remaining--;

    /* If this object isn't already being freed by a callback free call and
     * the master object list still contains objects to free, pick another
     * object and free it.
     */
    if (!obj->freeing && (obj->list->remaining > 0)) {

        /* Pick a random object from the list. This is done by picking a
         * random number between 0 and the # of remaining unfreed objects
         * and then scanning through the list to find that nth unfreed
         * object.
         *
         * Note that this object itself cannot be selected: its nfrees
         * count was already bumped above, so the scan skips it.
         */
        remove_nth = HDrandom() % obj->list->remaining;
        for (i = 0; i < obj->list->count; i++)
            if (obj->list->objects[i].nfrees == 0) {
                if (remove_nth == 0)
                    break;
                else
                    remove_nth--;
            }

        /* Badness if we scanned through the list and didn't manage to
         * select one to delete (the list stats were probably updated
         * incorrectly).
         */
        if (i == obj->list->count) {
            ERROR("invalid obj_list");
            goto error;
        }

        /* Mark the object we're about to free so its own callback does
         * not free another object. We don't want to recursively free the
         * entire list when we free the first ID.
         */
        obj->list->objects[i].freeing = TRUE;

        /* Decrement the reference count on the object
         * (drops it to 0, which invokes this callback on it in turn)
         */
        ret = H5Idec_ref(obj->list->objects[i].id);
        CHECK(ret, FAIL, "H5Idec_ref");
        if (ret == FAIL)
            goto error;

        /* Unset the "freeing" flag */
        obj->list->objects[i].freeing = FALSE;
    }

    /* Verify the number of objects remaining in the master list is non-negative */
    if (obj->list->remaining < 0) {
        ERROR("invalid number of objects remaining");
        goto error;
    }

    return 0;

error:
    return -1;
} /* end rct_free_cb() */
+
+/* Test function */
+static int
+test_remove_clear_type(void)
+{
+ H5I_type_t obj_type;
+ rct_obj_list_t obj_list;
+ rct_obj_t *objects = NULL; /* Convenience pointer to objects stored in master list */
+ size_t list_size;
+ long i, j;
+ herr_t ret; /* return value */
+
+ /* Register a user-defined type with our custom ID-deleting callback */
+ obj_type = H5Iregister_type((size_t)8, 0, rct_free_cb);
+ CHECK(obj_type, H5I_BADID, "H5Iregister_type");
+ if (obj_type == H5I_BADID)
+ goto error;
+
+ /* Create an array to hold the objects in the master list */
+ list_size = RCT_MAX_NOBJS * sizeof(rct_obj_t);
+ obj_list.objects = HDmalloc(list_size);
+ CHECK_PTR(obj_list.objects, "HDcalloc");
+ if (NULL == obj_list.objects)
+ goto error;
+
+ /* Set a convenience pointer to the object array */
+ objects = obj_list.objects;
+
+ for (i = 0; i < RCT_NITER; i++) {
+
+ /* The number of members in the type, according to the HDF5 library */
+ hsize_t nmembers = 1234567; /* (init to fake number) */
+
+ /* The number of objects found while scanning through the object list */
+ int found;
+
+ /*********************
+ * Build object list *
+ *********************/
+
+ HDmemset(obj_list.objects, 0, list_size);
+
+ /* The number of objects used is a random number between the min and max */
+ obj_list.count = obj_list.remaining =
+ RCT_MIN_NOBJS + (HDrandom() % (long)(RCT_MAX_NOBJS - RCT_MIN_NOBJS + 1));
+
+ /* Create the actual objects */
+ for (j = 0; j < obj_list.count; j++) {
+
+ /* Object setup */
+ objects[j].nfrees = 0;
+ objects[j].freeing = FALSE;
+ objects[j].list = &obj_list;
+
+ /* Register an ID for it */
+ objects[j].id = H5Iregister(obj_type, &objects[j]);
+ CHECK(objects[j].id, FAIL, "H5Iregister");
+ if (objects[j].id == FAIL)
+ goto error;
+
+ /* Bump the reference count by 1 (to 2) 50% of the time */
+ if (HDrandom() % 2) {
+ ret = H5Iinc_ref(objects[j].id);
+ CHECK(ret, FAIL, "H5Iinc_ref");
+ if (ret == FAIL)
+ goto error;
+ }
+ }
+
+ /******************************************
+ * Clear the type with force set to FALSE *
+ ******************************************/
+
+ /* Clear the type. Since force is FALSE, only
+ * IDs with a reference count of 1 will be cleared.
+ */
+ ret = H5Iclear_type(obj_type, FALSE);
+ CHECK(ret, FAIL, "H5Iclear_type");
+ if (ret == FAIL)
+ goto error;
+
+ /* Verify that the object struct fields are sane and count the
+ * number of unfreed objects
+ */
+ found = 0;
+ for (j = 0; j < obj_list.count; j++) {
+
+ if (objects[j].nfrees == 0) {
+ /* Count unfreed objects */
+ found++;
+ }
+ else {
+ /* Every freed object should have been freed exactly once */
+ VERIFY(objects[j].nfrees, 1, "object freed more than once");
+ if (objects[j].nfrees != 1)
+ goto error;
+ }
+
+ /* No object should still be marked as "freeing" */
+ VERIFY(objects[j].freeing, FALSE, "object marked as freeing");
+ if (objects[j].freeing != FALSE)
+ goto error;
+ }
+
+ /* Verify the number of unfreed objects we found during our scan
+ * matches the number stored in the list
+ */
+ VERIFY(obj_list.remaining, found, "incorrect number of objects remaining");
+ if (obj_list.remaining != found)
+ goto error;
+
+ /* Make sure the HDF5 library confirms our count */
+ ret = H5Inmembers(obj_type, &nmembers);
+ CHECK(ret, FAIL, "H5Inmembers");
+ if (ret == FAIL)
+ goto error;
+ VERIFY(nmembers, found, "The number of members remaining in the type did not match our count");
+ if (nmembers != (hsize_t)found)
+ goto error;
+
+ /*****************************************
+ * Clear the type with force set to TRUE *
+ *****************************************/
+
+ /* Clear the type. Since force is TRUE, all IDs will be cleared. */
+ ret = H5Iclear_type(obj_type, TRUE);
+ CHECK(ret, FAIL, "H5Iclear_type");
+ if (ret == FAIL)
+ goto error;
+
+ /* Verify that the object struct fields are sane */
+ for (j = 0; j < obj_list.count; j++) {
+
+ /* Every object should have been freed exactly once */
+ VERIFY(objects[j].nfrees, 1, "object freed more than once");
+ if (objects[j].nfrees != 1)
+ goto error;
+
+ /* No object should still be marked as "freeing" */
+ VERIFY(objects[j].freeing, FALSE, "object marked as freeing");
+ if (objects[j].freeing != FALSE)
+ goto error;
+ }
+
+ /* Verify the number of objects is 0 */
+ VERIFY(obj_list.remaining, 0, "objects remaining was not zero");
+ if (obj_list.remaining != 0)
+ goto error;
+
+ /* Make sure the HDF5 library confirms zero members in the type */
+ ret = H5Inmembers(obj_type, &nmembers);
+ CHECK(ret, FAIL, "H5Inmembers");
+ if (ret == FAIL)
+ goto error;
+ VERIFY(nmembers, 0, "The number of members remaining in the type was not zero");
+ if (nmembers != 0)
+ goto error;
+ }
+
+ /* Destroy the type */
+ ret = H5Idestroy_type(obj_type);
+ CHECK(ret, FAIL, "H5Idestroy_type");
+ if (ret == FAIL)
+ goto error;
+
+ /* Free the object array */
+ HDfree(obj_list.objects);
+
+ return 0;
+
+error:
+ /* Cleanup. For simplicity, just destroy the types and ignore errors. */
+ H5E_BEGIN_TRY
+ {
+ H5Idestroy_type(obj_type);
+ }
+ H5E_END_TRY
+
+ HDfree(obj_list.objects);
+
+ return -1;
+} /* end test_remove_clear_type() */
+
+#if defined(H5VL_VERSION) && H5VL_VERSION >= 2
/* Typedef for future objects: the payload handed to the realize/discard
 * callbacks for a future ID */
typedef struct {
    H5I_type_t obj_type; /* ID type of the actual object to create when realized */
} future_obj_t;

/* Global (static) future ID object type.  Used by the "generate" callbacks,
 * which receive a NULL future object and so need the type out-of-band
 * (see realize_future_generate_cb). */
H5I_type_t future_obj_type_g = H5I_BADID;
+
+/* Callback to free the actual object for future object test */
+static herr_t
+free_actual_object(void *_p, void H5_ATTR_UNUSED **_ctx)
+{
+ int *p = (int *)_p;
+
+ if (7 != *p)
+ return FAIL;
+
+ HDfree(p);
+
+ return SUCCEED;
+}
+
+/* Callback to realize a future object */
+static herr_t
+realize_future_cb(void *_future_obj, hid_t *actual_id)
+{
+ future_obj_t *future_obj = (future_obj_t *)_future_obj; /* Future object */
+ int *actual_obj; /* Pointer to the actual object */
+
+ /* Check for bad future object */
+ if (NULL == future_obj)
+ return FAIL;
+
+ /* Determine type of object to realize */
+ if (H5I_DATASPACE == future_obj->obj_type) {
+ hsize_t dims = 13;
+
+ if ((*actual_id = H5Screate_simple(1, &dims, NULL)) < 0)
+ return FAIL;
+ }
+ else if (H5I_DATATYPE == future_obj->obj_type) {
+ if ((*actual_id = H5Tcopy(H5T_NATIVE_INT)) < 0)
+ return FAIL;
+ }
+ else if (H5I_GENPROP_LST == future_obj->obj_type) {
+ if ((*actual_id = H5Pcreate(H5P_DATASET_XFER)) < 0)
+ return FAIL;
+ }
+ else {
+ /* Create a new object (the 'actual object') of the correct type */
+ if (NULL == (actual_obj = HDmalloc(sizeof(int))))
+ return FAIL;
+ *actual_obj = 7;
+
+ /* Register actual object of the user-defined type */
+ *actual_id = H5Iregister(future_obj->obj_type, actual_obj);
+ CHECK(*actual_id, FAIL, "H5Iregister");
+ if (*actual_id == FAIL)
+ return FAIL;
+ }
+
+ return SUCCEED;
+}
+
+/* Callback to discard a future object */
+static herr_t
+discard_future_cb(void *future_obj)
+{
+ if (NULL == future_obj)
+ return FAIL;
+
+ HDfree(future_obj);
+
+ return SUCCEED;
+}
+
/* Callback to realize a future object when the future object is NULL.
 *
 * Exercises the "actual object generator" use-case: no per-future payload
 * exists, so the ID type comes from the global future_obj_type_g instead.
 */
static herr_t
realize_future_generate_cb(void *_future_obj, hid_t *actual_id)
{
    future_obj_t *future_obj = (future_obj_t *)_future_obj; /* Future object */
    int *actual_obj;                                        /* Pointer to the actual object */

    /* These "generator" futures carry no payload; a non-NULL pointer
     * means the test registered the future incorrectly.
     */
    if (NULL != future_obj)
        return FAIL;
    /* Create a new object (the 'actual object') of the correct type */
    if (NULL == (actual_obj = HDmalloc(sizeof(int))))
        return FAIL;
    *actual_obj = 7;

    /* Register actual object without using future object info */
    *actual_id = H5Iregister(future_obj_type_g, actual_obj);
    CHECK(*actual_id, FAIL, "H5Iregister");
    if (*actual_id == FAIL)
        return FAIL;

    return SUCCEED;
}
+
+/* Callback to discard a future object when future objects are NULL */
+static herr_t
+discard_future_generate_cb(void *future_obj)
+{
+ if (NULL != future_obj)
+ return FAIL;
+
+ return SUCCEED;
+}
+
+/* Test function */
+static int
+test_future_ids(void)
+{
+ H5I_type_t obj_type; /* New user-defined ID type */
+ hid_t future_id; /* ID for future object */
+ int fake_future_obj; /* "Fake" future object for tests */
+ future_obj_t *future_obj; /* Future object */
+ int *actual_obj; /* Actual object */
+ int *actual_obj2; /* Another actual object */
+ H5I_type_t id_type; /* Type of ID */
+ H5T_class_t type_class; /* Datatype class */
+ herr_t ret; /* Return value */
+
+ /* Register a user-defined type with our custom ID-deleting callback */
+ obj_type = H5Iregister_type((size_t)15, 0, free_actual_object);
+ CHECK(obj_type, H5I_BADID, "H5Iregister_type");
+ if (H5I_BADID == obj_type)
+ goto error;
+
+ /* Test basic error conditions */
+ fake_future_obj = 0;
+ H5E_BEGIN_TRY
+ {
+ future_id = H5Iregister_future(obj_type, &fake_future_obj, NULL, NULL);
+ }
+ H5E_END_TRY
+ VERIFY(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID != future_id)
+ goto error;
+
+ H5E_BEGIN_TRY
+ {
+ future_id = H5Iregister_future(obj_type, &fake_future_obj, realize_future_cb, NULL);
+ }
+ H5E_END_TRY
+ VERIFY(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID != future_id)
+ goto error;
+
+ H5E_BEGIN_TRY
+ {
+ future_id = H5Iregister_future(obj_type, &fake_future_obj, NULL, discard_future_cb);
+ }
+ H5E_END_TRY
+ VERIFY(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID != future_id)
+ goto error;
+
+ H5E_BEGIN_TRY
+ {
+ future_id = H5Iregister_future(H5I_BADID, &fake_future_obj, realize_future_cb, discard_future_cb);
+ }
+ H5E_END_TRY
+ VERIFY(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID != future_id)
+ goto error;
+
+ /* Test base use-case: create a future object and destroy type without
+ * realizing the future object.
+ */
+ future_obj = HDmalloc(sizeof(future_obj_t));
+ future_obj->obj_type = obj_type;
+ future_id = H5Iregister_future(obj_type, future_obj, realize_future_cb, discard_future_cb);
+ CHECK(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID == future_id)
+ goto error;
+
+ /* Destroy the type */
+ ret = H5Idestroy_type(obj_type);
+ CHECK(ret, FAIL, "H5Idestroy_type");
+ if (FAIL == ret)
+ goto error;
+
+ /* Re-register a user-defined type with our custom ID-deleting callback */
+ obj_type = H5Iregister_type((size_t)15, 0, free_actual_object);
+ CHECK(obj_type, H5I_BADID, "H5Iregister_type");
+ if (H5I_BADID == obj_type)
+ goto error;
+
+ /* Test base use-case: create a future object and realize the actual object. */
+ future_obj = HDmalloc(sizeof(future_obj_t));
+ future_obj->obj_type = obj_type;
+ future_id = H5Iregister_future(obj_type, future_obj, realize_future_cb, discard_future_cb);
+ CHECK(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID == future_id)
+ goto error;
+
+ actual_obj = H5Iobject_verify(future_id, obj_type);
+ CHECK_PTR(actual_obj, "H5Iobject_verify");
+ if (NULL == actual_obj)
+ goto error;
+ VERIFY(*actual_obj, 7, "H5Iobject_verify");
+ if (7 != *actual_obj)
+ goto error;
+
+ /* Retrieve the object again and verify that it's the same actual object */
+ actual_obj2 = H5Iobject_verify(future_id, obj_type);
+ CHECK_PTR(actual_obj2, "H5Iobject_verify");
+ if (NULL == actual_obj2)
+ goto error;
+ VERIFY(*actual_obj2, 7, "H5Iobject_verify");
+ if (7 != *actual_obj2)
+ goto error;
+ CHECK_PTR_EQ(actual_obj, actual_obj2, "H5Iobject_verify");
+ if (actual_obj != actual_obj2)
+ goto error;
+
+ /* Destroy the type */
+ ret = H5Idestroy_type(obj_type);
+ CHECK(ret, FAIL, "H5Idestroy_type");
+ if (FAIL == ret)
+ goto error;
+
+ /* Re-register a user-defined type with our custom ID-deleting callback */
+ obj_type = H5Iregister_type((size_t)15, 0, free_actual_object);
+ CHECK(obj_type, H5I_BADID, "H5Iregister_type");
+ if (H5I_BADID == obj_type)
+ goto error;
+
+ /* Set the global future object type */
+ future_obj_type_g = obj_type;
+
+ /* Test "actual object generator" use-case: create a future object with
+ * NULL object pointer, to create new object of predefined type when
+ * future object is realized.
+ */
+ future_id = H5Iregister_future(obj_type, NULL, realize_future_generate_cb, discard_future_generate_cb);
+ CHECK(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID == future_id)
+ goto error;
+
+ /* Realize the actual object, with will be dynamically allocated within
+ * the 'realize' callback.
+ */
+ actual_obj = H5Iobject_verify(future_id, obj_type);
+ CHECK_PTR(actual_obj, "H5Iobject_verify");
+ if (NULL == actual_obj)
+ goto error;
+ VERIFY(*actual_obj, 7, "H5Iobject_verify");
+ if (7 != *actual_obj)
+ goto error;
+
+ /* Reset the global future object type */
+ future_obj_type_g = H5I_BADID;
+
+ /* Retrieve the object again and verify that it's the same actual object */
+ /* (Will fail if global future object type used) */
+ actual_obj2 = H5Iobject_verify(future_id, obj_type);
+ CHECK_PTR(actual_obj2, "H5Iobject_verify");
+ if (NULL == actual_obj2)
+ goto error;
+ VERIFY(*actual_obj2, 7, "H5Iobject_verify");
+ if (7 != *actual_obj2)
+ goto error;
+ CHECK_PTR_EQ(actual_obj, actual_obj2, "H5Iobject_verify");
+ if (actual_obj != actual_obj2)
+ goto error;
+
+ /* Destroy the type */
+ ret = H5Idestroy_type(obj_type);
+ CHECK(ret, FAIL, "H5Idestroy_type");
+ if (FAIL == ret)
+ goto error;
+
+ /* Test base use-case: create a future object for a pre-defined type */
+ /* (DATASPACE) */
+ future_obj = HDmalloc(sizeof(future_obj_t));
+ future_obj->obj_type = H5I_DATASPACE;
+ future_id = H5Iregister_future(H5I_DATASPACE, future_obj, realize_future_cb, discard_future_cb);
+ CHECK(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID == future_id)
+ goto error;
+
+ /* (Can't verify the type of the future ID, because the library's current
+ * implementation realizes the object during sanity checks on the ID)
+ */
+
+ /* Close future object for pre-defined type without realizing it */
+ ret = H5Idec_ref(future_id);
+ CHECK(ret, FAIL, "H5Idec_ref");
+ if (FAIL == ret)
+ goto error;
+
+ /* Test base use-case: create a future object for a pre-defined type */
+ future_obj = HDmalloc(sizeof(future_obj_t));
+ future_obj->obj_type = H5I_DATASPACE;
+ future_id = H5Iregister_future(H5I_DATASPACE, future_obj, realize_future_cb, discard_future_cb);
+ CHECK(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID == future_id)
+ goto error;
+
+ /* Verify that the application believes the future ID is a dataspace */
+ /* (Currently realizes the object "implicitly" during a sanity check) */
+ id_type = H5Iget_type(future_id);
+ CHECK(id_type, H5I_BADID, "H5Iget_type");
+ if (H5I_BADID == id_type)
+ goto error;
+ if (H5I_DATASPACE != id_type)
+ goto error;
+
+ /* Close future object for pre-defined type without realizing it */
+ ret = H5Idec_ref(future_id);
+ CHECK(ret, FAIL, "H5Idec_ref");
+ if (FAIL == ret)
+ goto error;
+
+ /* Test base use-case: create a future object for a pre-defined type */
+ future_obj = HDmalloc(sizeof(future_obj_t));
+ future_obj->obj_type = H5I_DATASPACE;
+ future_id = H5Iregister_future(H5I_DATASPACE, future_obj, realize_future_cb, discard_future_cb);
+ CHECK(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID == future_id)
+ goto error;
+
+ /* Realize future dataspace by requesting its rank */
+ ret = H5Sget_simple_extent_ndims(future_id);
+ CHECK(ret, FAIL, "H5Sget_simple_extent_ndims");
+ if (FAIL == ret)
+ goto error;
+ if (1 != ret)
+ goto error;
+
+ /* Verify that the application believes the ID is still a dataspace */
+ id_type = H5Iget_type(future_id);
+ CHECK(id_type, H5I_BADID, "H5Iget_type");
+ if (H5I_BADID == id_type)
+ goto error;
+ if (H5I_DATASPACE != id_type)
+ goto error;
+
+ /* Close future object for pre-defined type after realizing it */
+ ret = H5Idec_ref(future_id);
+ CHECK(ret, FAIL, "H5Idec_ref");
+ if (FAIL == ret)
+ goto error;
+
+ /* Test base use-case: create a future object for a pre-defined type */
+ /* (DATATYPE) */
+ future_obj = HDmalloc(sizeof(future_obj_t));
+ future_obj->obj_type = H5I_DATATYPE;
+ future_id = H5Iregister_future(H5I_DATATYPE, future_obj, realize_future_cb, discard_future_cb);
+ CHECK(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID == future_id)
+ goto error;
+
+ /* (Can't verify the type of the future ID, because the library's current
+ * implementation realizes the object during sanity checks on the ID)
+ */
+
+ /* Close future object for pre-defined type without realizing it */
+ ret = H5Idec_ref(future_id);
+ CHECK(ret, FAIL, "H5Idec_ref");
+ if (FAIL == ret)
+ goto error;
+
+ /* Test base use-case: create a future object for a pre-defined type */
+ future_obj = HDmalloc(sizeof(future_obj_t));
+ future_obj->obj_type = H5I_DATATYPE;
+ future_id = H5Iregister_future(H5I_DATATYPE, future_obj, realize_future_cb, discard_future_cb);
+ CHECK(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID == future_id)
+ goto error;
+
+ /* Verify that the application believes the future ID is a datatype */
+ /* (Currently realizes the object "implicitly" during a sanity check) */
+ id_type = H5Iget_type(future_id);
+ CHECK(id_type, H5I_BADID, "H5Iget_type");
+ if (H5I_BADID == id_type)
+ goto error;
+ if (H5I_DATATYPE != id_type)
+ goto error;
+
+ /* Close future object for pre-defined type without realizing it */
+ ret = H5Idec_ref(future_id);
+ CHECK(ret, FAIL, "H5Idec_ref");
+ if (FAIL == ret)
+ goto error;
+
+ /* Test base use-case: create a future object for a pre-defined type */
+ future_obj = HDmalloc(sizeof(future_obj_t));
+ future_obj->obj_type = H5I_DATATYPE;
+ future_id = H5Iregister_future(H5I_DATATYPE, future_obj, realize_future_cb, discard_future_cb);
+ CHECK(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID == future_id)
+ goto error;
+
+ /* Realize future datatype by requesting its class */
+ type_class = H5Tget_class(future_id);
+ CHECK(ret, FAIL, "H5Tget_class");
+ if (FAIL == ret)
+ goto error;
+ if (H5T_INTEGER != type_class)
+ goto error;
+
+ /* Verify that the application believes the ID is still a datatype */
+ id_type = H5Iget_type(future_id);
+ CHECK(id_type, H5I_BADID, "H5Iget_type");
+ if (H5I_BADID == id_type)
+ goto error;
+ if (H5I_DATATYPE != id_type)
+ goto error;
+
+ /* Close future object for pre-defined type after realizing it */
+ ret = H5Idec_ref(future_id);
+ CHECK(ret, FAIL, "H5Idec_ref");
+ if (FAIL == ret)
+ goto error;
+
+ /* Test base use-case: create a future object for a pre-defined type */
+ /* (PROPERTY LIST) */
+ future_obj = HDmalloc(sizeof(future_obj_t));
+ future_obj->obj_type = H5I_GENPROP_LST;
+ future_id = H5Iregister_future(H5I_GENPROP_LST, future_obj, realize_future_cb, discard_future_cb);
+ CHECK(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID == future_id)
+ goto error;
+
+ /* (Can't verify the type of the future ID, because the library's current
+ * implementation realizes the object during sanity checks on the ID)
+ */
+
+ /* Close future object for pre-defined type without realizing it */
+ ret = H5Idec_ref(future_id);
+ CHECK(ret, FAIL, "H5Idec_ref");
+ if (FAIL == ret)
+ goto error;
+
+ /* Test base use-case: create a future object for a pre-defined type */
+ future_obj = HDmalloc(sizeof(future_obj_t));
+ future_obj->obj_type = H5I_GENPROP_LST;
+ future_id = H5Iregister_future(H5I_GENPROP_LST, future_obj, realize_future_cb, discard_future_cb);
+ CHECK(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID == future_id)
+ goto error;
+
+ /* Verify that the application believes the future ID is a property list */
+ /* (Currently realizes the object "implicitly" during a sanity check) */
+ id_type = H5Iget_type(future_id);
+ CHECK(id_type, H5I_BADID, "H5Iget_type");
+ if (H5I_BADID == id_type)
+ goto error;
+ if (H5I_GENPROP_LST != id_type)
+ goto error;
+
+ /* Close future object for pre-defined type without realizing it */
+ ret = H5Idec_ref(future_id);
+ CHECK(ret, FAIL, "H5Idec_ref");
+ if (FAIL == ret)
+ goto error;
+
+ /* Test base use-case: create a future object for a pre-defined type */
+ future_obj = HDmalloc(sizeof(future_obj_t));
+ future_obj->obj_type = H5I_GENPROP_LST;
+ future_id = H5Iregister_future(H5I_GENPROP_LST, future_obj, realize_future_cb, discard_future_cb);
+ CHECK(future_id, H5I_INVALID_HID, "H5Iregister_future");
+ if (H5I_INVALID_HID == future_id)
+ goto error;
+
+ /* Realize future property list by verifying its class */
+ ret = H5Pisa_class(future_id, H5P_DATASET_XFER);
+ CHECK(ret, FAIL, "H5Pisa_class");
+ if (FAIL == ret)
+ goto error;
+ if (TRUE != ret)
+ goto error;
+
+ /* Verify that the application believes the ID is still a property list */
+ id_type = H5Iget_type(future_id);
+ CHECK(id_type, H5I_BADID, "H5Iget_type");
+ if (H5I_BADID == id_type)
+ goto error;
+ if (H5I_GENPROP_LST != id_type)
+ goto error;
+
+ /* Close future object for pre-defined type after realizing it */
+ ret = H5Idec_ref(future_id);
+ CHECK(ret, FAIL, "H5Idec_ref");
+ if (FAIL == ret)
+ goto error;
+
+ return 0;
+
+error:
+ /* Cleanup. For simplicity, just destroy the types and ignore errors. */
+ H5E_BEGIN_TRY
+ {
+ H5Idestroy_type(obj_type);
+ }
+ H5E_END_TRY
+
+ return -1;
+} /* end test_future_ids() */
+#endif
+
/* Main entry point for the ID tests: seeds the RNG used by the
 * remove-during-clear test, then runs each ID sub-test in order,
 * reporting (but not aborting on) individual failures.
 */
void
test_ids(void)
{
    /* Set the random # seed (test_remove_clear_type relies on HDrandom) */
    HDsrandom((unsigned)HDtime(NULL));

    if (basic_id_test() < 0)
        TestErrPrintf("Basic ID test failed\n");
    if (id_predefined_test() < 0)
        TestErrPrintf("Predefined ID type test failed\n");
    if (test_is_valid() < 0)
        TestErrPrintf("H5Iis_valid test failed\n");
    if (test_get_type() < 0)
        TestErrPrintf("H5Iget_type test failed\n");
    if (test_id_type_list() < 0)
        TestErrPrintf("ID type list test failed\n");
    if (test_remove_clear_type() < 0)
        TestErrPrintf("ID remove during H5Iclear_type test failed\n");
#if defined(H5VL_VERSION) && H5VL_VERSION >= 2
    /* Future IDs require VOL connector version 2+ support */
    if (test_future_ids() < 0)
        TestErrPrintf("Future ID test failed\n");
#endif
}
diff --git a/test/API/titerate.c b/test/API/titerate.c
new file mode 100644
index 0000000..6cbebbd
--- /dev/null
+++ b/test/API/titerate.c
@@ -0,0 +1,1263 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: titerate
+ *
+ * Test the Group & Attribute functionality
+ *
+ *************************************************************/
+
+#include "testhdf5.h"
+/* #include "H5srcdir.h" */
+
+#define DATAFILE "titerate.h5"
+
+/* Number of datasets for group iteration test */
+#define NDATASETS 50
+
+/* Number of attributes for attribute iteration test */
+#define NATTR 50
+
+/* Number of groups for second group iteration test */
+#define ITER_NGROUPS 150
+
+/* General maximum length of names used */
+#define NAMELEN 80
+
+/* 1-D dataset with fixed dimensions */
+#define SPACE1_RANK 1
+#define SPACE1_DIM1 4
+
/* Return-value behaviors a test can request from the iteration callbacks */
typedef enum { RET_ZERO, RET_TWO, RET_CHANGE, RET_CHANGE2 } iter_enum;

/* Custom group iteration callback data */
typedef struct {
    char name[NAMELEN]; /* The name of the object (last one visited) */
    H5O_type_t type; /* The type of the object */
    iter_enum command; /* The type of return value the callback should produce */
} iter_info;
+
/* Definitions for test_corrupted_attnamelen */
#define CORRUPTED_ATNAMELEN_FILE "memleak_H5O_dtype_decode_helper_H5Odtype.h5"
#define DSET_NAME "image"
/* Client data for the error-stack walk (see find_err_msg_cb prototype):
 * records whether a particular error message was encountered. */
typedef struct searched_err_t {
    char message[256]; /* Error message text to search for */
    hbool_t found; /* Set when the message is found during the walk */
} searched_err_t;
+#if 0
+/* Call back function for test_corrupted_attnamelen */
+static int find_err_msg_cb(unsigned n, const H5E_error2_t *err_desc, void *_client_data);
+#endif
+/* Local functions */
+int iter_strcmp(const void *s1, const void *s2);
+int iter_strcmp2(const void *s1, const void *s2);
+#ifndef NO_ITERATION_RESTART
+static herr_t liter_cb(hid_t group, const char *name, const H5L_info2_t *info, void *op_data);
+static herr_t liter_cb2(hid_t group, const char *name, const H5L_info2_t *info, void *op_data);
+#endif
+herr_t aiter_cb(hid_t group, const char *name, const H5A_info_t *ainfo, void *op_data);
+
+/****************************************************************
+**
+** iter_strcmp(): String comparison routine for qsort
+**
+****************************************************************/
+H5_ATTR_PURE int
+iter_strcmp(const void *s1, const void *s2)
+{
+ return (HDstrcmp(*(const char *const *)s1, *(const char *const *)s2));
+}
+
+/****************************************************************
+**
+** liter_cb(): Custom link iteration callback routine.
+**
+****************************************************************/
+#ifndef NO_ITERATION_RESTART
+static herr_t
+liter_cb(hid_t H5_ATTR_UNUSED group, const char *name, const H5L_info2_t H5_ATTR_UNUSED *link_info,
+ void *op_data)
+{
+ iter_info *info = (iter_info *)op_data;
+ static int count = 0;
+ static int count2 = 0;
+
+ HDstrcpy(info->name, name);
+
+ switch (info->command) {
+ case RET_ZERO:
+ return (0);
+
+ case RET_TWO:
+ return (2);
+
+ case RET_CHANGE:
+ count++;
+ return (count > 10 ? 1 : 0);
+
+ case RET_CHANGE2:
+ count2++;
+ return (count2 > 10 ? 1 : 0);
+
+ default:
+ HDprintf("invalid iteration command");
+ return (-1);
+ } /* end switch */
+} /* end liter_cb() */
+#endif
+
+/****************************************************************
+**
+** test_iter_group(): Test group iteration functionality
+**
+****************************************************************/
+static void
+test_iter_group(hid_t fapl, hbool_t new_format)
+{
+#ifndef NO_ITERATION_RESTART
+ hid_t file; /* File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t datatype; /* Common datatype ID */
+ hid_t filespace; /* Common dataspace ID */
+ hid_t root_group, grp; /* Root group ID */
+ int i; /* counting variable */
+ hsize_t idx; /* Index in the group */
+ char name[NAMELEN]; /* temporary name buffer */
+ char *lnames[NDATASETS + 2]; /* Names of the links created */
+ char dataset_name[NAMELEN]; /* dataset name */
+ iter_info info; /* Custom iteration information */
+ H5G_info_t ginfo; /* Buffer for querying object's info */
+ herr_t ret; /* Generic return value */
+#else
+ (void)fapl;
+ (void)new_format;
+#endif
+
+ /* Output message about test being performed */
+ MESSAGE(
+ 5, ("Testing Group Iteration Functionality - SKIPPED for now due to no iteration restart support\n"));
+#ifndef NO_ITERATION_RESTART
+ /* Create the test file with the datasets */
+ file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(file, FAIL, "H5Fcreate");
+
+ /* Test iterating over empty group */
+ info.command = RET_ZERO;
+ idx = 0;
+ ret = H5Literate2(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
+ VERIFY(ret, SUCCEED, "H5Literate2");
+
+ datatype = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(datatype, FAIL, "H5Tcopy");
+
+ filespace = H5Screate(H5S_SCALAR);
+ CHECK(filespace, FAIL, "H5Screate");
+
+ for (i = 0; i < NDATASETS; i++) {
+ HDsnprintf(name, sizeof(name), "Dataset %d", i);
+ dataset = H5Dcreate2(file, name, datatype, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Keep a copy of the dataset names around for later */
+ lnames[i] = HDstrdup(name);
+ CHECK_PTR(lnames[i], "strdup");
+
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+ } /* end for */
+
+ /* Create a group and named datatype under root group for testing */
+ grp = H5Gcreate2(file, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Gcreate2");
+
+ lnames[NDATASETS] = HDstrdup("grp");
+ CHECK_PTR(lnames[NDATASETS], "strdup");
+
+ ret = H5Tcommit2(file, "dtype", datatype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ lnames[NDATASETS + 1] = HDstrdup("dtype");
+ CHECK_PTR(lnames[NDATASETS], "strdup");
+
+ /* Close everything up */
+ ret = H5Tclose(datatype);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ ret = H5Gclose(grp);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ ret = H5Sclose(filespace);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Sort the dataset names */
+ HDqsort(lnames, (size_t)(NDATASETS + 2), sizeof(char *), iter_strcmp);
+
+ /* Iterate through the datasets in the root group in various ways */
+ file = H5Fopen(DATAFILE, H5F_ACC_RDONLY, fapl);
+ CHECK(file, FAIL, "H5Fopen");
+
+ /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually
+ * iterate through B-tree for group members in internal library design.
+ */
+ root_group = H5Gopen2(file, "/", H5P_DEFAULT);
+ CHECK(root_group, FAIL, "H5Gopen2");
+
+ ret = H5Gget_info(root_group, &ginfo);
+ CHECK(ret, FAIL, "H5Gget_info");
+ VERIFY(ginfo.nlinks, (NDATASETS + 2), "H5Gget_info");
+
+ for (i = 0; i < (int)ginfo.nlinks; i++) {
+ H5O_info2_t oinfo; /* Object info */
+
+ ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i,
+ dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lget_name_by_idx");
+
+ //! [H5Oget_info_by_idx3_snip]
+
+ ret = H5Oget_info_by_idx3(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo,
+ H5O_INFO_BASIC, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_idx");
+
+ //! [H5Oget_info_by_idx3_snip]
+
+ } /* end for */
+
+ H5E_BEGIN_TRY
+ {
+ ret =
+ (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)(NDATASETS + 3),
+ dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Lget_name_by_idx");
+
+ ret = H5Gclose(root_group);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually
+ * iterate through B-tree for group members in internal library design.
+ * (Same as test above, but with the file ID instead of opening the root group)
+ */
+ ret = H5Gget_info(file, &ginfo);
+ CHECK(ret, FAIL, "H5Gget_info");
+ VERIFY(ginfo.nlinks, NDATASETS + 2, "H5Gget_info");
+
+ for (i = 0; i < (int)ginfo.nlinks; i++) {
+ H5O_info2_t oinfo; /* Object info */
+
+ ret = (herr_t)H5Lget_name_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, dataset_name,
+ (size_t)NAMELEN, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lget_name_by_idx");
+
+ ret = H5Oget_info_by_idx3(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5O_INFO_BASIC,
+ H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oget_info_by_idx3");
+ } /* end for */
+
+ H5E_BEGIN_TRY
+ {
+ ret = (herr_t)H5Lget_name_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)(NDATASETS + 3),
+ dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Lget_name_by_idx");
+
+ /* Test invalid indices for starting iteration */
+ info.command = RET_ZERO;
+ idx = (hsize_t)-1;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Literate2(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Literate2");
+
+ /* Test skipping exactly as many entries as in the group */
+ idx = NDATASETS + 2;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Literate2(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Literate2");
+
+ /* Test skipping more entries than are in the group */
+ idx = NDATASETS + 3;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Literate2(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Literate2");
+
+ /* Test all objects in group, when callback always returns 0 */
+ info.command = RET_ZERO;
+ idx = 0;
+ if ((ret = H5Literate2(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) > 0)
+ TestErrPrintf("Group iteration function didn't return zero correctly!\n");
+
+ /* Test all objects in group, when callback always returns 1 */
+ /* This also tests the "restarting" ability, because the index changes */
+ info.command = RET_TWO;
+ i = 0;
+ idx = 0;
+ memset(info.name, 0, NAMELEN);
+ while ((ret = H5Literate2(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) > 0) {
+ /* Verify return value from iterator gets propagated correctly */
+ VERIFY(ret, 2, "H5Literate2");
+
+ /* Increment the number of times "2" is returned */
+ i++;
+
+ /* Verify that the index is the correct value */
+ VERIFY(idx, (hsize_t)i, "H5Literate2");
+ if (idx != (hsize_t)i)
+ break;
+ if (idx > (NDATASETS + 2))
+ TestErrPrintf("Group iteration function walked too far!\n");
+
+ /* Verify that the correct name is retrieved */
+ if (HDstrncmp(info.name, lnames[(size_t)(idx - 1)], NAMELEN) != 0)
+ TestErrPrintf(
+ "Group iteration function didn't return name correctly for link - lnames[%u] = '%s'!\n",
+ (unsigned)(idx - 1), lnames[(size_t)(idx - 1)]);
+ } /* end while */
+ VERIFY(ret, -1, "H5Literate2");
+
+ if (i != (NDATASETS + 2))
+ TestErrPrintf("%u: Group iteration function didn't perform multiple iterations correctly!\n",
+ __LINE__);
+
+ /* Test all objects in group, when callback changes return value */
+ /* This also tests the "restarting" ability, because the index changes */
+ info.command = new_format ? RET_CHANGE2 : RET_CHANGE;
+ i = 0;
+ idx = 0;
+ memset(info.name, 0, NAMELEN);
+ while ((ret = H5Literate2(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) >= 0) {
+ /* Verify return value from iterator gets propagated correctly */
+ VERIFY(ret, 1, "H5Literate2");
+
+ /* Increment the number of times "1" is returned */
+ i++;
+
+ /* Verify that the index is the correct value */
+ VERIFY(idx, (hsize_t)(i + 10), "H5Literate2");
+ if (idx != (hsize_t)(i + 10))
+ break;
+ if (idx > (NDATASETS + 2))
+ TestErrPrintf("Group iteration function walked too far!\n");
+
+ /* Verify that the correct name is retrieved */
+ if (HDstrncmp(info.name, lnames[(size_t)(idx - 1)], NAMELEN) != 0)
+ TestErrPrintf(
+ "Group iteration function didn't return name correctly for link - lnames[%u] = '%s'!\n",
+ (unsigned)(idx - 1), lnames[(size_t)(idx - 1)]);
+ } /* end while */
+ VERIFY(ret, -1, "H5Literate2");
+
+ if (i != 42 || idx != 52)
+ TestErrPrintf("%u: Group iteration function didn't perform multiple iterations correctly!\n",
+ __LINE__);
+
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free the dataset names */
+ for (i = 0; i < (NDATASETS + 2); i++)
+ HDfree(lnames[i]);
+#endif
+} /* test_iter_group() */
+
+/****************************************************************
+**
+** aiter_cb(): Custom group iteration callback routine.
+**
+****************************************************************/
+herr_t
+aiter_cb(hid_t H5_ATTR_UNUSED group, const char *name, const H5A_info_t H5_ATTR_UNUSED *ainfo, void *op_data)
+{
+    iter_info *info   = (iter_info *)op_data;
+    static int count  = 0; /* persists across calls: drives RET_CHANGE's 0 -> 1 switchover */
+    static int count2 = 0; /* separate counter so RET_CHANGE2 (new-format pass) starts fresh */
+
+    /* Record the attribute name so the caller can verify iteration order */
+    HDstrcpy(info->name, name);
+
+    switch (info->command) {
+        case RET_ZERO:
+            return (0); /* continue iteration */
+
+        case RET_TWO:
+            return (2); /* short-circuit; caller checks this value propagates */
+
+        case RET_CHANGE:
+            count++;
+            return (count > 10 ? 1 : 0); /* continue for 10 calls, then stop */
+
+        case RET_CHANGE2:
+            count2++;
+            return (count2 > 10 ? 1 : 0);
+
+        default:
+            HDprintf("invalid iteration command");
+            return (-1); /* abort iteration with error */
+    } /* end switch */
+} /* end aiter_cb() */
+
+/****************************************************************
+**
+** test_iter_attr(): Test attribute iteration functionality
+**
+****************************************************************/
+static void
+test_iter_attr(hid_t fapl, hbool_t new_format)
+{
+#ifndef NO_ITERATION_RESTART
+    hid_t     file;          /* File ID */
+    hid_t     dataset;       /* Common Dataset ID */
+    hid_t     filespace;     /* Common dataspace ID */
+    hid_t     attribute;     /* Attribute ID */
+    int       i;             /* counting variable */
+    hsize_t   idx;           /* Index in the attribute list */
+    char      name[NAMELEN]; /* temporary name buffer */
+    char     *anames[NATTR]; /* Names of the attributes created */
+    iter_info info;          /* Custom iteration information */
+    herr_t    ret;           /* Generic return value */
+#else
+    (void)fapl;
+    (void)new_format;
+#endif
+
+    /* Output message about test being performed */
+    /* (message typo fixed: "for no due to" -> "for now due to") */
+    MESSAGE(
+        5,
+        ("Testing Attribute Iteration Functionality - SKIPPED for now due to no iteration restart support\n"));
+#ifndef NO_ITERATION_RESTART
+    HDmemset(&info, 0, sizeof(iter_info));
+
+    /* Create the test file with the datasets */
+    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(file, FAIL, "H5Fcreate");
+
+    filespace = H5Screate(H5S_SCALAR);
+    CHECK(filespace, FAIL, "H5Screate");
+
+    dataset = H5Dcreate2(file, "Dataset", H5T_NATIVE_INT, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    for (i = 0; i < NATTR; i++) {
+        HDsnprintf(name, sizeof(name), "Attribute %02d", i);
+        attribute = H5Acreate2(dataset, name, H5T_NATIVE_INT, filespace, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(attribute, FAIL, "H5Acreate2");
+
+        /* Keep a copy of the attribute names around for later */
+        anames[i] = HDstrdup(name);
+        CHECK_PTR(anames[i], "strdup");
+
+        ret = H5Aclose(attribute);
+        CHECK(ret, FAIL, "H5Aclose");
+    } /* end for */
+
+    /* Close everything up */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Sclose(filespace);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Iterate through the attributes on the dataset in various ways */
+    file = H5Fopen(DATAFILE, H5F_ACC_RDONLY, fapl);
+    CHECK(file, FAIL, "H5Fopen");
+
+    dataset = H5Dopen2(file, "Dataset", H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+    /* Test invalid indices for starting iteration */
+    info.command = RET_ZERO;
+
+    /* Test skipping exactly as many attributes as there are */
+    idx = NATTR;
+    H5E_BEGIN_TRY
+    {
+        ret = H5Aiterate2(dataset, H5_INDEX_NAME, H5_ITER_INC, &idx, aiter_cb, &info);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Aiterate2");
+
+    /* Test skipping more attributes than there are */
+    idx = NATTR + 1;
+    H5E_BEGIN_TRY
+    {
+        ret = H5Aiterate2(dataset, H5_INDEX_NAME, H5_ITER_INC, &idx, aiter_cb, &info);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Aiterate2");
+
+    /* Test all attributes on dataset, when callback always returns 0 */
+    info.command = RET_ZERO;
+    idx          = 0;
+    if ((ret = H5Aiterate2(dataset, H5_INDEX_NAME, H5_ITER_INC, &idx, aiter_cb, &info)) > 0)
+        TestErrPrintf("Attribute iteration function didn't return zero correctly!\n");
+
+    /* Test all attributes on dataset, when callback always returns 2 */
+    /* This also tests the "restarting" ability, because the index changes */
+    info.command = RET_TWO;
+    i            = 0;
+    idx          = 0;
+    while ((ret = H5Aiterate2(dataset, H5_INDEX_NAME, H5_ITER_INC, &idx, aiter_cb, &info)) > 0) {
+        /* Verify return value from iterator gets propagated correctly */
+        VERIFY(ret, 2, "H5Aiterate2");
+
+        /* Increment the number of times "2" is returned */
+        i++;
+
+        /* Verify that the index is the correct value */
+        VERIFY(idx, (unsigned)i, "H5Aiterate2");
+
+        /* Don't check name when new format is used */
+        if (!new_format) {
+            /* Verify that the correct name is retrieved */
+            if (idx > 0) {
+                if (HDstrcmp(info.name, anames[(size_t)idx - 1]) != 0)
+                    TestErrPrintf("%u: Attribute iteration function didn't set names correctly, info.name = "
+                                  "'%s', anames[%u] = '%s'!\n",
+                                  __LINE__, info.name, (unsigned)(idx - 1), anames[(size_t)idx - 1]);
+            } /* end if */
+            else
+                TestErrPrintf("%u: 'idx' was not set correctly!\n", __LINE__);
+        } /* end if */
+    }     /* end while */
+    VERIFY(ret, -1, "H5Aiterate2");
+    if (i != 50 || idx != 50)
+        TestErrPrintf("%u: Attribute iteration function didn't perform multiple iterations correctly!\n",
+                      __LINE__);
+
+    /* Test all attributes on dataset, when callback changes return value */
+    /* This also tests the "restarting" ability, because the index changes */
+    info.command = new_format ? RET_CHANGE2 : RET_CHANGE;
+    i            = 0;
+    idx          = 0;
+    while ((ret = H5Aiterate2(dataset, H5_INDEX_NAME, H5_ITER_INC, &idx, aiter_cb, &info)) > 0) {
+        /* Verify return value from iterator gets propagated correctly */
+        VERIFY(ret, 1, "H5Aiterate2");
+
+        /* Increment the number of times "1" is returned */
+        i++;
+
+        /* Verify that the index is the correct value */
+        VERIFY(idx, (unsigned)i + 10, "H5Aiterate2");
+
+        /* Don't check name when new format is used */
+        if (!new_format) {
+            /* Verify that the correct name is retrieved */
+            if (idx > 0) {
+                if (HDstrcmp(info.name, anames[(size_t)idx - 1]) != 0)
+                    TestErrPrintf("%u: Attribute iteration function didn't set names correctly, info.name = "
+                                  "'%s', anames[%u] = '%s'!\n",
+                                  __LINE__, info.name, (unsigned)(idx - 1), anames[(size_t)idx - 1]);
+            }
+            else
+                TestErrPrintf("%u: 'idx' was not set correctly!\n", __LINE__);
+        } /* end if */
+    }     /* end while */
+    VERIFY(ret, -1, "H5Aiterate2");
+    if (i != 40 || idx != 50)
+        TestErrPrintf("%u: Attribute iteration function didn't perform multiple iterations correctly!\n",
+                      __LINE__);
+
+    /* Close the dataset BEFORE the file, so no objects are left open when the
+     * file is closed (the original closed the file first, which relies on
+     * HDF5's delayed-close behavior and can trip up non-native VOL connectors) */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free the attribute names */
+    for (i = 0; i < NATTR; i++)
+        HDfree(anames[i]);
+#endif
+} /* test_iter_attr() */
+
+/****************************************************************
+**
+** iter_strcmp2(): String comparison routine for qsort
+**
+****************************************************************/
+H5_ATTR_PURE int
+iter_strcmp2(const void *s1, const void *s2)
+{
+    /* qsort comparator used on arrays of iter_info records (see
+     * test_iter_group_large): each element is compared as a C string in
+     * place, which assumes the name buffer is the FIRST member of
+     * iter_info -- NOTE(review): confirm struct layout if iter_info changes. */
+    return (HDstrcmp((const char *)s1, (const char *)s2));
+} /* end iter_strcmp2() */
+
+/****************************************************************
+**
+** liter_cb2(): Custom link iteration callback routine.
+**
+****************************************************************/
+#ifndef NO_ITERATION_RESTART
+static herr_t
+liter_cb2(hid_t loc_id, const char *name, const H5L_info2_t H5_ATTR_UNUSED *link_info, void *opdata)
+{
+    const iter_info *test_info = (const iter_info *)opdata; /* expected (name, type) for this index */
+    H5O_info2_t      oinfo;
+    herr_t           ret; /* Generic return value */
+
+    /* The caller passes in the single expected entry; verify the name matches */
+    if (HDstrcmp(name, test_info->name) != 0) {
+        TestErrPrintf("name = '%s', test_info = '%s'\n", name, test_info->name);
+        return (H5_ITER_ERROR);
+    } /* end if */
+
+    /*
+     * Get type of the object and check it.
+     */
+    ret = H5Oget_info_by_name3(loc_id, name, &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_info_by_name3");
+
+    if (test_info->type != oinfo.type) {
+        TestErrPrintf("test_info->type = %d, oinfo.type = %d\n", test_info->type, (int)oinfo.type);
+        return (H5_ITER_ERROR);
+    } /* end if */
+
+    /* Stop after verifying one link; the caller restarts at the next index */
+    return (H5_ITER_STOP);
+} /* liter_cb2() */
+#endif
+
+/****************************************************************
+**
+** test_iter_group_large(): Test group iteration functionality
+** for groups with large #'s of objects
+**
+****************************************************************/
+static void
+test_iter_group_large(hid_t fapl)
+{
+#ifndef NO_ITERATION_RESTART
+    hid_t      file;    /* HDF5 File IDs */
+    hid_t      dataset; /* Dataset ID */
+    hid_t      group;   /* Group ID */
+    hid_t      sid;     /* Dataspace ID */
+    hid_t      tid;     /* Datatype ID */
+    hsize_t    dims[] = {SPACE1_DIM1};
+    herr_t     ret;       /* Generic return value */
+    char       gname[20]; /* Temporary group name */
+    iter_info *names;     /* Names of objects in the root group */
+    iter_info *curr_name; /* Pointer to the current name in the root group */
+    int        i;
+
+    /* Compound datatype */
+    typedef struct s1_t {
+        unsigned int a;
+        unsigned int b;
+        float        c;
+    } s1_t;
+
+    /* Allocate & initialize array: one zeroed entry per group, plus the
+     * dataset and the named datatype created below */
+    names = (iter_info *)HDcalloc(sizeof(iter_info), (ITER_NGROUPS + 2));
+    CHECK_PTR(names, "HDcalloc");
+#else
+    (void)fapl;
+#endif
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Large Group Iteration Functionality - SKIPPED for now due to no iteration restart "
+                "support\n"));
+#ifndef NO_ITERATION_RESTART
+    /* Create file */
+    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(file, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid = H5Screate_simple(SPACE1_RANK, dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Create a bunch of groups */
+    for (i = 0; i < ITER_NGROUPS; i++) {
+        HDsnprintf(gname, sizeof(gname), "Group_%d", i);
+
+        /* Add the name to the list of objects in the root group */
+        HDstrcpy(names[i].name, gname);
+        names[i].type = H5O_TYPE_GROUP;
+
+        /* Create a group */
+        group = H5Gcreate2(file, gname, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(group, FAIL, "H5Gcreate2");
+
+        /* Close a group */
+        ret = H5Gclose(group);
+        CHECK(ret, FAIL, "H5Gclose");
+    } /* end for */
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(file, "Dataset1", H5T_STD_U32LE, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Add the name to the list of objects in the root group */
+    HDstrcpy(names[ITER_NGROUPS].name, "Dataset1");
+    names[ITER_NGROUPS].type = H5O_TYPE_DATASET;
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close Dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Create a datatype */
+    tid = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+    CHECK(tid, FAIL, "H5Tcreate");
+
+    /* Insert fields */
+    ret = H5Tinsert(tid, "a", HOFFSET(s1_t, a), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    ret = H5Tinsert(tid, "b", HOFFSET(s1_t, b), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    ret = H5Tinsert(tid, "c", HOFFSET(s1_t, c), H5T_NATIVE_FLOAT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Save datatype for later */
+    ret = H5Tcommit2(file, "Datatype1", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+
+    /* Add the name to the list of objects in the root group */
+    HDstrcpy(names[ITER_NGROUPS + 1].name, "Datatype1");
+    names[ITER_NGROUPS + 1].type = H5O_TYPE_NAMED_DATATYPE;
+
+    /* Close datatype */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Need to sort the names in the root group, cause that's what the library does */
+    /* (iter_strcmp2 compares iter_info records via their leading name buffer) */
+    HDqsort(names, (size_t)(ITER_NGROUPS + 2), sizeof(iter_info), iter_strcmp2);
+
+    /* Iterate through the file to see members of the root group.
+     * Each H5Literate2 call verifies exactly one entry (liter_cb2 returns
+     * H5_ITER_STOP), restarting at successive indices.
+     * NOTE(review): only the first 100 entries are checked, not all
+     * ITER_NGROUPS + 2 -- confirm this partial sweep is intentional. */
+    curr_name = &names[0];
+    ret       = H5Literate2(file, H5_INDEX_NAME, H5_ITER_INC, NULL, liter_cb2, curr_name);
+    CHECK(ret, FAIL, "H5Literate2");
+    for (i = 1; i < 100; i++) {
+        hsize_t idx = (hsize_t)i;
+
+        curr_name = &names[i];
+        ret       = H5Literate2(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb2, curr_name);
+        CHECK(ret, FAIL, "H5Literate2");
+    } /* end for */
+
+    /* Close file */
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Release memory */
+    HDfree(names);
+#endif
+} /* test_iter_group_large() */
+
+/****************************************************************
+**
+** test_grp_memb_funcs(): Test group member information
+** functionality
+**
+****************************************************************/
+static void
+test_grp_memb_funcs(hid_t fapl)
+{
+    hid_t      file;                     /* File ID */
+    hid_t      dataset;                  /* Dataset ID */
+    hid_t      datatype;                 /* Common datatype ID */
+    hid_t      filespace;                /* Common dataspace ID */
+    hid_t      root_group, grp;          /* Root group ID */
+    int        i;                        /* counting variable */
+    char       name[NAMELEN];            /* temporary name buffer */
+    char      *dnames[NDATASETS + 2];    /* Names of the datasets created */
+    char      *obj_names[NDATASETS + 2]; /* Names of the objects in group */
+    char       dataset_name[NAMELEN];    /* dataset name */
+    ssize_t    name_len;                 /* Length of object's name */
+    H5G_info_t ginfo;                    /* Buffer for querying object's info */
+    herr_t     ret = SUCCEED;            /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Group Member Information Functionality\n"));
+
+    /* Create the test file with the datasets */
+    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(file, FAIL, "H5Fcreate");
+
+    datatype = H5Tcopy(H5T_NATIVE_INT);
+    CHECK(datatype, FAIL, "H5Tcopy");
+
+    filespace = H5Screate(H5S_SCALAR);
+    CHECK(filespace, FAIL, "H5Screate");
+
+    for (i = 0; i < NDATASETS; i++) {
+        HDsnprintf(name, sizeof(name), "Dataset %d", i);
+        dataset = H5Dcreate2(file, name, datatype, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(dataset, FAIL, "H5Dcreate2");
+
+        /* Keep a copy of the dataset names around for later */
+        dnames[i] = HDstrdup(name);
+        CHECK_PTR(dnames[i], "strdup");
+
+        ret = H5Dclose(dataset);
+        CHECK(ret, FAIL, "H5Dclose");
+    } /* end for */
+
+    /* Create a group and named datatype under root group for testing */
+    grp = H5Gcreate2(file, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    /* Check the group ID itself (previously this checked 'ret', which does
+     * not hold this call's result) */
+    CHECK(grp, FAIL, "H5Gcreate2");
+
+    dnames[NDATASETS] = HDstrdup("grp");
+    CHECK_PTR(dnames[NDATASETS], "strdup");
+
+    ret = H5Tcommit2(file, "dtype", datatype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+
+    dnames[NDATASETS + 1] = HDstrdup("dtype");
+    /* Check the entry just created (previously this re-checked
+     * dnames[NDATASETS], leaving the "dtype" strdup unverified) */
+    CHECK_PTR(dnames[NDATASETS + 1], "strdup");
+
+    /* Close everything up */
+    ret = H5Tclose(datatype);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Gclose(grp);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Sclose(filespace);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Sort the dataset names */
+    HDqsort(dnames, (size_t)(NDATASETS + 2), sizeof(char *), iter_strcmp);
+
+    /* Iterate through the datasets in the root group in various ways */
+    file = H5Fopen(DATAFILE, H5F_ACC_RDONLY, fapl);
+    CHECK(file, FAIL, "H5Fopen");
+
+    /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually
+     * iterate through B-tree for group members in internal library design.
+     */
+    root_group = H5Gopen2(file, "/", H5P_DEFAULT);
+    CHECK(root_group, FAIL, "H5Gopen2");
+
+    ret = H5Gget_info(root_group, &ginfo);
+    CHECK(ret, FAIL, "H5Gget_info");
+    VERIFY(ginfo.nlinks, (NDATASETS + 2), "H5Gget_info");
+
+    for (i = 0; i < (int)ginfo.nlinks; i++) {
+        H5O_info2_t oinfo; /* Object info */
+
+        /* Test with NULL for name, to query length */
+        name_len = H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, NULL,
+                                      (size_t)NAMELEN, H5P_DEFAULT);
+        CHECK(name_len, FAIL, "H5Lget_name_by_idx");
+
+        ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i,
+                                         dataset_name, (size_t)(name_len + 1), H5P_DEFAULT);
+        CHECK(ret, FAIL, "H5Lget_name_by_idx");
+
+        /* Double-check that the length is the same */
+        VERIFY(ret, name_len, "H5Lget_name_by_idx");
+
+        /* Keep a copy of the dataset names around for later */
+        obj_names[i] = HDstrdup(dataset_name);
+        CHECK_PTR(obj_names[i], "strdup");
+
+        ret = H5Oget_info_by_idx3(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo,
+                                  H5O_INFO_BASIC, H5P_DEFAULT);
+        CHECK(ret, FAIL, "H5Oget_info_by_idx3");
+
+        if (!HDstrcmp(dataset_name, "grp"))
+            VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx");
+        if (!HDstrcmp(dataset_name, "dtype"))
+            VERIFY(oinfo.type, H5O_TYPE_NAMED_DATATYPE, "H5Lget_name_by_idx");
+        if (!HDstrncmp(dataset_name, "Dataset", (size_t)7))
+            VERIFY(oinfo.type, H5O_TYPE_DATASET, "H5Lget_name_by_idx");
+    } /* end for */
+
+    /* Querying past the last valid index must fail */
+    H5E_BEGIN_TRY
+    {
+        ret =
+            (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)(NDATASETS + 3),
+                                       dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Lget_name_by_idx");
+
+    /* Sort the dataset names */
+    HDqsort(obj_names, (size_t)(NDATASETS + 2), sizeof(char *), iter_strcmp);
+
+    /* Compare object names */
+    for (i = 0; i < (int)ginfo.nlinks; i++) {
+        ret = HDstrcmp(dnames[i], obj_names[i]);
+        VERIFY(ret, 0, "HDstrcmp");
+    } /* end for */
+
+    ret = H5Gclose(root_group);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free the dataset names */
+    for (i = 0; i < (NDATASETS + 2); i++) {
+        HDfree(dnames[i]);
+        HDfree(obj_names[i]);
+    } /* end for */
+} /* test_grp_memb_funcs() */
+
+/****************************************************************
+**
+** test_links(): Test soft and hard link iteration
+**
+****************************************************************/
+static void
+test_links(hid_t fapl)
+{
+    hid_t      file;              /* File ID */
+    char       obj_name[NAMELEN]; /* Names of the object in group */
+    ssize_t    name_len;          /* Length of object's name */
+    hid_t      gid, gid1;
+    H5G_info_t ginfo; /* Buffer for querying object's info */
+    hsize_t    i;
+    herr_t     ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Soft and Hard Link Iteration Functionality\n"));
+
+    /* Create the test file with the datasets */
+    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(file, FAIL, "H5Fcreate");
+
+    /* create groups */
+    gid = H5Gcreate2(file, "/g1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gcreate2");
+
+    gid1 = H5Gcreate2(file, "/g1/g1.1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid1, FAIL, "H5Gcreate2");
+
+    /* create soft and hard links to the group "/g1". */
+    /* (The soft link target "something" does not exist -- that is fine here,
+     * since only the link itself is inspected, never dereferenced) */
+    ret = H5Lcreate_soft("something", gid, "softlink", H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Lcreate_soft");
+
+    ret = H5Lcreate_hard(gid, "/g1", H5L_SAME_LOC, "hardlink", H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Lcreate_hard");
+
+    /* "/g1" should now contain exactly: g1.1, softlink, hardlink */
+    ret = H5Gget_info(gid, &ginfo);
+    CHECK(ret, FAIL, "H5Gget_info");
+    VERIFY(ginfo.nlinks, 3, "H5Gget_info");
+
+    /* Test these two functions, H5Oget_info_by_idx and H5Lget_name_by_idx */
+    for (i = 0; i < ginfo.nlinks; i++) {
+        H5O_info2_t oinfo; /* Object info */
+        H5L_info2_t linfo; /* Link info */
+
+        /* Get link name */
+        name_len = H5Lget_name_by_idx(gid, ".", H5_INDEX_NAME, H5_ITER_INC, i, obj_name, (size_t)NAMELEN,
+                                      H5P_DEFAULT);
+        CHECK(name_len, FAIL, "H5Lget_name_by_idx");
+
+        /* Get link type */
+        ret = H5Lget_info_by_idx2(gid, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &linfo, H5P_DEFAULT);
+        CHECK(ret, FAIL, "H5Lget_info_by_idx2");
+
+        /* Get object type -- only for hard links; oinfo stays unset for the
+         * soft link, whose branch below only consults linfo */
+        if (linfo.type == H5L_TYPE_HARD) {
+            ret = H5Oget_info_by_idx3(gid, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo,
+                                      H5O_INFO_BASIC, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Oget_info_by_idx3");
+        } /* end if */
+
+        if (!HDstrcmp(obj_name, "g1.1"))
+            VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx");
+        else if (!HDstrcmp(obj_name, "hardlink"))
+            VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx");
+        else if (!HDstrcmp(obj_name, "softlink"))
+            VERIFY(linfo.type, H5L_TYPE_SOFT, "H5Lget_name_by_idx");
+        else
+            ERROR("unknown object name");
+    } /* end for */
+
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Gclose(gid1);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* test_links() */
+
+/*-------------------------------------------------------------------------
+ * Function: find_err_msg_cb
+ *
+ * Purpose: Callback function to find the given error message.
+ * Helper function for test_corrupted_attnamelen().
+ *
+ * Return: H5_ITER_STOP when the message is found
+ * H5_ITER_CONT, otherwise
+ *
+ *-------------------------------------------------------------------------
+ */
+#if 0
+static int
+find_err_msg_cb(unsigned H5_ATTR_UNUSED n, const H5E_error2_t *err_desc, void *_client_data)
+{
+    int             status       = H5_ITER_CONT;
+    searched_err_t *searched_err = (searched_err_t *)_client_data;
+
+    /* Defensive: no client data means nothing to search for */
+    if (searched_err == NULL)
+        return H5_ITER_ERROR;
+
+    /* If the searched error message is found, stop the iteration */
+    if (err_desc->desc != NULL && HDstrcmp(err_desc->desc, searched_err->message) == 0) {
+        searched_err->found = TRUE;
+        status              = H5_ITER_STOP;
+    }
+
+    return status;
+} /* end find_err_msg_cb() */
+#endif
+
+/**************************************************************************
+**
+** test_corrupted_attnamelen(): Test the fix for the JIRA issue HDFFV-10588,
+** where corrupted attribute's name length can be
+** detected and invalid read can be avoided.
+**
+**************************************************************************/
+#if 0
+static void
+test_corrupted_attnamelen(void)
+{
+    hid_t          fid = -1;   /* File ID */
+    hid_t          did = -1;   /* Dataset ID */
+    searched_err_t err_caught; /* Data to be passed to callback func */
+    int            err_status; /* Status returned by H5Aiterate2 */
+    herr_t         ret;        /* Return value */
+    hbool_t        driver_is_default_compatible;
+    const char    *testfile = H5_get_srcdir_filename(CORRUPTED_ATNAMELEN_FILE); /* Corrected test file name */
+
+    const char *err_message = "attribute name has different length than stored length";
+    /* the error message produced when the failure occurs */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing the Handling of Corrupted Attribute's Name Length\n"));
+
+    /* The pre-made corrupted file can only be read by drivers compatible
+     * with the default VFD's on-disk layout */
+    ret = h5_driver_is_default_vfd_compatible(H5P_DEFAULT, &driver_is_default_compatible);
+    CHECK(ret, FAIL, "h5_driver_is_default_vfd_compatible");
+
+    if (!driver_is_default_compatible) {
+        HDprintf("-- SKIPPED --\n");
+        return;
+    }
+
+    fid = H5Fopen(testfile, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open the dataset */
+    did = H5Dopen2(fid, DSET_NAME, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dopen2");
+
+    /* Call H5Aiterate2 to trigger the failure in HDFFV-10588. Failure should
+       occur in the decoding stage, so some arguments are not needed. */
+    err_status = H5Aiterate2(did, H5_INDEX_NAME, H5_ITER_INC, NULL, NULL, NULL);
+    VERIFY(err_status, FAIL, "H5Aiterate2");
+
+    /* Make sure the intended error was caught */
+    if (err_status == -1) {
+        /* Initialize client data */
+        HDstrcpy(err_caught.message, err_message);
+        err_caught.found = FALSE;
+
+        /* Look for the correct error message */
+        ret = H5Ewalk2(H5E_DEFAULT, H5E_WALK_UPWARD, find_err_msg_cb, &err_caught);
+        CHECK(ret, FAIL, "H5Ewalk2");
+
+        /* Fail if the indicated message is not found */
+        CHECK(err_caught.found, FALSE, "test_corrupted_attnamelen: Expected error not found");
+    }
+
+    /* Close the dataset and file */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* test_corrupted_attnamelen() */
+#endif
+
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+/****************************************************************
+**
+**  test_links_deprec(): Test soft and hard link iteration
+**
+****************************************************************/
+static void
+test_links_deprec(hid_t fapl)
+{
+    hid_t      file;              /* File ID */
+    char       obj_name[NAMELEN]; /* Names of the object in group */
+    ssize_t    name_len;          /* Length of object's name */
+    hid_t      gid, gid1;
+    H5G_info_t ginfo; /* Buffer for querying object's info */
+    hsize_t    i;
+    herr_t     ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Soft and Hard Link Iteration Functionality Using Deprecated Routines\n"));
+
+    /* Create the test file with the datasets */
+    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(file, FAIL, "H5Fcreate");
+
+    /* create groups */
+    gid = H5Gcreate2(file, "/g1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gcreate2");
+
+    gid1 = H5Gcreate2(file, "/g1/g1.1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid1, FAIL, "H5Gcreate2");
+
+    /* create soft and hard links to the group "/g1". */
+    ret = H5Lcreate_soft("something", gid, "softlink", H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Lcreate_soft");
+
+    ret = H5Lcreate_hard(gid, "/g1", H5L_SAME_LOC, "hardlink", H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Lcreate_hard");
+
+    ret = H5Gget_info(gid, &ginfo);
+    CHECK(ret, FAIL, "H5Gget_info");
+    VERIFY(ginfo.nlinks, 3, "H5Gget_info");
+
+    /* Test these two functions, H5Oget_info_by_idx and H5Lget_name_by_idx */
+    /* NOTE(review): despite the function name, the calls below use the
+     * current (non-deprecated) *_by_idx2/*_by_idx3 routines, and the CHECK
+     * labels ("H5Lget_info_by_idx1", "H5Oget_info_by_idx") do not match the
+     * functions actually called -- confirm whether deprecated variants were
+     * intended here. */
+    for (i = 0; i < ginfo.nlinks; i++) {
+        H5O_info2_t oinfo; /* Object info */
+        H5L_info2_t linfo; /* Link info */
+
+        /* Get link name */
+        name_len = H5Lget_name_by_idx(gid, ".", H5_INDEX_NAME, H5_ITER_INC, i, obj_name, (size_t)NAMELEN,
+                                      H5P_DEFAULT);
+        CHECK(name_len, FAIL, "H5Lget_name_by_idx");
+
+        /* Get link type */
+        ret = H5Lget_info_by_idx2(gid, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &linfo, H5P_DEFAULT);
+        CHECK(ret, FAIL, "H5Lget_info_by_idx1");
+
+        /* Get object type (hard links only; the soft-link branch below
+         * consults linfo, not oinfo) */
+        if (linfo.type == H5L_TYPE_HARD) {
+            ret = H5Oget_info_by_idx3(gid, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo,
+                                      H5O_INFO_BASIC, H5P_DEFAULT);
+            CHECK(ret, FAIL, "H5Oget_info_by_idx");
+        } /* end if */
+
+        if (!HDstrcmp(obj_name, "g1.1"))
+            VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx");
+        else if (!HDstrcmp(obj_name, "hardlink"))
+            VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx");
+        else if (!HDstrcmp(obj_name, "softlink"))
+            VERIFY(linfo.type, H5L_TYPE_SOFT, "H5Lget_name_by_idx");
+        else
+            ERROR("unknown object name");
+    } /* end for */
+
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Gclose(gid1);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* test_links_deprec() */
+#endif
+#endif
+
+/****************************************************************
+**
+** test_iterate(): Main iteration testing routine.
+**
+****************************************************************/
+void
+test_iterate(void)
+{
+    hid_t    fapl, fapl2; /* File access property lists */
+    unsigned new_format;  /* Whether to use the new format or not */
+    herr_t   ret;         /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Iteration Operations\n"));
+
+    /* Get the default FAPL */
+    fapl = H5Pcreate(H5P_FILE_ACCESS);
+    CHECK(fapl, FAIL, "H5Pcreate");
+
+    /* Copy the file access property list */
+    fapl2 = H5Pcopy(fapl);
+    CHECK(fapl2, FAIL, "H5Pcopy");
+
+    /* Set the "use the latest version of the format" bounds for creating objects in the file */
+    ret = H5Pset_libver_bounds(fapl2, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
+    CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+    /* These next tests use the same file */
+    /* Each sub-test runs twice: old-format pass (fapl) then new-format pass (fapl2) */
+    for (new_format = FALSE; new_format <= TRUE; new_format++) {
+        test_iter_group(new_format ? fapl2 : fapl, new_format); /* Test group iteration */
+        test_iter_group_large(new_format ? fapl2 : fapl); /* Test group iteration for large # of objects */
+        test_iter_attr(new_format ? fapl2 : fapl, new_format); /* Test attribute iteration */
+        test_grp_memb_funcs(new_format ? fapl2 : fapl); /* Test group member information functions */
+        test_links(new_format ? fapl2 : fapl); /* Test soft and hard link iteration */
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+        test_links_deprec(new_format ? fapl2 : fapl); /* Test soft and hard link iteration */
+#endif
+#endif
+    } /* end for */
+#if 0
+    /* Test the fix for issue HDFFV-10588 */
+    test_corrupted_attnamelen();
+#endif
+    /* Close FAPLs */
+    ret = H5Pclose(fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+    ret = H5Pclose(fapl2);
+    CHECK(ret, FAIL, "H5Pclose");
+} /* test_iterate() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_iterate
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Quincey Koziol
+ * April 5, 2000
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_iterate(void)
+{
+    /* Remove the temporary data file created by the iteration tests */
+    H5Fdelete(DATAFILE, H5P_DEFAULT);
+}
diff --git a/test/API/tmisc.c b/test/API/tmisc.c
new file mode 100644
index 0000000..d35a00b
--- /dev/null
+++ b/test/API/tmisc.c
@@ -0,0 +1,6349 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: tmisc
+ *
+ * Test miscellaneous features not tested elsewhere. Generally
+ * regression tests for bugs that are reported and don't
+ * have an existing test to add them to.
+ *
+ *************************************************************/
+
+#define H5D_FRIEND /*suppress error about including H5Dpkg */
+
+/* Define this macro to indicate that the testing APIs should be available */
+#define H5D_TESTING
+
+#include "testhdf5.h"
+/* #include "H5srcdir.h" */
+/* #include "H5Dpkg.h" */ /* Datasets */
+/* #include "H5MMprivate.h" */ /* Memory */
+
+/* Definitions for misc. test #1 */
+#define MISC1_FILE "tmisc1.h5"
+#define MISC1_VAL (13417386) /* 0xccbbaa */
+#define MISC1_VAL2 (15654348) /* 0xeeddcc */
+#define MISC1_DSET_NAME "/scalar_set"
+
+/* Definitions for misc. test #2 */
+#define MISC2_FILE_1 "tmisc2a.h5"
+#define MISC2_FILE_2 "tmisc2b.h5"
+#define MISC2_ATT_NAME_1 "scalar_att_1"
+#define MISC2_ATT_NAME_2 "scalar_att_2"
+
+typedef struct {
+ char *string;
+} misc2_struct;
+
+/* Definitions for misc. test #3 */
+#define MISC3_FILE "tmisc3.h5"
+#define MISC3_RANK 2
+#define MISC3_DIM1 6
+#define MISC3_DIM2 6
+#define MISC3_CHUNK_DIM1 2
+#define MISC3_CHUNK_DIM2 2
+#define MISC3_FILL_VALUE 2
+#define MISC3_DSET_NAME "/chunked"
+
+/* Definitions for misc. test #4 */
+#define MISC4_FILE_1 "tmisc4a.h5"
+#define MISC4_FILE_2 "tmisc4b.h5"
+#define MISC4_GROUP_1 "/Group1"
+#define MISC4_GROUP_2 "/Group2"
+
+/* Definitions for misc. test #5 */
+#define MISC5_FILE "tmisc5.h5"
+#define MISC5_DSETNAME "dset1"
+#define MISC5_DSETRANK 1
+#define MISC5_NELMTOPLVL 1
+#define MISC5_DBGNELM1 2
+#define MISC5_DBGNELM2 1
+#define MISC5_DBGNELM3 1
+#define MISC5_DBGELVAL1 999999999
+#define MISC5_DBGELVAL2 888888888
+#define MISC5_DBGELVAL3 777777777
+
+typedef struct {
+ int st1_el1;
+ hvl_t st1_el2;
+} misc5_struct1;
+
+typedef struct {
+ int st2_el1;
+ hvl_t st2_el2;
+} misc5_struct2;
+
+typedef struct {
+ int st3_el1;
+} misc5_struct3;
+
+typedef struct {
+ hid_t st3h_base;
+ hid_t st3h_id;
+} misc5_struct3_hndl;
+
+typedef struct {
+ hid_t st2h_base;
+ hid_t st2h_id;
+ misc5_struct3_hndl *st2h_st3hndl;
+} misc5_struct2_hndl;
+
+typedef struct {
+ hid_t st1h_base;
+ hid_t st1h_id;
+ misc5_struct2_hndl *st1h_st2hndl;
+} misc5_struct1_hndl;
+
+/* Definitions for misc. test #6 */
+#define MISC6_FILE "tmisc6.h5"
+#define MISC6_DSETNAME1 "dset1"
+#define MISC6_DSETNAME2 "dset2"
+#define MISC6_NUMATTR 16
+
+/* Definitions for misc. test #7 */
+#define MISC7_FILE "tmisc7.h5"
+#define MISC7_DSETNAME1 "Dataset1"
+#define MISC7_DSETNAME2 "Dataset2"
+#define MISC7_TYPENAME1 "Datatype1"
+#define MISC7_TYPENAME2 "Datatype2"
+
+/* Definitions for misc. test #8 */
+#define MISC8_FILE "tmisc8.h5"
+#define MISC8_DSETNAME1 "Dataset1"
+#define MISC8_DSETNAME4 "Dataset4"
+#define MISC8_DSETNAME5 "Dataset5"
+#define MISC8_DSETNAME8 "Dataset8"
+
+#ifndef H5_HAVE_PARALLEL
+#define MISC8_DSETNAME2 "Dataset2"
+#define MISC8_DSETNAME3 "Dataset3"
+#define MISC8_DSETNAME6 "Dataset6"
+#define MISC8_DSETNAME7 "Dataset7"
+#define MISC8_DSETNAME9 "Dataset9"
+#define MISC8_DSETNAME10 "Dataset10"
+#endif
+
+#define MISC8_RANK 2
+#define MISC8_DIM0 50
+#define MISC8_DIM1 50
+#define MISC8_CHUNK_DIM0 10
+#define MISC8_CHUNK_DIM1 10
+
+/* Definitions for misc. test #9 */
+#define MISC9_FILE "tmisc9.h5"
+
+/* Definitions for misc. test #10 */
+#define MISC10_FILE_OLD "tmtimeo.h5"
+#define MISC10_FILE_NEW "tmisc10.h5"
+#define MISC10_DSETNAME "Dataset1"
+
+/* Definitions for misc. test #11 */
+#define MISC11_FILE "tmisc11.h5"
+#define MISC11_USERBLOCK 1024
+#define MISC11_SIZEOF_OFF 4
+#define MISC11_SIZEOF_LEN 4
+#define MISC11_SYM_LK 8
+#define MISC11_SYM_IK 32
+#define MISC11_ISTORE_IK 64
+#define MISC11_NINDEXES 1
+
+/* Definitions for misc. test #12 */
+#define MISC12_FILE "tmisc12.h5"
+#define MISC12_DSET_NAME "Dataset"
+#define MISC12_SPACE1_RANK 1
+#define MISC12_SPACE1_DIM1 4
+#define MISC12_CHUNK_SIZE 2
+#define MISC12_APPEND_SIZE 5
+
+/* Definitions for misc. test #13 */
+#define MISC13_FILE_1 "tmisc13a.h5"
+#define MISC13_FILE_2 "tmisc13b.h5"
+#define MISC13_DSET1_NAME "Dataset1"
+#define MISC13_DSET2_NAME "Dataset2"
+#define MISC13_DSET3_NAME "Dataset3"
+#define MISC13_GROUP1_NAME "Group1"
+#define MISC13_GROUP2_NAME "Group2"
+#define MISC13_DTYPE_NAME "Datatype"
+#define MISC13_RANK 1
+#define MISC13_DIM1 600
+#define MISC13_CHUNK_DIM1 10
+#define MISC13_USERBLOCK_SIZE 512
+#define MISC13_COPY_BUF_SIZE 4096
+
+/* Definitions for misc. test #14 */
+#define MISC14_FILE "tmisc14.h5"
+#define MISC14_DSET1_NAME "Dataset1"
+#define MISC14_DSET2_NAME "Dataset2"
+#define MISC14_DSET3_NAME "Dataset3"
+#define MISC14_METADATA_SIZE 4096
+
+/* Definitions for misc. test #15 */
+#define MISC15_FILE "tmisc15.h5"
+#define MISC15_BUF_SIZE 1024
+
+/* Definitions for misc. test #16 */
+#define MISC16_FILE "tmisc16.h5"
+#define MISC16_SPACE_DIM 4
+#define MISC16_SPACE_RANK 1
+#define MISC16_STR_SIZE 8
+#define MISC16_DSET_NAME "Dataset"
+
+/* Definitions for misc. test #17 */
+#define MISC17_FILE "tmisc17.h5"
+#define MISC17_SPACE_RANK 2
+#define MISC17_SPACE_DIM1 4
+#define MISC17_SPACE_DIM2 8
+#define MISC17_DSET_NAME "Dataset"
+
+/* Definitions for misc. test #18 */
+#define MISC18_FILE "tmisc18.h5"
+#define MISC18_DSET1_NAME "Dataset1"
+#define MISC18_DSET2_NAME "Dataset2"
+
+/* Definitions for misc. test #19 */
+#define MISC19_FILE "tmisc19.h5"
+#define MISC19_DSET_NAME "Dataset"
+#define MISC19_ATTR_NAME "Attribute"
+#define MISC19_GROUP_NAME "Group"
+
+/* Definitions for misc. test #20 */
+#define MISC20_FILE "tmisc20.h5"
+#define MISC20_FILE_OLD "tlayouto.h5"
+#define MISC20_DSET_NAME "Dataset"
+#define MISC20_DSET2_NAME "Dataset2"
+#define MISC20_SPACE_RANK 2
+/* Make sure the product of the following 2 does not get too close to */
+/* 64 bits, risking an overflow. */
+#define MISC20_SPACE_DIM0 (8 * 1024 * 1024 * (uint64_t)1024)
+#define MISC20_SPACE_DIM1 ((256 * 1024 * (uint64_t)1024) + 1)
+#define MISC20_SPACE2_DIM0 8
+#define MISC20_SPACE2_DIM1 4
+
+#if defined(H5_HAVE_FILTER_SZIP) && !defined(H5_API_TEST_NO_FILTERS)
+/* Definitions for misc. test #21 */
+#define MISC21_FILE "tmisc21.h5"
+#define MISC21_DSET_NAME "Dataset"
+#define MISC21_SPACE_RANK 2
+#define MISC21_SPACE_DIM0 7639
+#define MISC21_SPACE_DIM1 6308
+#define MISC21_CHUNK_DIM0 2048
+#define MISC21_CHUNK_DIM1 2048
+
+/* Definitions for misc. test #22 */
+#define MISC22_FILE "tmisc22.h5"
+#define MISC22_DSET_NAME "Dataset"
+#define MISC22_SPACE_RANK 2
+#define MISC22_CHUNK_DIM0 512
+#define MISC22_CHUNK_DIM1 512
+#define MISC22_SPACE_DIM0 639
+#define MISC22_SPACE_DIM1 1308
+#endif /* H5_HAVE_FILTER_SZIP */
+
+/* Definitions for misc. test #23 */
+#define MISC23_FILE "tmisc23.h5"
+#define MISC23_NAME_BUF_SIZE 40
+
+/* Definitions for misc. test #24 */
+#define MISC24_FILE "tmisc24.h5"
+#define MISC24_GROUP_NAME "group"
+#define MISC24_GROUP_LINK "group_link"
+#define MISC24_DATASET_NAME "dataset"
+#define MISC24_DATASET_LINK "dataset_link"
+#define MISC24_DATATYPE_NAME "datatype"
+#define MISC24_DATATYPE_LINK "datatype_link"
+
+/* Definitions for misc. test #25 'a', 'b' & 'c' */
+#define MISC25A_FILE "foo.h5"
+#define MISC25A_GROUP0_NAME "grp0"
+#define MISC25A_GROUP1_NAME "/grp0/grp1"
+#define MISC25A_GROUP2_NAME "/grp0/grp2"
+#define MISC25A_GROUP3_NAME "/grp0/grp3"
+#define MISC25A_ATTR1_NAME "_long attribute_"
+#define MISC25A_ATTR1_LEN 11
+#define MISC25A_ATTR2_NAME "_short attr__"
+#define MISC25A_ATTR2_LEN 11
+#define MISC25A_ATTR3_NAME "_short attr__"
+#define MISC25A_ATTR3_LEN 1
+#define MISC25B_FILE "mergemsg.h5"
+#define MISC25B_GROUP "grp1"
+#define MISC25C_FILE "nc4_rename.h5"
+#define MISC25C_DSETNAME "da"
+#define MISC25C_DSETNAME2 "dz"
+#define MISC25C_DSETGRPNAME "ga"
+#define MISC25C_GRPNAME "gb"
+#define MISC25C_GRPNAME2 "gc"
+#define MISC25C_ATTRNAME "aa"
+#define MISC25C_ATTRNAME2 "ab"
+
+/* Definitions for misc. test #26 */
+#define MISC26_FILE "dcpl_file"
+
+/* Definitions for misc. test #27 */
+/* (Note that this test file is generated by the "gen_bad_ohdr.c" code) */
+#define MISC27_FILE "tbad_msg_count.h5"
+#define MISC27_GROUP "Group"
+
+/* Definitions for misc. test #28 */
+#define MISC28_FILE "tmisc28.h5"
+#define MISC28_SIZE 10
+#define MISC28_NSLOTS 10000
+
+/* Definitions for misc. test #29 */
+#define MISC29_ORIG_FILE "specmetaread.h5"
+#define MISC29_COPY_FILE "tmisc29.h5"
+#define MISC29_DSETNAME "dset2"
+
+/* Definitions for misc. test #30 */
+#define MISC30_FILE "tmisc30.h5"
+
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+/* Definitions for misc. test #31 */
+#define MISC31_FILE "tmisc31.h5"
+#define MISC31_DSETNAME "dset"
+#define MISC31_ATTRNAME1 "attr1"
+#define MISC31_ATTRNAME2 "attr2"
+#define MISC31_GROUPNAME "group"
+#define MISC31_PROPNAME "misc31_prop"
+#define MISC31_DTYPENAME "dtype"
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+
+/* Definitions for misc. test #33 */
+/* Note that this test file is generated by "gen_bad_offset.c" */
+/* and bad offset values are written to that file for testing */
+#define MISC33_FILE "bad_offset.h5"
+
+/* Definitions for misc. test #35 */
+#define MISC35_SPACE_RANK 3
+#define MISC35_SPACE_DIM1 3
+#define MISC35_SPACE_DIM2 15
+#define MISC35_SPACE_DIM3 13
+#define MISC35_NPOINTS 10
+
+/* Definitions for misc. test #37 */
+/* The test file is formerly named h5_nrefs_POC.
+ See https://nvd.nist.gov/vuln/detail/CVE-2020-10812 */
+#define CVE_2020_10812_FILENAME "cve_2020_10812.h5"
+
+#if defined(H5_HAVE_FILTER_SZIP) && !defined(H5_API_TEST_NO_FILTERS)
+/*-------------------------------------------------------------------------
+ * Function: h5_szip_can_encode
+ *
+ * Purpose: Retrieve the filter config flags for szip, tell if
+ * encoder is available.
+ *
+ * Return: 1: decode+encode is enabled
+ * 0: only decode is enabled
+ * -1: other
+ *
+ * Programmer:
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+h5_szip_can_encode(void)
+{
+    unsigned int filter_config_flags = 0;
+
+    /* Bug fix: the return value of H5Zget_filter_info() was previously
+     * ignored; on failure the flags variable was read uninitialized.
+     * If the query fails we cannot tell anything about the filter. */
+    if (H5Zget_filter_info(H5Z_FILTER_SZIP, &filter_config_flags) < 0)
+        return -1;
+
+    /* Decide based solely on the encode/decode capability bits */
+    switch (filter_config_flags & (H5Z_FILTER_CONFIG_ENCODE_ENABLED | H5Z_FILTER_CONFIG_DECODE_ENABLED)) {
+        case H5Z_FILTER_CONFIG_ENCODE_ENABLED | H5Z_FILTER_CONFIG_DECODE_ENABLED:
+            /* Both encoder and decoder are available */
+            return 1;
+        case H5Z_FILTER_CONFIG_DECODE_ENABLED:
+            /* Decoder only: data can be read but not written */
+            return 0;
+        default:
+            /* Neither capability, or (oddly) encoder without decoder */
+            return -1;
+    }
+}
+#endif /* H5_HAVE_FILTER_SZIP */
+
+/****************************************************************
+**
+** test_misc1(): test unlinking a dataset from a group and immediately
+** re-using the dataset name
+**
+****************************************************************/
+static void
+test_misc1(void)
+{
+    hid_t  fid, sid, did;
+    int    wdata; /* Value written */
+    int    rdata; /* Value read back */
+    herr_t ret;
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Unlinking Dataset and Re-creating It\n"));
+
+    /* Create the file and a scalar dataspace for the dataset */
+    fid = H5Fcreate(MISC1_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create the dataset and write the first value */
+    did = H5Dcreate2(fid, MISC1_DSET_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dcreate2");
+
+    wdata = MISC1_VAL;
+    ret   = H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Unlink the dataset, then immediately re-create it under the same name */
+    ret = H5Ldelete(fid, MISC1_DSET_NAME, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Ldelete");
+
+    did = H5Dcreate2(fid, MISC1_DSET_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dcreate2");
+
+    /* Write a different value the second time around */
+    wdata = MISC1_VAL2;
+    ret   = H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open the file read-only and verify the re-created dataset holds
+     * the second value, not the first */
+    fid = H5Fopen(MISC1_FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    did = H5Dopen2(fid, MISC1_DSET_NAME, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dopen2");
+
+    ret = H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rdata);
+    CHECK(ret, FAIL, "H5Dread");
+    VERIFY(rdata, MISC1_VAL2, "H5Dread");
+
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_misc1() */
+
+/* Build the compound datatype used by misc. test #2: a single
+ * variable-length string member named "string".  The caller is
+ * responsible for H5Tclose()-ing the returned datatype. */
+static hid_t
+misc2_create_type(void)
+{
+    hid_t  vlstr_type;
+    hid_t  comp_type;
+    herr_t ret;
+
+    /* Variable-length string type for the member */
+    vlstr_type = H5Tcopy(H5T_C_S1);
+    CHECK(vlstr_type, FAIL, "H5Tcopy");
+    ret = H5Tset_size(vlstr_type, H5T_VARIABLE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    /* Compound wrapper around the string */
+    comp_type = H5Tcreate(H5T_COMPOUND, sizeof(misc2_struct));
+    CHECK(comp_type, FAIL, "H5Tcreate");
+    ret = H5Tinsert(comp_type, "string", offsetof(misc2_struct, string), vlstr_type);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* The member type is copied on insert, so the local copy can go */
+    ret = H5Tclose(vlstr_type);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    return comp_type;
+}
+
+/* Write one VL-string compound attribute to the root group of each of two
+ * files, using the SAME datatype id for both.  Each write is followed by an
+ * immediate read-back/reclaim; note file1 is fully closed while file2 (and
+ * the shared type) remain open, which exercises the cross-file datatype
+ * conversion path that test_misc2 regresses. */
+static void
+test_misc2_write_attribute(void)
+{
+    hid_t file1, file2, root1, root2, dataspace, att1, att2;
+    hid_t type;
+    herr_t ret;
+    misc2_struct data, data_check;
+    char *string_att1 = HDstrdup("string attribute in file one");
+    char *string_att2 = HDstrdup("string attribute in file two");
+
+    /* Zero both structs so no uninitialized pointer is ever reclaimed */
+    HDmemset(&data, 0, sizeof(data));
+    HDmemset(&data_check, 0, sizeof(data_check));
+
+    type = misc2_create_type();
+
+    dataspace = H5Screate(H5S_SCALAR);
+    CHECK(dataspace, FAIL, "H5Screate");
+
+    /* Create file2 first; it stays open across the whole file1 lifetime */
+    file2 = H5Fcreate(MISC2_FILE_2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file2, FAIL, "H5Fcreate");
+
+    file1 = H5Fcreate(MISC2_FILE_1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file1, FAIL, "H5Fcreate");
+
+    root1 = H5Gopen2(file1, "/", H5P_DEFAULT);
+    CHECK(root1, FAIL, "H5Gopen2");
+
+    att1 = H5Acreate2(root1, MISC2_ATT_NAME_1, type, dataspace, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(att1, FAIL, "H5Acreate2");
+
+    data.string = string_att1;
+
+    ret = H5Awrite(att1, type, &data);
+    CHECK(ret, FAIL, "H5Awrite");
+
+    /* Read straight back and let the library free the VL string copy */
+    ret = H5Aread(att1, type, &data_check);
+    CHECK(ret, FAIL, "H5Aread");
+
+    ret = H5Treclaim(type, dataspace, H5P_DEFAULT, &data_check);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    ret = H5Aclose(att1);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    ret = H5Gclose(root1);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Close file1 entirely before touching file2 with the same type id */
+    ret = H5Fclose(file1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    root2 = H5Gopen2(file2, "/", H5P_DEFAULT);
+    CHECK(root2, FAIL, "H5Gopen2");
+
+    att2 = H5Acreate2(root2, MISC2_ATT_NAME_2, type, dataspace, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(att2, FAIL, "H5Acreate2");
+
+    data.string = string_att2;
+
+    ret = H5Awrite(att2, type, &data);
+    CHECK(ret, FAIL, "H5Awrite");
+
+    ret = H5Aread(att2, type, &data_check);
+    CHECK(ret, FAIL, "H5Aread");
+
+    ret = H5Treclaim(type, dataspace, H5P_DEFAULT, &data_check);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    ret = H5Aclose(att2);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    ret = H5Gclose(root2);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Tclose(type);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Sclose(dataspace);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Fclose(file2);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* The attribute values were written into the files; free our copies */
+    HDfree(string_att1);
+    HDfree(string_att2);
+}
+
+/* Open 'filename' read-only, read back the VL-string compound attribute
+ * 'att_name' from the root group, and reclaim the VL storage. */
+static void
+test_misc2_read_attribute(const char *filename, const char *att_name)
+{
+    hid_t fid, rootgrp, attid;
+    hid_t atype;
+    hid_t aspace;
+    herr_t status;
+    misc2_struct rdata;
+
+    /* Recreate the compound VL-string type used when the attribute was written */
+    atype = misc2_create_type();
+
+    fid = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    rootgrp = H5Gopen2(fid, "/", H5P_DEFAULT);
+    CHECK(rootgrp, FAIL, "H5Gopen2");
+
+    attid = H5Aopen(rootgrp, att_name, H5P_DEFAULT);
+    CHECK(attid, FAIL, "H5Aopen");
+
+    aspace = H5Aget_space(attid);
+    CHECK(aspace, FAIL, "H5Aget_space");
+
+    /* Read the attribute, then hand the VL string back to the library */
+    status = H5Aread(attid, atype, &rdata);
+    CHECK(status, FAIL, "H5Aread");
+
+    status = H5Treclaim(atype, aspace, H5P_DEFAULT, &rdata);
+    CHECK(status, FAIL, "H5Treclaim");
+
+    /* Tear everything down */
+    status = H5Sclose(aspace);
+    CHECK(status, FAIL, "H5Sclose");
+
+    status = H5Aclose(attid);
+    CHECK(status, FAIL, "H5Aclose");
+
+    status = H5Tclose(atype);
+    CHECK(status, FAIL, "H5Tclose");
+
+    status = H5Gclose(rootgrp);
+    CHECK(status, FAIL, "H5Gclose");
+
+    status = H5Fclose(fid);
+    CHECK(status, FAIL, "H5Fclose");
+}
+/****************************************************************
+**
+** test_misc2(): test using the same VL-derived datatype in two
+** different files, which was causing problems with the
+** datatype conversion functions
+**
+****************************************************************/
+static void
+test_misc2(void)
+{
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing VL datatype in two different files\n"));
+
+    /* Write an attribute with the shared VL-string compound type to each file */
+    test_misc2_write_attribute();
+    /* Re-open each file independently and read its attribute back */
+    test_misc2_read_attribute(MISC2_FILE_1, MISC2_ATT_NAME_1);
+    test_misc2_read_attribute(MISC2_FILE_2, MISC2_ATT_NAME_2);
+} /* end test_misc2() */
+
+/****************************************************************
+**
+** test_misc3(): Test reading from chunked dataset with non-zero
+** fill value
+**
+****************************************************************/
+static void
+test_misc3(void)
+{
+    hid_t file, dataspace, dataset, dcpl;
+    int rank = MISC3_RANK;
+    hsize_t dims[MISC3_RANK] = {MISC3_DIM1, MISC3_DIM2};
+    hsize_t chunk_dims[MISC3_RANK] = {MISC3_CHUNK_DIM1, MISC3_CHUNK_DIM2};
+    int fill = MISC3_FILL_VALUE;
+    int read_buf[MISC3_DIM1][MISC3_DIM2];
+    int i, j;
+    herr_t ret;
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing reading from chunked dataset with non-zero fill-value\n"));
+
+    file = H5Fcreate(MISC3_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file, FAIL, "H5Fcreate");
+
+    /* Create a simple dataspace */
+    dataspace = H5Screate_simple(rank, dims, NULL);
+    CHECK(dataspace, FAIL, "H5Screate_simple");
+
+    /* Create a dataset creation property list */
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, FAIL, "H5Pcreate");
+
+    /* Set the chunk information */
+    /* Bug fix: these two CHECKs previously tested 'dcpl' instead of 'ret',
+     * so failures of H5Pset_chunk/H5Pset_fill_value went undetected. */
+    ret = H5Pset_chunk(dcpl, rank, chunk_dims);
+    CHECK(ret, FAIL, "H5Pset_chunk");
+
+    /* Set the fill-value information */
+    ret = H5Pset_fill_value(dcpl, H5T_NATIVE_INT, &fill);
+    CHECK(ret, FAIL, "H5Pset_fill_value");
+
+    /* Create the dataset */
+    dataset = H5Dcreate2(file, MISC3_DSET_NAME, H5T_NATIVE_INT, dataspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Read from the dataset; nothing was written, so every element must
+     * come back as the fill-value */
+    ret = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &read_buf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    for (i = 0; i < MISC3_DIM1; i++)
+        for (j = 0; j < MISC3_DIM2; j++)
+            VERIFY(read_buf[i][j], fill, "H5Dread");
+
+    /* Release resources */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    ret = H5Sclose(dataspace);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_misc3() */
+
+/****************************************************************
+**
+** test_misc4(): Test the that 'fileno' field in H5O_info_t is
+** valid.
+**
+****************************************************************/
+static void
+test_misc4(void)
+{
+    hid_t file1, file2, group1, group2, group3;
+    H5O_info2_t oinfo1, oinfo2, oinfo3;
+    herr_t ret;
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing fileno working in H5O_info2_t\n"));
+
+    file1 = H5Fcreate(MISC4_FILE_1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file1, FAIL, "H5Fcreate");
+
+    /* Create the first group */
+    group1 = H5Gcreate2(file1, MISC4_GROUP_1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(group1, FAIL, "H5Gcreate2");
+
+    /* Create the second group */
+    group2 = H5Gcreate2(file1, MISC4_GROUP_2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(group2, FAIL, "H5Gcreate2");
+
+    /* Second file: same group name as file1's first group, different file */
+    file2 = H5Fcreate(MISC4_FILE_2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file2, FAIL, "H5Fcreate");
+
+    /* Create the first group */
+    group3 = H5Gcreate2(file2, MISC4_GROUP_1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(group3, FAIL, "H5Gcreate2");
+
+    /* Get the stat information for each group */
+    ret = H5Oget_info_by_name3(file1, MISC4_GROUP_1, &oinfo1, H5O_INFO_BASIC, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_info_by_name3");
+    ret = H5Oget_info_by_name3(file1, MISC4_GROUP_2, &oinfo2, H5O_INFO_BASIC, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_info_by_name3");
+    ret = H5Oget_info_by_name3(file2, MISC4_GROUP_1, &oinfo3, H5O_INFO_BASIC, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_info_by_name3");
+
+    /* Verify that the fileno values are the same for groups from file1 */
+    /* NOTE(review): the VERIFY label says "H5Oget_info_by_name" but the call
+     * above is H5Oget_info_by_name3 -- harmless, but worth aligning */
+    VERIFY(oinfo1.fileno, oinfo2.fileno, "H5Oget_info_by_name");
+
+    /* Verify that the fileno values are not the same between file1 & file2 */
+    if (oinfo1.fileno == oinfo3.fileno)
+        TestErrPrintf("Error on line %d: oinfo1.fileno != oinfo3.fileno\n", __LINE__);
+    if (oinfo2.fileno == oinfo3.fileno)
+        TestErrPrintf("Error on line %d: oinfo2.fileno != oinfo3.fileno\n", __LINE__);
+
+    /* Close the objects */
+    ret = H5Gclose(group1);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Gclose(group2);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Gclose(group3);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(file1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    ret = H5Fclose(file2);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_misc4() */
+
+/****************************************************************
+**
+** test_misc5(): Test several level deep nested compound & VL datatypes
+**
+****************************************************************/
+
+/*********************** struct3 ***********************/
+
+/* Allocate and build the handle for 'struct3': a compound base type with a
+ * single int member, plus a VL datatype derived from it.  Caller releases
+ * the result with delete_struct3(). */
+static misc5_struct3_hndl *
+create_struct3(void)
+{
+    misc5_struct3_hndl *hndl;   /* Newly-built handle */
+    herr_t              status; /* Error checking */
+
+    hndl = (misc5_struct3_hndl *)HDmalloc(sizeof(misc5_struct3_hndl));
+    CHECK_PTR(hndl, "malloc");
+
+    hndl->st3h_base = H5Tcreate(H5T_COMPOUND, sizeof(misc5_struct3));
+    CHECK(hndl->st3h_base, FAIL, "H5Tcreate");
+
+    status = H5Tinsert(hndl->st3h_base, "st3_el1", HOFFSET(misc5_struct3, st3_el1), H5T_NATIVE_INT);
+    CHECK(status, FAIL, "H5Tinsert");
+
+    hndl->st3h_id = H5Tvlen_create(hndl->st3h_base);
+    CHECK(hndl->st3h_id, FAIL, "H5Tvlen_create");
+
+    return hndl;
+}
+
+/* Release a 'struct3' handle: close the VL type, then the compound base
+ * type it was derived from, then free the handle itself. */
+static void
+delete_struct3(misc5_struct3_hndl *hndl)
+{
+    herr_t status; /* Error checking */
+
+    status = H5Tclose(hndl->st3h_id);
+    CHECK(status, FAIL, "H5Tclose");
+    status = H5Tclose(hndl->st3h_base);
+    CHECK(status, FAIL, "H5Tclose");
+
+    HDfree(hndl);
+}
+
+static void
+set_struct3(misc5_struct3 *buf)
+{
+    /* struct3 carries a single int member; give it the level-3 debug value */
+    buf->st3_el1 = MISC5_DBGELVAL3;
+}
+
+/*********************** struct2 ***********************/
+
+/* Build the handle for 'struct2': a compound type with an int member plus a
+ * VL sequence of struct3 (built via create_struct3()), and a VL datatype
+ * derived from that compound.  Release with delete_struct2(). */
+static misc5_struct2_hndl *
+create_struct2(void)
+{
+    misc5_struct2_hndl *str2hndl; /* New 'struct2' created */
+    herr_t ret; /* For error checking */
+
+    str2hndl = (misc5_struct2_hndl *)HDmalloc(sizeof(misc5_struct2_hndl));
+    CHECK_PTR(str2hndl, "HDmalloc");
+
+    str2hndl->st2h_base = H5Tcreate(H5T_COMPOUND, sizeof(misc5_struct2));
+    CHECK(str2hndl->st2h_base, FAIL, "H5Tcreate");
+
+    ret = H5Tinsert(str2hndl->st2h_base, "st2_el1", HOFFSET(misc5_struct2, st2_el1), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Nested level: the second member is a VL sequence of struct3 */
+    str2hndl->st2h_st3hndl = create_struct3();
+    CHECK_PTR(str2hndl->st2h_st3hndl, "create_struct3");
+
+    ret = H5Tinsert(str2hndl->st2h_base, "st2_el2", HOFFSET(misc5_struct2, st2_el2),
+                    str2hndl->st2h_st3hndl->st3h_id);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    str2hndl->st2h_id = H5Tvlen_create(str2hndl->st2h_base);
+    CHECK(str2hndl->st2h_id, FAIL, "H5Tvlen_create");
+
+    return str2hndl;
+}
+
+/* Release a 'struct2' handle: close the VL type, release the nested
+ * struct3 handle, close the compound base type, then free the handle. */
+static void
+delete_struct2(misc5_struct2_hndl *str2hndl)
+{
+    herr_t ret; /* For error checking */
+
+    ret = H5Tclose(str2hndl->st2h_id);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    delete_struct3(str2hndl->st2h_st3hndl);
+
+    /* Bug fix: the return value of this close was previously discarded, so
+     * the CHECK below re-tested the stale 'ret' from the first H5Tclose. */
+    ret = H5Tclose(str2hndl->st2h_base);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    HDfree(str2hndl);
+}
+
+/* Fill in one 'struct2' element: scalar value plus a freshly-allocated VL
+ * sequence of struct3 elements. */
+static void
+set_struct2(misc5_struct2 *buf)
+{
+    misc5_struct3 *elems; /* Convenience pointer to the VL buffer */
+    unsigned       u;     /* Local index variable */
+
+    buf->st2_el1     = MISC5_DBGELVAL2;
+    buf->st2_el2.len = MISC5_DBGNELM3;
+
+    elems = (misc5_struct3 *)HDmalloc(buf->st2_el2.len * sizeof(misc5_struct3));
+    CHECK_PTR(elems, "HDmalloc");
+    buf->st2_el2.p = elems;
+
+    for (u = 0; u < buf->st2_el2.len; u++)
+        set_struct3(&elems[u]);
+}
+
+static void
+clear_struct2(misc5_struct2 *buf)
+{
+    /* struct3 elements hold no nested VL data, so freeing the sequence
+     * buffer itself is sufficient */
+    HDfree(buf->st2_el2.p);
+}
+
+/*********************** struct1 ***********************/
+
+/* Build the handle for 'struct1', the outermost level: a compound with an
+ * int member and a VL sequence of struct2 (which itself nests struct3),
+ * plus a VL datatype derived from that compound.  Release with
+ * delete_struct1(). */
+static misc5_struct1_hndl *
+create_struct1(void)
+{
+    misc5_struct1_hndl *str1hndl; /* New 'struct1' created */
+    herr_t ret; /* For error checking */
+
+    str1hndl = (misc5_struct1_hndl *)HDmalloc(sizeof(misc5_struct1_hndl));
+    CHECK_PTR(str1hndl, "HDmalloc");
+
+    str1hndl->st1h_base = H5Tcreate(H5T_COMPOUND, sizeof(misc5_struct1));
+    CHECK(str1hndl->st1h_base, FAIL, "H5Tcreate");
+
+    ret = H5Tinsert(str1hndl->st1h_base, "st1_el1", HOFFSET(misc5_struct1, st1_el1), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Nested level: the second member is a VL sequence of struct2 */
+    str1hndl->st1h_st2hndl = create_struct2();
+    CHECK_PTR(str1hndl->st1h_st2hndl, "create_struct2");
+
+    ret = H5Tinsert(str1hndl->st1h_base, "st1_el2", HOFFSET(misc5_struct1, st1_el2),
+                    str1hndl->st1h_st2hndl->st2h_id);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    str1hndl->st1h_id = H5Tvlen_create(str1hndl->st1h_base);
+    CHECK(str1hndl->st1h_id, FAIL, "H5Tvlen_create");
+
+    return str1hndl;
+}
+
+/* Release a 'struct1' handle: close the VL type, release the nested
+ * struct2 handle, close the compound base type, then free the handle. */
+static void
+delete_struct1(misc5_struct1_hndl *hndl)
+{
+    herr_t status; /* Error checking */
+
+    status = H5Tclose(hndl->st1h_id);
+    CHECK(status, FAIL, "H5Tclose");
+
+    delete_struct2(hndl->st1h_st2hndl);
+
+    status = H5Tclose(hndl->st1h_base);
+    CHECK(status, FAIL, "H5Tclose");
+
+    HDfree(hndl);
+}
+
+/* Fill in one 'struct1' element: scalar value plus a freshly-allocated VL
+ * sequence of struct2 elements (each of which allocates its own VL data). */
+static void
+set_struct1(misc5_struct1 *buf)
+{
+    misc5_struct2 *elems; /* Convenience pointer to the VL buffer */
+    unsigned       u;     /* Local index variable */
+
+    buf->st1_el1     = MISC5_DBGELVAL1;
+    buf->st1_el2.len = MISC5_DBGNELM2;
+
+    elems = (misc5_struct2 *)HDmalloc(buf->st1_el2.len * sizeof(misc5_struct2));
+    CHECK_PTR(elems, "HDmalloc");
+    buf->st1_el2.p = elems;
+
+    for (u = 0; u < buf->st1_el2.len; u++)
+        set_struct2(&elems[u]);
+}
+
+/* Free one 'struct1' element's VL data: release each nested struct2's
+ * buffer first, then the top-level sequence buffer. */
+static void
+clear_struct1(misc5_struct1 *buf)
+{
+    misc5_struct2 *elems = (misc5_struct2 *)buf->st1_el2.p;
+    unsigned       u;
+
+    for (u = 0; u < buf->st1_el2.len; u++)
+        clear_struct2(&elems[u]);
+    HDfree(buf->st1_el2.p);
+}
+
+/* Misc. test #5: write a dataset whose datatype is a three-level nest of
+ * compound + VL types (struct1 -> VL of struct2 -> VL of struct3), close
+ * everything, then read it back and verify every leaf value at each
+ * nesting level. */
+static void
+test_misc5(void)
+{
+    hid_t loc_id, space_id, dataset_id;
+    hid_t mem_type_id;
+    misc5_struct1_hndl *str1hndl;
+    hsize_t dims[MISC5_DSETRANK];
+    hvl_t buf;
+    unsigned i, j, k;
+    herr_t ret;
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing several level deep nested compound & VL datatypes \n"));
+
+    /* Write the dataset out */
+    loc_id = H5Fcreate(MISC5_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(loc_id, FAIL, "H5Fcreate");
+
+    /* Create the memory structure to write */
+    str1hndl = create_struct1();
+    CHECK_PTR(str1hndl, "create_struct1");
+
+    /* Create the dataspace */
+    dims[0] = MISC5_NELMTOPLVL;
+    space_id = H5Screate_simple(MISC5_DSETRANK, dims, NULL);
+    CHECK(space_id, FAIL, "H5Screate_simple");
+
+    /* Create the dataset */
+    dataset_id = H5Dcreate2(loc_id, MISC5_DSETNAME, str1hndl->st1h_id, space_id, H5P_DEFAULT, H5P_DEFAULT,
+                            H5P_DEFAULT);
+    CHECK(dataset_id, FAIL, "H5Dcreate2");
+
+    /* Create the variable-length buffer */
+    buf.len = MISC5_DBGNELM1;
+    buf.p = HDmalloc((buf.len) * sizeof(misc5_struct1));
+    CHECK_PTR(buf.p, "HDmalloc");
+
+    /* Create the top-level VL information */
+    for (i = 0; i < MISC5_DBGNELM1; i++)
+        set_struct1(&(((misc5_struct1 *)(buf.p))[i]));
+
+    /* Write the data out */
+    ret = H5Dwrite(dataset_id, str1hndl->st1h_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, &buf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Release the top-level VL information (the library made its own copy) */
+    for (j = 0; j < MISC5_DBGNELM1; j++)
+        clear_struct1(&(((misc5_struct1 *)(buf.p))[j]));
+
+    /* Free the variable-length buffer */
+    HDfree(buf.p);
+
+    /* Close dataset */
+    ret = H5Dclose(dataset_id);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close dataspace */
+    ret = H5Sclose(space_id);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Delete memory structures */
+    delete_struct1(str1hndl);
+
+    /* Close file */
+    ret = H5Fclose(loc_id);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Read the dataset back in & verify it */
+    loc_id = H5Fopen(MISC5_FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(loc_id, FAIL, "H5Fopen");
+
+    /* Open dataset again */
+    dataset_id = H5Dopen2(loc_id, MISC5_DSETNAME, H5P_DEFAULT);
+    CHECK(dataset_id, FAIL, "H5Dopen2");
+
+    /* Get the dataset's datatype (re-used as the memory type for reading) */
+    mem_type_id = H5Dget_type(dataset_id);
+    CHECK(mem_type_id, FAIL, "H5Dget_type");
+
+    /* Get the dataset's dataspace */
+    space_id = H5Dget_space(dataset_id);
+    CHECK(space_id, FAIL, "H5Dget_space");
+
+    /* Read the data back in; the library allocates all nested VL buffers */
+    ret = H5Dread(dataset_id, mem_type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, &buf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Verify the correct information was read in: walk all three nesting
+     * levels and check each leaf against its level's debug value */
+    for (i = 0; i < (buf.len); i++) {
+        /* HDprintf("[%d]=%d\n",i, ((misc5_struct1 *)(buf.p))[i].st1_el1); */
+        VERIFY(((misc5_struct1 *)(buf.p))[i].st1_el1, MISC5_DBGELVAL1, "H5Dread");
+        for (j = 0; j < (((misc5_struct1 *)(buf.p))[i].st1_el2.len); j++) {
+            /* HDprintf("  [%d]=%d\n",j, ((misc5_struct2 *)(((misc5_struct1 *)
+             * (buf.p))[i].st1_el2.p))[j].st2_el1); */
+            VERIFY(((misc5_struct2 *)(((misc5_struct1 *)(buf.p))[i].st1_el2.p))[j].st2_el1, MISC5_DBGELVAL2,
+                   "H5Dread");
+            for (k = 0; k < (((misc5_struct2 *)(((misc5_struct1 *)(buf.p))[i].st1_el2.p))[j].st2_el2.len);
+                 k++) {
+                /* HDprintf("    [%d]=%d\n",k, ((misc5_struct3 *)(((misc5_struct2 *) (((misc5_struct1
+                 * *)(buf.p))[i]. st1_el2.p))[j].st2_el2.p))[k].st3_el1); */
+                VERIFY(((misc5_struct3 *)(((misc5_struct2 *)(((misc5_struct1 *)(buf.p))[i].st1_el2.p))[j]
+                            .st2_el2.p))[k]
+                           .st3_el1,
+                       MISC5_DBGELVAL3, "H5Dread");
+            } /* end for */
+        }
+    }
+
+    /* Reclaim the memory for the VL information */
+    ret = H5Treclaim(mem_type_id, space_id, H5P_DEFAULT, &buf);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Close dataspace */
+    ret = H5Sclose(space_id);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close datatype */
+    ret = H5Tclose(mem_type_id);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close dataset */
+    ret = H5Dclose(dataset_id);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(loc_id);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_misc5() */
+
+/****************************************************************
+**
+** test_misc6(): Test that object header continuation messages are
+** created correctly.
+**
+****************************************************************/
+static void
+test_misc6(void)
+{
+ hid_t loc_id, space_id, dataset_id;
+ hid_t attr_id;
+ char attr_name[16];
+ unsigned u;
+ herr_t ret;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing object header continuation code \n"));
+
+ /* Create the file */
+ loc_id = H5Fcreate(MISC6_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(loc_id, FAIL, "H5Fcreate");
+
+ /* Create the dataspace */
+ space_id = H5Screate(H5S_SCALAR);
+ CHECK(space_id, FAIL, "H5Screate");
+
+ /* Create the first dataset */
+ dataset_id =
+ H5Dcreate2(loc_id, MISC6_DSETNAME1, H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset_id, FAIL, "H5Dcreate2");
+
+ /* Close dataset */
+ ret = H5Dclose(dataset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Create the second dataset */
+ dataset_id =
+ H5Dcreate2(loc_id, MISC6_DSETNAME2, H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset_id, FAIL, "H5Dcreate2");
+
+ /* Close dataset */
+ ret = H5Dclose(dataset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(loc_id);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Loop through adding attributes to each dataset */
+ for (u = 0; u < MISC6_NUMATTR; u++) {
+ /* Create name for attribute */
+ HDsnprintf(attr_name, sizeof(attr_name), "Attr#%u", u);
+
+ /* Open the file */
+ loc_id = H5Fopen(MISC6_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(loc_id, FAIL, "H5Fopen");
+
+ /* Open first dataset */
+ dataset_id = H5Dopen2(loc_id, MISC6_DSETNAME1, H5P_DEFAULT);
+ CHECK(dataset_id, FAIL, "H5Dopen2");
+
+ /* Add attribute to dataset */
+ attr_id = H5Acreate2(dataset_id, attr_name, H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr_id, FAIL, "H5Acreate2");
+
+ /* Close attribute */
+ ret = H5Aclose(attr_id);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close dataset */
+ ret = H5Dclose(dataset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Open second dataset */
+ dataset_id = H5Dopen2(loc_id, MISC6_DSETNAME2, H5P_DEFAULT);
+ CHECK(dataset_id, FAIL, "H5Dopen2");
+
+ /* Add attribute to dataset */
+ attr_id = H5Acreate2(dataset_id, attr_name, H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr_id, FAIL, "H5Acreate2");
+
+ /* Close attribute */
+ ret = H5Aclose(attr_id);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close dataset */
+ ret = H5Dclose(dataset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(loc_id);
+ CHECK(ret, FAIL, "H5Fclose");
+ } /* end for */
+
+ /* Close dataspace */
+ ret = H5Sclose(space_id);
+ CHECK(ret, FAIL, "H5Sclose");
+
+} /* end test_misc6() */
+
+/****************************************************************
+**
+** test_misc7(): Test that datatypes are sensible to store on
+** disk. (i.e. not partially initialized)
+**
+****************************************************************/
+#if 0
+static void
+test_misc7(void)
+{
+ hid_t fid, did, tid, sid;
+ int enum_value = 1;
+ herr_t ret;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing sensible datatype on disk code \n"));
+
+ /* Attempt to commit a non-sensible datatype */
+
+ /* Create the file */
+ fid = H5Fcreate(MISC7_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create the dataspace */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Create the compound datatype to commit*/
+ tid = H5Tcreate(H5T_COMPOUND, (size_t)32);
+ CHECK(tid, FAIL, "H5Tcreate");
+
+ /* Attempt to commit an empty compound datatype */
+ ret = H5Tcommit2(fid, MISC7_TYPENAME1, tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VERIFY(ret, FAIL, "H5Tcommit2");
+
+ /* Attempt to use empty compound datatype to create dataset */
+ did = H5Dcreate2(fid, MISC7_DSETNAME1, tid, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VERIFY(ret, FAIL, "H5Dcreate2");
+
+ /* Add a field to the compound datatype */
+ ret = H5Tinsert(tid, "a", (size_t)0, H5T_NATIVE_INT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ /* Attempt to commit the compound datatype now - should work */
+ ret = H5Tcommit2(fid, MISC7_TYPENAME1, tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /* Attempt to use compound datatype to create dataset now - should work */
+ did = H5Dcreate2(fid, MISC7_DSETNAME1, tid, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dcreate2");
+
+ /* Close dataset */
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close compound datatype */
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Create the enum datatype to commit*/
+ tid = H5Tenum_create(H5T_NATIVE_INT);
+ CHECK(tid, FAIL, "H5Tenum_create");
+
+ /* Attempt to commit an empty enum datatype */
+ ret = H5Tcommit2(fid, MISC7_TYPENAME2, tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VERIFY(ret, FAIL, "H5Tcommit2");
+
+ /* Attempt to use empty enum datatype to create dataset */
+ did = H5Dcreate2(fid, MISC7_DSETNAME2, tid, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VERIFY(did, FAIL, "H5Dcreate2");
+
+ /* Add a member to the enum datatype */
+ ret = H5Tenum_insert(tid, "a", &enum_value);
+ CHECK(ret, FAIL, "H5Tenum_insert");
+
+ /* Attempt to commit the enum datatype now - should work */
+ ret = H5Tcommit2(fid, MISC7_TYPENAME2, tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /* Attempt to use enum datatype to create dataset now - should work */
+ did = H5Dcreate2(fid, MISC7_DSETNAME2, tid, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dcreate2");
+
+ /* Close dataset */
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close enum datatype */
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_misc7() */
+#endif
+
+/****************************************************************
+**
+** test_misc8(): Test storage size of various types of dataset
+** storage methods.
+**
+****************************************************************/
+#if 0
static void
test_misc8(void)
{
    /* Exercises H5Dget_storage_size() for contiguous, compact, and chunked
     * layouts under early/late/incremental allocation times, with and without
     * deflate compression where configured. */
    hid_t   fid, did, sid;
    hid_t   fapl;                                            /* File access property list */
    hid_t   dcpl;                                            /* Dataset creation property list */
    int     rank                   = MISC8_RANK;
    hsize_t dims[MISC8_RANK]       = {MISC8_DIM0, MISC8_DIM1};
    hsize_t chunk_dims[MISC8_RANK] = {MISC8_CHUNK_DIM0, MISC8_CHUNK_DIM1};
    hsize_t storage_size; /* Number of bytes of raw data storage used */
    int    *wdata;        /* Data to write */
    int    *tdata;        /* Temporary pointer to data write */
#ifdef VERIFY_DATA
    int *rdata;  /* Data to read */
    int *tdata2; /* Temporary pointer to data to read */
#endif /* VERIFY_DATA */
    unsigned u, v;                 /* Local index variables */
    int      mdc_nelmts;           /* Metadata number of elements */
    size_t   rdcc_nelmts;          /* Raw data number of elements */
    size_t   rdcc_nbytes;          /* Raw data number of bytes */
    double   rdcc_w0;              /* Raw data write percentage */
    hsize_t  start[MISC8_RANK];    /* Hyperslab start */
    hsize_t  count[MISC8_RANK];    /* Hyperslab block count */
    herr_t   ret;

    /* Output message about test being performed */
    MESSAGE(5, ("Testing dataset storage sizes\n"));

    /* Allocate space for the data to write & read */
    wdata = (int *)HDmalloc(sizeof(int) * MISC8_DIM0 * MISC8_DIM1);
    CHECK_PTR(wdata, "HDmalloc");
#ifdef VERIFY_DATA
    rdata = (int *)HDmalloc(sizeof(int) * MISC8_DIM0 * MISC8_DIM1);
    CHECK_PTR(rdata, "HDmalloc");
#endif /* VERIFY_DATA */

    /* Initialize values (repeating 0..12 pattern) */
    tdata = wdata;
    for (u = 0; u < MISC8_DIM0; u++)
        for (v = 0; v < MISC8_DIM1; v++)
            *tdata++ = (int)(((u * MISC8_DIM1) + v) % 13);

    /* Create a file access property list */
    fapl = H5Pcreate(H5P_FILE_ACCESS);
    CHECK(fapl, FAIL, "H5Pcreate");

    /* Get the default file access properties for caching */
    ret = H5Pget_cache(fapl, &mdc_nelmts, &rdcc_nelmts, &rdcc_nbytes, &rdcc_w0);
    CHECK(ret, FAIL, "H5Pget_cache");

    /* Decrease the size of the raw data cache */
    /* (so chunks are written to the file immediately, not held in cache) */
    rdcc_nbytes = 0;

    /* Set the file access properties for caching */
    ret = H5Pset_cache(fapl, mdc_nelmts, rdcc_nelmts, rdcc_nbytes, rdcc_w0);
    CHECK(ret, FAIL, "H5Pset_cache");

    /* Create the file */
    fid = H5Fcreate(MISC8_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(fid, FAIL, "H5Fcreate");

    /* Close file access property list */
    ret = H5Pclose(fapl);
    CHECK(ret, FAIL, "H5Pclose");

    /* Create a simple dataspace */
    sid = H5Screate_simple(rank, dims, NULL);
    CHECK(sid, FAIL, "H5Screate_simple");

    /* Select a hyperslab which coincides with chunk boundaries */
    /* (For later use) */
    start[0] = 1;
    start[1] = 1;
    count[0] = (MISC8_CHUNK_DIM0 * 2) - 1;
    count[1] = (MISC8_CHUNK_DIM1 * 2) - 1;
    ret      = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL, count, NULL);
    CHECK(ret, FAIL, "H5Sselect_hyperslab");

    /* Create a dataset creation property list */
    dcpl = H5Pcreate(H5P_DATASET_CREATE);
    CHECK(dcpl, FAIL, "H5Pcreate");

    /* I. contiguous dataset tests */

    ret = H5Pset_layout(dcpl, H5D_CONTIGUOUS);
    CHECK(ret, FAIL, "H5Pset_layout");

    /* Set the space allocation time to early */
    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY);
    CHECK(ret, FAIL, "H5Pset_alloc_time");

    /* Create a contiguous dataset, with space allocation early */
    did = H5Dcreate2(fid, MISC8_DSETNAME1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    CHECK(did, FAIL, "H5Dcreate2");

    /* Check the storage size (early allocation: full size even before write) */
    storage_size = H5Dget_storage_size(did);
    CHECK(storage_size, 0, "H5Dget_storage_size");
    VERIFY(storage_size, (hsize_t)(MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)),
           "H5Dget_storage_size");

    /* Close dataset ID */
    ret = H5Dclose(did);
    CHECK(ret, FAIL, "H5Dclose");

#ifndef H5_HAVE_PARALLEL
    /* Set the space allocation time to late */
    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_LATE);
    CHECK(ret, FAIL, "H5Pset_alloc_time");

    /* Create a contiguous dataset, with space allocation late */
    did = H5Dcreate2(fid, MISC8_DSETNAME2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    CHECK(did, FAIL, "H5Dcreate2");

    /* Check the storage size before data is written */
    storage_size = H5Dget_storage_size(did);
    VERIFY(storage_size, 0, "H5Dget_storage_size");

    /* Write data */
    ret = H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Check the storage size after data is written */
    storage_size = H5Dget_storage_size(did);
    CHECK(storage_size, 0, "H5Dget_storage_size");
    VERIFY(storage_size, (hsize_t)(MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)),
           "H5Dget_storage_size");

    /* Close dataset ID */
    ret = H5Dclose(did);
    CHECK(ret, FAIL, "H5Dclose");

    /* Set the space allocation time to incremental */
    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_INCR);
    CHECK(ret, FAIL, "H5Pset_alloc_time");

    /* Create a contiguous dataset, with space allocation late */
    did = H5Dcreate2(fid, MISC8_DSETNAME3, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    CHECK(did, FAIL, "H5Dcreate2");

    /* Check the storage size before data is written */
    storage_size = H5Dget_storage_size(did);
    VERIFY(storage_size, 0, "H5Dget_storage_size");

    /* Write data */
    ret = H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Check the storage size after data is written */
    storage_size = H5Dget_storage_size(did);
    CHECK(storage_size, 0, "H5Dget_storage_size");
    VERIFY(storage_size, (hsize_t)(MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)),
           "H5Dget_storage_size");

    /* Close dataset ID */
    ret = H5Dclose(did);
    CHECK(ret, FAIL, "H5Dclose");
#endif /* H5_HAVE_PARALLEL */

    /* II. compact dataset tests */
    ret = H5Pset_layout(dcpl, H5D_COMPACT);
    CHECK(ret, FAIL, "H5Pset_layout");

    /* Set the space allocation time to late */
    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_LATE);
    CHECK(ret, FAIL, "H5Pset_alloc_time");

    /* Create a contiguous dataset, with space allocation late */
    /* Should fail (compact layout requires early allocation) */
    H5E_BEGIN_TRY
    {
        did = H5Dcreate2(fid, MISC8_DSETNAME4, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    }
    H5E_END_TRY;
    VERIFY(did, FAIL, "H5Dcreate2");

    /* Set the space allocation time to incremental */
    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_INCR);
    CHECK(ret, FAIL, "H5Pset_alloc_time");

    /* Create a contiguous dataset, with space allocation incremental */
    /* Should fail */
    H5E_BEGIN_TRY
    {
        did = H5Dcreate2(fid, MISC8_DSETNAME4, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    }
    H5E_END_TRY;
    VERIFY(did, FAIL, "H5Dcreate2");

    /* Set the space allocation time to early */
    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY);
    CHECK(ret, FAIL, "H5Pset_alloc_time");

    /* Set the fill time to allocation */
    ret = H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC);
    CHECK(ret, FAIL, "H5Pset_alloc_time");

    /* Create a contiguous dataset, with space allocation early */
    did = H5Dcreate2(fid, MISC8_DSETNAME4, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    CHECK(did, FAIL, "H5Dcreate2");

    /* Check the storage size */
    storage_size = H5Dget_storage_size(did);
    CHECK(storage_size, 0, "H5Dget_storage_size");
    VERIFY(storage_size, (hsize_t)(MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)),
           "H5Dget_storage_size");

    /* Close dataset ID */
    ret = H5Dclose(did);
    CHECK(ret, FAIL, "H5Dclose");

    /* III. chunked dataset tests */

    ret = H5Pset_layout(dcpl, H5D_CHUNKED);
    CHECK(ret, FAIL, "H5Pset_layout");

    /* Set the space allocation time to early */
    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY);
    CHECK(ret, FAIL, "H5Pset_alloc_time");

    /* Use chunked storage for this dataset */
    ret = H5Pset_chunk(dcpl, rank, chunk_dims);
    CHECK(ret, FAIL, "H5Pset_chunk");

    /* Create a chunked dataset, with space allocation early */
    did = H5Dcreate2(fid, MISC8_DSETNAME5, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    CHECK(did, FAIL, "H5Dcreate2");

    /* Check the storage size after data is written */
    storage_size = H5Dget_storage_size(did);
    CHECK(storage_size, 0, "H5Dget_storage_size");
    VERIFY(storage_size, (hsize_t)(MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)),
           "H5Dget_storage_size");

    /* Close dataset ID */
    ret = H5Dclose(did);
    CHECK(ret, FAIL, "H5Dclose");

#ifndef H5_HAVE_PARALLEL
    /* Set the space allocation time to late */
    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_LATE);
    CHECK(ret, FAIL, "H5Pset_alloc_time");

    /* Use chunked storage for this dataset */
    ret = H5Pset_chunk(dcpl, rank, chunk_dims);
    CHECK(ret, FAIL, "H5Pset_chunk");

    /* Create a chunked dataset, with space allocation late */
    did = H5Dcreate2(fid, MISC8_DSETNAME6, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    CHECK(did, FAIL, "H5Dcreate2");

    /* Check the storage size after dataset is created */
    storage_size = H5Dget_storage_size(did);
    VERIFY(storage_size, 0, "H5Dget_storage_size");

    /* Write part of the dataset */
    /* (using the chunk-aligned hyperslab selected on 'sid' above as both
     *  memory and file selection) */
    ret = H5Dwrite(did, H5T_NATIVE_INT, sid, sid, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Check the storage size after data is written */
    storage_size = H5Dget_storage_size(did);
    CHECK(storage_size, 0, "H5Dget_storage_size");
    VERIFY(storage_size, (hsize_t)(MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)),
           "H5Dget_storage_size");

    /* Close dataset ID */
    ret = H5Dclose(did);
    CHECK(ret, FAIL, "H5Dclose");

    /* Set the space allocation time to incremental */
    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_INCR);
    CHECK(ret, FAIL, "H5Pset_alloc_time");

    /* Create a chunked dataset, with space allocation incremental */
    did = H5Dcreate2(fid, MISC8_DSETNAME7, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    CHECK(did, FAIL, "H5Dcreate2");

    /* Check the storage size before data is written */
    storage_size = H5Dget_storage_size(did);
    VERIFY(storage_size, 0, "H5Dget_storage_size");

    /* Write part of the dataset */
    ret = H5Dwrite(did, H5T_NATIVE_INT, sid, sid, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Check the storage size after only four chunks are written */
    storage_size = H5Dget_storage_size(did);
    VERIFY(storage_size, (hsize_t)(4 * MISC8_CHUNK_DIM0 * MISC8_CHUNK_DIM1 * H5Tget_size(H5T_NATIVE_INT)),
           "H5Dget_storage_size");

    /* Write entire dataset */
    ret = H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

#ifdef VERIFY_DATA
    /* Read data */
    ret = H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
    CHECK(ret, FAIL, "H5Dread");

    /* Check values written */
    tdata  = wdata;
    tdata2 = rdata;
    for (u = 0; u < MISC8_DIM0; u++)
        for (v = 0; v < MISC8_DIM1; v++, tdata++, tdata2++)
            if (*tdata != *tdata2)
                TestErrPrintf("Error on line %d: u=%u, v=%d, *tdata=%d, *tdata2=%d\n", __LINE__, (unsigned)u,
                              (unsigned)v, (int)*tdata, (int)*tdata2);
#endif /* VERIFY_DATA */

    /* Check the storage size after data is written */
    storage_size = H5Dget_storage_size(did);
    CHECK(storage_size, 0, "H5Dget_storage_size");
    VERIFY(storage_size, (hsize_t)(MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)),
           "H5Dget_storage_size");

    /* Close dataset ID */
    ret = H5Dclose(did);
    CHECK(ret, FAIL, "H5Dclose");
#endif /* H5_HAVE_PARALLEL */

    /* Set the space allocation time to early */
    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY);
    CHECK(ret, FAIL, "H5Pset_alloc_time");

    /* Use compression as well as chunking for these datasets */
#ifdef H5_HAVE_FILTER_DEFLATE
    ret = H5Pset_deflate(dcpl, 9);
    CHECK(ret, FAIL, "H5Pset_deflate");
#endif /* end H5_HAVE_FILTER_DEFLATE */

    /* Create a chunked dataset, with space allocation early */
    did = H5Dcreate2(fid, MISC8_DSETNAME8, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    CHECK(did, FAIL, "H5Dcreate2");

    /* Write part of the dataset */
    ret = H5Dwrite(did, H5T_NATIVE_INT, sid, sid, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Check the storage size after data is written */
    storage_size = H5Dget_storage_size(did);
    CHECK(storage_size, 0, "H5Dget_storage_size");
#ifdef H5_HAVE_FILTER_DEFLATE
    if (storage_size >= (MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)))
        TestErrPrintf("Error on line %d: data wasn't compressed! storage_size=%u\n", __LINE__,
                      (unsigned)storage_size);
#else /* Compression is not configured */
    if (storage_size != (MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)))
        TestErrPrintf("Error on line %d: wrong storage size! storage_size=%u\n", __LINE__,
                      (unsigned)storage_size);
#endif /* H5_HAVE_FILTER_DEFLATE */

    /* Close dataset ID */
    ret = H5Dclose(did);
    CHECK(ret, FAIL, "H5Dclose");

#ifndef H5_HAVE_PARALLEL
    /* Set the space allocation time to late */
    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_LATE);
    CHECK(ret, FAIL, "H5Pset_alloc_time");

    /* Create a chunked dataset, with space allocation late */
    did = H5Dcreate2(fid, MISC8_DSETNAME9, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    CHECK(did, FAIL, "H5Dcreate2");

    /* Check the storage size before data is written */
    storage_size = H5Dget_storage_size(did);
    VERIFY(storage_size, 0, "H5Dget_storage_size");

    /* Write part of the dataset */
    ret = H5Dwrite(did, H5T_NATIVE_INT, sid, sid, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Check the storage size after only four chunks are written */
    storage_size = H5Dget_storage_size(did);
    CHECK(storage_size, 0, "H5Dget_storage_size");
#ifdef H5_HAVE_FILTER_DEFLATE
    if (storage_size >= (MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)))
        TestErrPrintf("Error on line %d: data wasn't compressed! storage_size=%u\n", __LINE__,
                      (unsigned)storage_size);
#else /* Compression is not configured */
    if (storage_size != (MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)))
        TestErrPrintf("Error on line %d: wrong storage size! storage_size=%u\n", __LINE__,
                      (unsigned)storage_size);
#endif /* H5_HAVE_FILTER_DEFLATE */

    /* Write entire dataset */
    ret = H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

#ifdef VERIFY_DATA
    /* Read data */
    ret = H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
    CHECK(ret, FAIL, "H5Dread");

    /* Check values written */
    tdata  = wdata;
    tdata2 = rdata;
    for (u = 0; u < MISC8_DIM0; u++)
        for (v = 0; v < MISC8_DIM1; v++, tdata++, tdata2++)
            if (*tdata != *tdata2)
                TestErrPrintf("Error on line %d: u=%u, v=%d, *tdata=%d, *tdata2=%d\n", __LINE__, (unsigned)u,
                              (unsigned)v, (int)*tdata, (int)*tdata2);
#endif /* VERIFY_DATA */

    /* Check the storage size after data is written */
    storage_size = H5Dget_storage_size(did);
    CHECK(storage_size, 0, "H5Dget_storage_size");
#ifdef H5_HAVE_FILTER_DEFLATE
    if (storage_size >= (MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)))
        TestErrPrintf("Error on line %d: data wasn't compressed! storage_size=%u\n", __LINE__,
                      (unsigned)storage_size);
#else
    if (storage_size != (MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)))
        TestErrPrintf("Error on line %d: wrong storage size! storage_size=%u\n", __LINE__,
                      (unsigned)storage_size);
#endif /*H5_HAVE_FILTER_DEFLATE*/

    /* Close dataset ID */
    ret = H5Dclose(did);
    CHECK(ret, FAIL, "H5Dclose");

    /* Set the space allocation time to incremental */
    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_INCR);
    CHECK(ret, FAIL, "H5Pset_alloc_time");

    /* Create a chunked dataset, with space allocation incremental */
    did = H5Dcreate2(fid, MISC8_DSETNAME10, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    CHECK(did, FAIL, "H5Dcreate2");

    /* Check the storage size before data is written */
    storage_size = H5Dget_storage_size(did);
    VERIFY(storage_size, 0, "H5Dget_storage_size");

    /* Write part of the dataset */
    ret = H5Dwrite(did, H5T_NATIVE_INT, sid, sid, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Check the storage size after only four chunks are written */
    storage_size = H5Dget_storage_size(did);
    CHECK(storage_size, 0, "H5Dget_storage_size");
#ifdef H5_HAVE_FILTER_DEFLATE
    if (storage_size >= (4 * MISC8_CHUNK_DIM0 * MISC8_CHUNK_DIM1 * H5Tget_size(H5T_NATIVE_INT)))
        TestErrPrintf("Error on line %d: data wasn't compressed! storage_size=%u\n", __LINE__,
                      (unsigned)storage_size);
#else /* Compression is not configured */
    if (storage_size != (4 * MISC8_CHUNK_DIM0 * MISC8_CHUNK_DIM1 * H5Tget_size(H5T_NATIVE_INT)))
        TestErrPrintf("Error on line %d: wrong storage size! storage_size=%u\n", __LINE__,
                      (unsigned)storage_size);
#endif /* H5_HAVE_FILTER_DEFLATE */

    /* Write entire dataset */
    ret = H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

#ifdef VERIFY_DATA
    /* Read data */
    ret = H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
    CHECK(ret, FAIL, "H5Dread");

    /* Check values written */
    tdata  = wdata;
    tdata2 = rdata;
    for (u = 0; u < MISC8_DIM0; u++)
        for (v = 0; v < MISC8_DIM1; v++, tdata++, tdata2++)
            if (*tdata != *tdata2)
                TestErrPrintf("Error on line %d: u=%u, v=%d, *tdata=%d, *tdata2=%d\n", __LINE__, (unsigned)u,
                              (unsigned)v, (int)*tdata, (int)*tdata2);
#endif /* VERIFY_DATA */

    /* Check the storage size after data is written */
    storage_size = H5Dget_storage_size(did);
    CHECK(storage_size, 0, "H5Dget_storage_size");
#ifdef H5_HAVE_FILTER_DEFLATE
    if (storage_size >= (MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)))
        TestErrPrintf("Error on line %d: data wasn't compressed! storage_size=%u\n", __LINE__,
                      (unsigned)storage_size);
#else
    if (storage_size != (MISC8_DIM0 * MISC8_DIM1 * H5Tget_size(H5T_NATIVE_INT)))
        TestErrPrintf("Error on line %d: wrong storage size! storage_size=%u\n", __LINE__,
                      (unsigned)storage_size);
#endif /*H5_HAVE_FILTER_DEFLATE*/

    /* Close dataset ID */
    ret = H5Dclose(did);
    CHECK(ret, FAIL, "H5Dclose");
#endif /* H5_HAVE_PARALLEL */

    /* Close dataset creation property list */
    ret = H5Pclose(dcpl);
    CHECK(ret, FAIL, "H5Pclose");

    /* Close dataspace */
    ret = H5Sclose(sid);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close file */
    ret = H5Fclose(fid);
    CHECK(ret, FAIL, "H5Fclose");

    /* Free the read & write buffers */
    HDfree(wdata);
#ifdef VERIFY_DATA
    HDfree(rdata);
#endif /* VERIFY_DATA */
} /* end test_misc8() */
+#endif
+
+/****************************************************************
+**
+** test_misc9(): Test that H5Fopen() does not succeed for core
+** files, H5Fcreate() must be used to open them.
+**
+****************************************************************/
+static void
+test_misc9(void)
+{
+ hid_t fapl, fid;
+ herr_t ret;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing core file opening\n"));
+
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+
+ ret = H5Pset_fapl_core(fapl, (size_t)1024, 0);
+ CHECK(ret, FAIL, "H5Pset_fapl_core");
+
+ H5E_BEGIN_TRY
+ {
+ fid = H5Fopen(MISC9_FILE, H5F_ACC_RDWR, fapl);
+ }
+ H5E_END_TRY;
+ VERIFY(fid, FAIL, "H5Fopen");
+
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pset_fapl_core");
+} /* end test_misc9() */
+
+/****************************************************************
+**
+** test_misc10(): Test opening a dataset created with an older
+** version of the library (shares the tmtimeo.h5 file with the mtime.c
+** test - see notes in gen_old_mtime.c for notes on generating this
+** data file) and using the dataset creation property list from
+** that dataset to create a dataset with the current version of
+** the library. Also tests using file creation property in same way.
+**
+****************************************************************/
+#if 0
+static void
+test_misc10(void)
+{
+ hid_t file, file_new; /* File IDs for old & new files */
+ hid_t fcpl; /* File creation property list */
+ hid_t dataset, dataset_new; /* Dataset IDs for old & new datasets */
+ hid_t dcpl; /* Dataset creation property list */
+ hid_t space, type; /* Old dataset's dataspace & datatype */
+ const char *testfile = H5_get_srcdir_filename(MISC10_FILE_OLD); /* Corrected test file name */
+ hbool_t driver_is_default_compatible;
+ herr_t ret;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing using old dataset creation property list\n"));
+
+ ret = h5_driver_is_default_vfd_compatible(H5P_DEFAULT, &driver_is_default_compatible);
+ CHECK(ret, FAIL, "h5_driver_is_default_vfd_compatible");
+
+ if (!driver_is_default_compatible) {
+ HDprintf("-- SKIPPED --\n");
+ return;
+ }
+
+ /*
+ * Open the old file and the dataset and get old settings.
+ */
+ file = H5Fopen(testfile, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(file, FAIL, "H5Fopen");
+ fcpl = H5Fget_create_plist(file);
+ CHECK(fcpl, FAIL, "H5Fget_create_plist");
+
+ dataset = H5Dopen2(file, MISC10_DSETNAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+ dcpl = H5Dget_create_plist(dataset);
+ CHECK(dcpl, FAIL, "H5Dget_create_plist");
+ space = H5Dget_space(dataset);
+ CHECK(space, FAIL, "H5Dget_space");
+ type = H5Dget_type(dataset);
+ CHECK(type, FAIL, "H5Dget_type");
+
+ /* Create new file & dataset */
+ file_new = H5Fcreate(MISC10_FILE_NEW, H5F_ACC_TRUNC, fcpl, H5P_DEFAULT);
+ CHECK(file_new, FAIL, "H5Fcreate");
+
+ dataset_new = H5Dcreate2(file_new, MISC10_DSETNAME, type, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dataset_new, FAIL, "H5Dcreate2");
+
+ /* Close new dataset & file */
+ ret = H5Dclose(dataset_new);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(file_new);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Close old dataset information */
+ ret = H5Tclose(type);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Sclose(space);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close old file information */
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+} /* end test_misc10() */
+#endif
+
+/****************************************************************
+**
+** test_misc11(): Test that all properties in a file creation property
+** list are stored correctly in the file and can be retrieved
+** when the file is re-opened.
+**
+****************************************************************/
+static void
+test_misc11(void)
+{
+ hid_t file; /* File IDs for old & new files */
+ hid_t fcpl; /* File creation property list */
+ hsize_t userblock; /* Userblock size retrieved from FCPL */
+ size_t off_size; /* Size of offsets in the file */
+ size_t len_size; /* Size of lengths in the file */
+ unsigned sym_ik; /* Symbol table B-tree initial 'K' value */
+ unsigned istore_ik; /* Indexed storage B-tree initial 'K' value */
+ unsigned sym_lk; /* Symbol table B-tree leaf 'K' value */
+ unsigned nindexes; /* Shared message number of indexes */
+#if 0
+ H5F_info2_t finfo; /* global information about file */
+#endif
+ H5F_fspace_strategy_t strategy; /* File space strategy */
+ hsize_t threshold; /* Free-space section threshold */
+ hbool_t persist; /* To persist free-space or not */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing file creation properties retrieved correctly\n"));
+
+ /* Creating a file with the default file creation property list should
+ * create a version 0 superblock
+ */
+
+ /* Create file with default file creation property list */
+ file = H5Fcreate(MISC11_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(file, FAIL, "H5Fcreate");
+#if 0
+ /* Get the file's version information */
+ ret = H5Fget_info2(file, &finfo);
+ CHECK(ret, FAIL, "H5Fget_info2");
+ VERIFY(finfo.super.version, 0, "H5Fget_info2");
+ VERIFY(finfo.free.version, 0, "H5Fget_info2");
+ VERIFY(finfo.sohm.version, 0, "H5Fget_info2");
+#endif
+ /* Close file */
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Create a file creation property list */
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ CHECK(fcpl, FAIL, "H5Pcreate");
+
+ /* Set all the properties in the FCPL */
+ ret = H5Pset_userblock(fcpl, (hsize_t)MISC11_USERBLOCK);
+ CHECK(ret, FAIL, "H5Pset_userblock");
+
+ ret = H5Pset_sizes(fcpl, (size_t)MISC11_SIZEOF_OFF, (size_t)MISC11_SIZEOF_LEN);
+ CHECK(ret, FAIL, "H5Pset_sizes");
+
+ /* This should fail as (32770*2) will exceed ^16 - 2 bytes for storing btree entries */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pset_sym_k(fcpl, 32770, 0);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pset_sym_k");
+
+ ret = H5Pset_sym_k(fcpl, MISC11_SYM_IK, MISC11_SYM_LK);
+ CHECK(ret, FAIL, "H5Pset_sym_k");
+
+ /* This should fail as (32770*2) will exceed ^16 - 2 bytes for storing btree entries */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Pset_istore_k(fcpl, 32770);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Pset_istore_k");
+
+ ret = H5Pset_istore_k(fcpl, MISC11_ISTORE_IK);
+ CHECK(ret, FAIL, "H5Pset_istore_k");
+
+ ret = H5Pset_shared_mesg_nindexes(fcpl, MISC11_NINDEXES);
+ CHECK(ret, FAIL, "H5Pset_shared_mesg");
+
+ ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_NONE, FALSE, (hsize_t)1);
+ CHECK(ret, FAIL, "H5Pset_file_space");
+
+ /* Creating a file with the non-default file creation property list should
+ * create a version 2 superblock
+ */
+
+ /* Create file with custom file creation property list */
+ file = H5Fcreate(MISC11_FILE, H5F_ACC_TRUNC, fcpl, H5P_DEFAULT);
+ CHECK(file, FAIL, "H5Fcreate");
+
+ /* Close FCPL */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+#if 0
+ /* Get the file's version information */
+ ret = H5Fget_info2(file, &finfo);
+ CHECK(ret, FAIL, "H5Fget_info2");
+ VERIFY(finfo.super.version, 2, "H5Fget_info2");
+ VERIFY(finfo.free.version, 0, "H5Fget_info2");
+ VERIFY(finfo.sohm.version, 0, "H5Fget_info2");
+#endif
+ /* Close file */
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open the file */
+ file = H5Fopen(MISC11_FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(file, FAIL, "H5Fcreate");
+
+ /* Get the file's creation property list */
+ fcpl = H5Fget_create_plist(file);
+ CHECK(fcpl, FAIL, "H5Fget_create_plist");
+#if 0
+ /* Get the file's version information */
+ ret = H5Fget_info2(file, &finfo);
+ CHECK(ret, FAIL, "H5Fget_info2");
+ VERIFY(finfo.super.version, 2, "H5Fget_info2");
+ VERIFY(finfo.free.version, 0, "H5Fget_info2");
+ VERIFY(finfo.sohm.version, 0, "H5Fget_info2");
+#endif
+ /* Retrieve all the property values & check them */
+ ret = H5Pget_userblock(fcpl, &userblock);
+ CHECK(ret, FAIL, "H5Pget_userblock");
+ VERIFY(userblock, MISC11_USERBLOCK, "H5Pget_userblock");
+
+ ret = H5Pget_sizes(fcpl, &off_size, &len_size);
+ CHECK(ret, FAIL, "H5Pget_sizes");
+ VERIFY(off_size, MISC11_SIZEOF_OFF, "H5Pget_sizes");
+ VERIFY(len_size, MISC11_SIZEOF_LEN, "H5Pget_sizes");
+
+ ret = H5Pget_sym_k(fcpl, &sym_ik, &sym_lk);
+ CHECK(ret, FAIL, "H5Pget_sym_k");
+ VERIFY(sym_ik, MISC11_SYM_IK, "H5Pget_sym_k");
+ VERIFY(sym_lk, MISC11_SYM_LK, "H5Pget_sym_k");
+
+ ret = H5Pget_istore_k(fcpl, &istore_ik);
+ CHECK(ret, FAIL, "H5Pget_istore_k");
+ VERIFY(istore_ik, MISC11_ISTORE_IK, "H5Pget_istore_k");
+
+ ret = H5Pget_shared_mesg_nindexes(fcpl, &nindexes);
+ CHECK(ret, FAIL, "H5Pget_shared_mesg_nindexes");
+ VERIFY(nindexes, MISC11_NINDEXES, "H5Pget_shared_mesg_nindexes");
+
+ ret = H5Pget_file_space_strategy(fcpl, &strategy, &persist, &threshold);
+ CHECK(ret, FAIL, "H5Pget_file_space_strategy");
+ VERIFY(strategy, 3, "H5Pget_file_space_strategy");
+ VERIFY(persist, FALSE, "H5Pget_file_space_strategy");
+ VERIFY(threshold, 1, "H5Pget_file_space_strategy");
+
+ /* Close file */
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Close FCPL */
+ ret = H5Pclose(fcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+} /* end test_misc11() */
+
+/****************************************************************
+**
+** test_misc12(): Test that VL-types operate correctly in chunked
+** datasets that are extended.
+**
+****************************************************************/
+static void
+test_misc12(void)
+{
+    /* Data initially written to the dataset: four variable-length strings */
+    const char *wdata[MISC12_SPACE1_DIM1] = {
+        "Four score and seven years ago our forefathers brought forth on this continent a new nation,",
+        "conceived in liberty and dedicated to the proposition that all men are created equal.",
+        "Now we are engaged in a great civil war,",
+        "testing whether that nation or any nation so conceived and so dedicated can long endure."};
+    /* Data written into the appended region after the dataset is extended */
+    const char *wdata1[MISC12_APPEND_SIZE] = {
+        "O Gloria inmarcesible! O Jubilo inmortal! En surcos de dolores, el",
+        "bien germina ya! Ceso la horrible noche, La libertad sublime",
+        "derrama las auroras de su invencible luz.", "La humanidad entera, que entre cadenas gime, comprende",
+        "las palabras del que murio en la cruz."};
+    char *rdata[MISC12_SPACE1_DIM1 + MISC12_APPEND_SIZE]; /* Information read in */
+    hid_t fid1;
+    hid_t dataset;
+    hid_t sid1, space, memspace;
+    hid_t tid1, cparms;
+    hsize_t dims1[] = {MISC12_SPACE1_DIM1};
+    hsize_t dimsn[] = {MISC12_APPEND_SIZE};
+    hsize_t maxdims1[1] = {H5S_UNLIMITED};
+    hsize_t chkdims1[1] = {MISC12_CHUNK_SIZE};
+    hsize_t newsize[1] = {MISC12_SPACE1_DIM1 + MISC12_APPEND_SIZE};
+    hsize_t offset[1] = {MISC12_SPACE1_DIM1};
+    hsize_t count[1] = {MISC12_APPEND_SIZE};
+    int i; /* counting variable */
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing VL-type in chunked dataset\n"));
+
+    /* This test requires a relatively "fresh" library environment */
+    ret = H5garbage_collect();
+    CHECK(ret, FAIL, "H5garbage_collect");
+
+    /* Create file */
+    fid1 = H5Fcreate(MISC12_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets (unlimited max dimension so it can be extended) */
+    sid1 = H5Screate_simple(MISC12_SPACE1_RANK, dims1, maxdims1);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create a datatype to refer to */
+    tid1 = H5Tcopy(H5T_C_S1);
+    CHECK(tid1, FAIL, "H5Tcopy");
+
+    /* Make the string type variable-length */
+    ret = H5Tset_size(tid1, H5T_VARIABLE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    /* Chunked layout is required for an extendible dataset */
+    cparms = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(cparms, FAIL, "H5Pcreate");
+
+    ret = H5Pset_chunk(cparms, 1, chkdims1);
+    CHECK(ret, FAIL, "H5Pset_chunk");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, MISC12_DSET_NAME, tid1, sid1, H5P_DEFAULT, cparms, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Extend dataset to hold the appended strings */
+    ret = H5Dset_extent(dataset, newsize);
+    CHECK(ret, FAIL, "H5Dset_extent");
+
+    memspace = H5Screate_simple(MISC12_SPACE1_RANK, dimsn, NULL);
+    CHECK(memspace, FAIL, "H5Screate_simple");
+
+    space = H5Dget_space(dataset);
+    CHECK(space, FAIL, "H5Dget_space");
+
+    /* Select only the newly-added region of the (extended) file dataspace */
+    ret = H5Sselect_hyperslab(space, H5S_SELECT_SET, offset, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Write data to new portion of dataset */
+    ret = H5Dwrite(dataset, tid1, memspace, space, H5P_DEFAULT, wdata1);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Read all data back */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare the original region against wdata, then the appended region against wdata1 */
+    for (i = 0; i < MISC12_SPACE1_DIM1; i++)
+        if (HDstrcmp(wdata[i], rdata[i]) != 0)
+            TestErrPrintf("Error on line %d: wdata[%d]=%s, rdata[%d]=%s\n", __LINE__, i, wdata[i], i,
+                          rdata[i]);
+    for (; i < (MISC12_SPACE1_DIM1 + MISC12_APPEND_SIZE); i++)
+        if (HDstrcmp(wdata1[i - MISC12_SPACE1_DIM1], rdata[i]) != 0)
+            TestErrPrintf("Error on line %d: wdata1[%d]=%s, rdata[%d]=%s\n", __LINE__, i - MISC12_SPACE1_DIM1,
+                          wdata1[i - MISC12_SPACE1_DIM1], i, rdata[i]);
+
+    /* Re-select the whole dataspace so H5Treclaim frees every element read */
+    ret = H5Sselect_all(space);
+    CHECK(ret, FAIL, "H5Sselect_all");
+
+    /* Reclaim VL data memory (the library allocated the strings in rdata) */
+    ret = H5Treclaim(tid1, space, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Close Everything */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+    ret = H5Sclose(space);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Sclose(memspace);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Pclose(cparms);
+    CHECK(ret, FAIL, "H5Pclose");
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_misc12() */
+#if 0
+/* Various routines for misc. 13 test */
+/* Fill 'original_data' with a simple ramp pattern: each element equals its index */
+static void
+misc13_init_data(unsigned *original_data)
+{
+    unsigned pos;
+
+    for (pos = 0; pos < MISC13_DIM1; pos++)
+        original_data[pos] = pos;
+}
+
+/* Compare two MISC13_DIM1-element buffers; returns TRUE iff they match element-wise */
+static hbool_t
+misc13_verify_data_match(const unsigned *original_data, const unsigned *read_data)
+{
+    unsigned pos = 0;
+
+    /* Advance while elements agree; stopping early means a mismatch */
+    while (pos < MISC13_DIM1 && original_data[pos] == read_data[pos])
+        pos++;
+
+    return (pos == MISC13_DIM1) ? TRUE : FALSE;
+}
+
+/* Helper for misc13: create a 1-D dataset of unsigned ints named 'name' under
+ * 'loc_id' using dataset creation property list 'dcpl', and write 'data' to it.
+ */
+static void
+misc13_create_dataset(hid_t loc_id, const char *name, hid_t dcpl, const unsigned *data)
+{
+    hid_t   did      = -1;          /* Dataset ID */
+    hid_t   space_id = -1;          /* Dataspace ID */
+    hsize_t dset_dims[MISC13_RANK]; /* Dataset dimensions */
+    herr_t  status;                 /* Generic return value */
+
+    /* Build the dataspace used by the dataset */
+    dset_dims[0] = MISC13_DIM1;
+    space_id     = H5Screate_simple(MISC13_RANK, dset_dims, NULL);
+    CHECK(space_id, FAIL, "H5Screate_simple");
+
+    /* Create the dataset at the requested location */
+    did = H5Dcreate2(loc_id, name, H5T_NATIVE_UINT, space_id, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dcreate2");
+
+    /* Write the caller's buffer into the dataset */
+    status = H5Dwrite(did, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+    CHECK(status, FAIL, "H5Dwrite");
+
+    /* Release the dataset first, then its dataspace */
+    status = H5Dclose(did);
+    CHECK(status, FAIL, "H5Dclose");
+
+    status = H5Sclose(space_id);
+    CHECK(status, FAIL, "H5Sclose");
+
+} /* end misc13_create_dataset() */
+
+/* Helper for misc13: open the dataset 'name' under 'loc_id', read it back and
+ * verify its contents match 'data'.
+ */
+static void
+misc13_verify_dataset(hid_t loc_id, const char *name, const unsigned *data)
+{
+    unsigned *read_data = NULL; /* Buffer for the dataset read */
+    hid_t     dsid      = -1;   /* Dataset ID */
+    herr_t    ret;              /* Generic return value */
+
+    /* Create a data buffer for the dataset read */
+    read_data = (unsigned *)HDcalloc(MISC13_DIM1, sizeof(unsigned));
+    CHECK_PTR(read_data, "HDcalloc");
+
+    /* Open the contiguous dataset in the root group */
+    dsid = H5Dopen2(loc_id, name, H5P_DEFAULT);
+    CHECK(dsid, FAIL, "H5Dopen2");
+
+    /* Read the data */
+    ret = H5Dread(dsid, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_data);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Verify that the data are correct.  misc13_verify_data_match() returns an
+     * hbool_t (TRUE/FALSE), so compare against TRUE with VERIFY; the previous
+     * CHECK(ret, FAIL, ...) only flagged -1 and silently accepted a FALSE (0)
+     * mismatch result.
+     */
+    VERIFY(misc13_verify_data_match(data, read_data), TRUE, "misc13_verify_data_match");
+
+    /* Close the contiguous dataset */
+    ret = H5Dclose(dsid);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Free the dataset read buffer */
+    HDfree(read_data);
+
+} /* end misc13_verify_dataset() */
+
+/* Helper for misc13: build the test file 'name' containing, in both the root
+ * group and a nested group, a contiguous dataset, a chunked dataset and a
+ * committed datatype, all populated from 'data'.
+ */
+static void
+misc13_create_hdf_file(const char *name, const unsigned *data)
+{
+    hid_t fid = -1;                  /* File ID */
+    hid_t gid1 = -1;                 /* Group ID (level 1) */
+    hid_t gid2 = -1;                 /* Group ID (level 2) */
+    hid_t tid = -1;                  /* Datatype ID */
+    hid_t dcplid = -1;               /* Dataset creation property list ID */
+    hsize_t chunk_dims[MISC13_RANK]; /* Chunk dimensions */
+    herr_t ret;                      /* Generic return value */
+
+    /* Create file */
+    fid = H5Fcreate(name, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create DCPL for use with datasets (reused for every chunked dataset below) */
+    dcplid = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcplid, FAIL, "H5Pcreate");
+
+    /* Set the DCPL to be chunked */
+    ret = H5Pset_layout(dcplid, H5D_CHUNKED);
+    CHECK(ret, FAIL, "H5Pset_layout");
+
+    /* Use chunked storage for this DCPL */
+    chunk_dims[0] = MISC13_CHUNK_DIM1;
+    ret = H5Pset_chunk(dcplid, MISC13_RANK, chunk_dims);
+    CHECK(ret, FAIL, "H5Pset_chunk");
+
+    /* Create contiguous dataset in root group */
+    misc13_create_dataset(fid, MISC13_DSET1_NAME, H5P_DEFAULT, data);
+
+    /* Create chunked dataset in root group */
+    misc13_create_dataset(fid, MISC13_DSET2_NAME, dcplid, data);
+
+    /* Create a datatype to commit to the file */
+    tid = H5Tcopy(H5T_NATIVE_INT);
+    CHECK(tid, FAIL, "H5Tcopy");
+
+    /* Create a named datatype in the root group */
+    ret = H5Tcommit2(fid, MISC13_DTYPE_NAME, tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+
+    /* Close named datatype */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Create a group in the root group */
+    gid1 = H5Gcreate2(fid, MISC13_GROUP1_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid1, FAIL, "H5Gcreate2");
+
+    /* Create another group in the new group */
+    gid2 = H5Gcreate2(gid1, MISC13_GROUP2_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid2, FAIL, "H5Gcreate2");
+
+    /* Close the second group (it stays empty; only its existence is verified later) */
+    ret = H5Gclose(gid2);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Create contiguous dataset in new group */
+    misc13_create_dataset(gid1, MISC13_DSET1_NAME, H5P_DEFAULT, data);
+
+    /* Create chunked dataset in new group */
+    misc13_create_dataset(gid1, MISC13_DSET2_NAME, dcplid, data);
+
+    /* Create a datatype to commit to the new group */
+    tid = H5Tcopy(H5T_NATIVE_INT);
+    CHECK(tid, FAIL, "H5Tcopy");
+
+    /* Create a named datatype in the new group */
+    ret = H5Tcommit2(gid1, MISC13_DTYPE_NAME, tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+
+    /* Close named datatype */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close the first group */
+    ret = H5Gclose(gid1);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Close the DCPL */
+    ret = H5Pclose(dcplid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end misc13_create_hdf_file() */
+
+/* Helper for misc13: create file 'new_name' consisting of a 'size'-byte user
+ * block (zero-filled, beginning with the string 'str') followed by the entire
+ * contents of 'old_name'.
+ */
+static void
+misc13_insert_user_block(const char *old_name, const char *new_name, const char *str, size_t size)
+{
+    FILE  *new_fp = NULL;     /* Pointers to new & old files */
+    FILE  *old_fp = NULL;
+    void  *user_block = NULL; /* Pointer to user block to write to file */
+    void  *copy_buf = NULL;   /* Pointer to buffer for copying data */
+    size_t str_len;           /* Number of string bytes copied into the user block */
+    size_t written;           /* Amount of data written to new file */
+    size_t read_in;           /* Amount of data read in from old file */
+    int    ret;               /* Generic status value */
+
+    /* Allocate space for the user block (zero-filled) */
+    user_block = HDcalloc(size, (size_t)1);
+    CHECK_PTR(user_block, "HDcalloc");
+
+    /* Copy in the user block data, clamped to the block size so a long string
+     * cannot overflow the allocation.  (A shorter string is implicitly
+     * NUL-padded by the calloc above.)
+     */
+    str_len = HDstrlen(str);
+    if (str_len > size)
+        str_len = size;
+    HDmemcpy(user_block, str, str_len);
+
+    /* Open the new file */
+    new_fp = HDfopen(new_name, "wb");
+    CHECK_PTR(new_fp, "HDfopen");
+
+    /* Write the user block to the new file */
+    written = HDfwrite(user_block, (size_t)1, size, new_fp);
+    VERIFY(written, size, "HDfwrite");
+
+    /* Open the old file */
+    old_fp = HDfopen(old_name, "rb");
+    CHECK_PTR(old_fp, "HDfopen");
+
+    /* Allocate space for the copy buffer */
+    copy_buf = HDmalloc((size_t)MISC13_COPY_BUF_SIZE);
+    CHECK_PTR(copy_buf, "HDmalloc");
+
+    /* Copy data from the old file to the new file */
+    while ((read_in = HDfread(copy_buf, (size_t)1, (size_t)MISC13_COPY_BUF_SIZE, old_fp)) > 0) {
+        /* Write the data to the new file */
+        written = HDfwrite(copy_buf, (size_t)1, read_in, new_fp);
+        VERIFY(written, read_in, "HDfwrite");
+    }
+
+    /* Close the old file */
+    ret = HDfclose(old_fp);
+    VERIFY(ret, 0, "HDfclose");
+
+    /* Close the new file */
+    ret = HDfclose(new_fp);
+    VERIFY(ret, 0, "HDfclose");
+
+    /* Free the copy buffer */
+    HDfree(copy_buf);
+
+    /* Free the user block */
+    HDfree(user_block);
+
+} /* end misc13_insert_user_block() */
+
+/* Helper for misc13: open file 'name' and verify its entire expected layout —
+ * the userblock size recorded in the FCPL, the datasets and committed
+ * datatypes in the root group and nested groups, and (optionally) the dataset
+ * added after the userblock was inserted.
+ */
+static void
+misc13_verify_file(const char *name, const unsigned *data, hsize_t userblock_size,
+                   hbool_t check_for_new_dataset)
+{
+    hid_t fid = -1;       /* File ID */
+    hid_t gid1 = -1;      /* Group IDs */
+    hid_t gid2 = -1;      /* Group IDs */
+    hid_t tid = -1;       /* Datatype ID */
+    hid_t fcplid = -1;    /* File creation property list ID */
+    hsize_t ub_size_out;  /* Userblock size retrieved from FCPL */
+    herr_t ret;           /* Generic return value */
+
+    /* Open the file */
+    fid = H5Fopen(name, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Get the file's FCPL */
+    fcplid = H5Fget_create_plist(fid);
+    CHECK(fcplid, FAIL, "H5Fget_create_plist");
+
+    /* Get the user block size for the file */
+    ret = H5Pget_userblock(fcplid, &ub_size_out);
+    CHECK(ret, FAIL, "H5Pget_userblock");
+
+    /* Check the userblock size */
+    VERIFY(userblock_size, ub_size_out, "H5Pget_userblock");
+
+    /* Close the FCPL */
+    ret = H5Pclose(fcplid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Verify the contiguous dataset in the root group */
+    misc13_verify_dataset(fid, MISC13_DSET1_NAME, data);
+
+    /* Verify the chunked dataset in the root group */
+    misc13_verify_dataset(fid, MISC13_DSET2_NAME, data);
+
+    /* Verify the "new" contiguous dataset in the root group, if asked */
+    if (check_for_new_dataset)
+        misc13_verify_dataset(fid, MISC13_DSET3_NAME, data);
+
+    /* Open the named datatype in the root group */
+    tid = H5Topen2(fid, MISC13_DTYPE_NAME, H5P_DEFAULT);
+    CHECK(tid, FAIL, "H5Topen2");
+
+    /* Verify the type is correct */
+    VERIFY(H5Tequal(tid, H5T_NATIVE_INT), TRUE, "H5Tequal");
+
+    /* Close named datatype */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Open the first group */
+    gid1 = H5Gopen2(fid, MISC13_GROUP1_NAME, H5P_DEFAULT);
+    CHECK(gid1, FAIL, "H5Gopen2");
+
+    /* Verify the contiguous dataset in the first group */
+    misc13_verify_dataset(gid1, MISC13_DSET1_NAME, data);
+
+    /* Verify the chunked dataset in the first group */
+    misc13_verify_dataset(gid1, MISC13_DSET2_NAME, data);
+
+    /* Open the named datatype in the first group */
+    tid = H5Topen2(gid1, MISC13_DTYPE_NAME, H5P_DEFAULT);
+    CHECK(tid, FAIL, "H5Topen2");
+
+    /* Verify the type is correct */
+    VERIFY(H5Tequal(tid, H5T_NATIVE_INT), TRUE, "H5Tequal");
+
+    /* Close named datatype */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Open the second group (empty; opening it is the existence check) */
+    gid2 = H5Gopen2(gid1, MISC13_GROUP2_NAME, H5P_DEFAULT);
+    CHECK(gid2, FAIL, "H5Gopen2");
+
+    /* Close the second group */
+    ret = H5Gclose(gid2);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Close the first group */
+    ret = H5Gclose(gid1);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end misc13_verify_file() */
+
+/* Helper for misc13: re-open 'name' read-write and add one more contiguous
+ * dataset to its root group, populated from 'data'.
+ */
+static void
+misc13_add_to_new_file(const char *name, const unsigned *data)
+{
+    hid_t  file_id = -1; /* File ID */
+    herr_t status;       /* Generic return value */
+
+    /* Open the file for modification */
+    file_id = H5Fopen(name, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(file_id, FAIL, "H5Fopen");
+
+    /* Create the additional contiguous dataset in the root group */
+    misc13_create_dataset(file_id, MISC13_DSET3_NAME, H5P_DEFAULT, data);
+
+    /* Close the file */
+    status = H5Fclose(file_id);
+    CHECK(status, FAIL, "H5Fclose");
+
+} /* end misc13_add_to_new_file() */
+
+/****************************************************************
+**
+** test_misc13(): Test that file contents can be "slid down" by
+** inserting a user block in front of an existing file.
+**
+****************************************************************/
+static void
+test_misc13(void)
+{
+    unsigned *buf = NULL; /* Data written to / expected in the datasets */
+
+    /* Allocate and fill the dataset buffer with known values */
+    buf = (unsigned *)HDcalloc(MISC13_DIM1, sizeof(unsigned));
+    CHECK_PTR(buf, "HDcalloc");
+    misc13_init_data(buf);
+
+    /* Create the initial file (no user block) and check its contents */
+    misc13_create_hdf_file(MISC13_FILE_1, buf);
+    misc13_verify_file(MISC13_FILE_1, buf, (hsize_t)0, FALSE);
+
+    /* Produce a second file whose contents are the first file "slid down" by a
+     * user block, then make sure everything is still readable */
+    misc13_insert_user_block(MISC13_FILE_1, MISC13_FILE_2, "Test String", (size_t)MISC13_USERBLOCK_SIZE);
+    misc13_verify_file(MISC13_FILE_2, buf, (hsize_t)MISC13_USERBLOCK_SIZE, FALSE);
+
+    /* The shifted file must also be modifiable: add a dataset and re-verify,
+     * this time also checking for the newly-added dataset */
+    misc13_add_to_new_file(MISC13_FILE_2, buf);
+    misc13_verify_file(MISC13_FILE_2, buf, (hsize_t)MISC13_USERBLOCK_SIZE, TRUE);
+
+    /* Free the dataset buffer */
+    HDfree(buf);
+
+} /* end test_misc13() */
+#endif
+
+/****************************************************************
+**
+** test_misc14(): Test that deleting (unlinking) one dataset from a
+** file does not disturb the data stored in the remaining datasets.
+**
+****************************************************************/
+static void
+test_misc14(void)
+{
+    hid_t file_id;      /* File ID */
+    hid_t fapl;         /* File access property list ID */
+    hid_t DataSpace;    /* Dataspace ID */
+    hid_t Dataset1;     /* Dataset ID #1 */
+    hid_t Dataset2;     /* Dataset ID #2 */
+    hid_t Dataset3;     /* Dataset ID #3 */
+    double data1 = 5.0; /* Data to write for dataset #1 */
+    double data2 = 10.0; /* Data to write for dataset #2 */
+    double data3 = 15.0; /* Data to write for dataset #3 */
+    double rdata;        /* Data read in */
+    herr_t ret;          /* Generic return value */
+
+    /* Test creating two datasets and deleting the second */
+
+    /* Increase the metadata block size */
+    /* (This makes certain that all the data blocks are allocated together) */
+    fapl = H5Pcreate(H5P_FILE_ACCESS);
+    CHECK(fapl, FAIL, "H5Pcreate");
+
+    ret = H5Pset_meta_block_size(fapl, (hsize_t)MISC14_METADATA_SIZE);
+    CHECK(ret, FAIL, "H5Pset_meta_block_size");
+
+    /* Create scalar dataspace, shared by all datasets below */
+    DataSpace = H5Screate(H5S_SCALAR);
+    CHECK(DataSpace, FAIL, "H5Screate");
+
+    /* Open the file */
+    file_id = H5Fcreate(MISC14_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(file_id, FAIL, "H5Fcreate");
+
+    /* Create first dataset & write data */
+    Dataset1 = H5Dcreate2(file_id, MISC14_DSET1_NAME, H5T_NATIVE_DOUBLE, DataSpace, H5P_DEFAULT, H5P_DEFAULT,
+                          H5P_DEFAULT);
+    CHECK(Dataset1, FAIL, "H5Dcreate2");
+
+    ret = H5Dwrite(Dataset1, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &data1);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Create second dataset (to be unlinked). */
+    Dataset2 = H5Dcreate2(file_id, MISC14_DSET2_NAME, H5T_NATIVE_DOUBLE, DataSpace, H5P_DEFAULT, H5P_DEFAULT,
+                          H5P_DEFAULT);
+    CHECK(Dataset2, FAIL, "H5Dcreate2");
+
+    ret = H5Dwrite(Dataset2, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &data2);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Check data from first dataset */
+    ret = H5Dread(Dataset1, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rdata);
+    CHECK(ret, FAIL, "H5Dread");
+    if (!H5_DBL_ABS_EQUAL(rdata, data1))
+        TestErrPrintf("Error on line %d: data1!=rdata\n", __LINE__);
+
+    /* Unlink second dataset */
+    ret = H5Ldelete(file_id, MISC14_DSET2_NAME, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Ldelete");
+
+    /* Close second dataset */
+    ret = H5Dclose(Dataset2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Verify the data from dataset #1 survived the deletion */
+    ret = H5Dread(Dataset1, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rdata);
+    CHECK(ret, FAIL, "H5Dread");
+    if (!H5_DBL_ABS_EQUAL(rdata, data1))
+        TestErrPrintf("Error on line %d: data1!=rdata\n", __LINE__);
+
+    /* Close first dataset */
+    ret = H5Dclose(Dataset1);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close the file */
+    ret = H5Fclose(file_id);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Test creating two datasets and deleting the first */
+
+    /* Open the file */
+    file_id = H5Fcreate(MISC14_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(file_id, FAIL, "H5Fcreate");
+
+    /* Create first dataset & write data */
+    Dataset1 = H5Dcreate2(file_id, MISC14_DSET1_NAME, H5T_NATIVE_DOUBLE, DataSpace, H5P_DEFAULT, H5P_DEFAULT,
+                          H5P_DEFAULT);
+    CHECK(Dataset1, FAIL, "H5Dcreate2");
+
+    ret = H5Dwrite(Dataset1, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &data1);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Create second dataset */
+    Dataset2 = H5Dcreate2(file_id, MISC14_DSET2_NAME, H5T_NATIVE_DOUBLE, DataSpace, H5P_DEFAULT, H5P_DEFAULT,
+                          H5P_DEFAULT);
+    CHECK(Dataset2, FAIL, "H5Dcreate2");
+
+    ret = H5Dwrite(Dataset2, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &data2);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Check data from second dataset */
+    ret = H5Dread(Dataset2, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rdata);
+    CHECK(ret, FAIL, "H5Dread");
+    if (!H5_DBL_ABS_EQUAL(rdata, data2))
+        TestErrPrintf("Error on line %d: data2!=rdata\n", __LINE__);
+
+    /* Unlink first dataset */
+    ret = H5Ldelete(file_id, MISC14_DSET1_NAME, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Ldelete");
+
+    /* Close first dataset */
+    ret = H5Dclose(Dataset1);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Verify the data from dataset #2 survived the deletion */
+    ret = H5Dread(Dataset2, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rdata);
+    CHECK(ret, FAIL, "H5Dread");
+    if (!H5_DBL_ABS_EQUAL(rdata, data2))
+        TestErrPrintf("Error on line %d: data2!=rdata\n", __LINE__);
+
+    /* Close second dataset */
+    ret = H5Dclose(Dataset2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close the file */
+    ret = H5Fclose(file_id);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Test creating three datasets and deleting the second */
+
+    /* Open the file */
+    file_id = H5Fcreate(MISC14_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(file_id, FAIL, "H5Fcreate");
+
+    /* Create first dataset & write data */
+    Dataset1 = H5Dcreate2(file_id, MISC14_DSET1_NAME, H5T_NATIVE_DOUBLE, DataSpace, H5P_DEFAULT, H5P_DEFAULT,
+                          H5P_DEFAULT);
+    CHECK(Dataset1, FAIL, "H5Dcreate2");
+
+    ret = H5Dwrite(Dataset1, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &data1);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Create second dataset */
+    Dataset2 = H5Dcreate2(file_id, MISC14_DSET2_NAME, H5T_NATIVE_DOUBLE, DataSpace, H5P_DEFAULT, H5P_DEFAULT,
+                          H5P_DEFAULT);
+    CHECK(Dataset2, FAIL, "H5Dcreate2");
+
+    ret = H5Dwrite(Dataset2, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &data2);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Create third dataset */
+    Dataset3 = H5Dcreate2(file_id, MISC14_DSET3_NAME, H5T_NATIVE_DOUBLE, DataSpace, H5P_DEFAULT, H5P_DEFAULT,
+                          H5P_DEFAULT);
+    /* Fixed copy-paste bug: this previously checked Dataset2, so a failed
+     * creation of Dataset3 went undetected. */
+    CHECK(Dataset3, FAIL, "H5Dcreate2");
+
+    ret = H5Dwrite(Dataset3, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &data3);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Check data from first dataset */
+    ret = H5Dread(Dataset1, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rdata);
+    CHECK(ret, FAIL, "H5Dread");
+    if (!H5_DBL_ABS_EQUAL(rdata, data1))
+        TestErrPrintf("Error on line %d: data1!=rdata\n", __LINE__);
+
+    /* Check data from third dataset */
+    ret = H5Dread(Dataset3, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rdata);
+    CHECK(ret, FAIL, "H5Dread");
+    if (!H5_DBL_ABS_EQUAL(rdata, data3))
+        TestErrPrintf("Error on line %d: data3!=rdata\n", __LINE__);
+
+    /* Unlink second dataset */
+    ret = H5Ldelete(file_id, MISC14_DSET2_NAME, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Ldelete");
+
+    /* Close second dataset */
+    ret = H5Dclose(Dataset2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Verify the data from dataset #1 survived the deletion */
+    ret = H5Dread(Dataset1, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rdata);
+    CHECK(ret, FAIL, "H5Dread");
+    if (!H5_DBL_ABS_EQUAL(rdata, data1))
+        TestErrPrintf("Error on line %d: data1!=rdata\n", __LINE__);
+
+    /* Verify the data from dataset #3 survived the deletion */
+    ret = H5Dread(Dataset3, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rdata);
+    CHECK(ret, FAIL, "H5Dread");
+    if (!H5_DBL_ABS_EQUAL(rdata, data3))
+        TestErrPrintf("Error on line %d: data3!=rdata\n", __LINE__);
+
+    /* Close first dataset */
+    ret = H5Dclose(Dataset1);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close third dataset */
+    ret = H5Dclose(Dataset3);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close the file */
+    ret = H5Fclose(file_id);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Close shared objects (dataspace & fapl) */
+    ret = H5Sclose(DataSpace);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Pclose(fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+} /* end test_misc14() */
+
+/****************************************************************
+**
+** test_misc15(): Test that checking a file's access property list
+** more than once correctly increments internal reference counts.
+**
+****************************************************************/
+static void
+test_misc15(void)
+{
+    char filename[MISC15_BUF_SIZE];
+    hid_t file;  /* File ID */
+    hid_t fapl;  /* File access property list */
+    herr_t ret;  /* Generic return value */
+
+    fapl = h5_fileaccess();
+    h5_fixname(MISC15_FILE, fapl, filename, MISC15_BUF_SIZE);
+
+    /* Create the file & get its FAPL */
+    file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(file, FAIL, "H5Fcreate");
+
+    /* Close the creation-time FAPL; the file keeps its own reference */
+    ret = H5Pclose(fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    fapl = H5Fget_access_plist(file);
+    CHECK(fapl, FAIL, "H5Fget_access_plist");
+
+    ret = H5Pclose(fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Open the file & get its FAPL again */
+    file = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(file, FAIL, "H5Fopen");
+
+    fapl = H5Fget_access_plist(file);
+    CHECK(fapl, FAIL, "H5Fget_access_plist");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Verify that the file is still OK, using the FAPL obtained above even
+     * though the file it came from has already been closed */
+    ret = H5Fis_accessible(filename, fapl);
+    CHECK(ret, FAIL, "H5Fis_accessible");
+
+    ret = H5Pclose(fapl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Final open/close to confirm the file is undamaged */
+    file = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(file, FAIL, "H5Fopen");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_misc15() */
+
+/****************************************************************
+**
+** test_misc16(): Test array of NULL-terminated
+** fixed-length string. It creates a dataset of fixed-length
+** strings. Each string is MISC16_STR_SIZE long. There are
+** totally MISC16_SPACE_DIM by MISC16_SPACE_RANK strings.
+**
+****************************************************************/
+static void
+test_misc16(void)
+{
+    hid_t file; /* File ID */
+    herr_t ret; /* Generic return value */
+    char wdata[MISC16_SPACE_DIM][MISC16_STR_SIZE];
+    char rdata[MISC16_SPACE_DIM][MISC16_STR_SIZE]; /* Information read in */
+    hid_t dataset;  /* Dataset ID */
+    hid_t sid;      /* Dataspace ID */
+    hid_t tid;      /* Datatype ID */
+    hsize_t dims[] = {MISC16_SPACE_DIM};
+    int i;
+
+    HDmemset(wdata, 0, sizeof(wdata));
+    HDmemset(rdata, 0, sizeof(rdata));
+
+    /* Initialize the data */
+    /* (Note that these are supposed to stress the code, so are a little weird) */
+    /* NOTE(review): each HDmemcpy copies MISC16_STR_SIZE bytes from string
+     * literals of 8-9 bytes; this assumes MISC16_STR_SIZE is no larger than
+     * the literals (apparently 8) -- confirm against the macro definition. */
+    HDmemcpy(wdata[0], "1234567", MISC16_STR_SIZE);
+    HDmemcpy(wdata[1], "1234567\0", MISC16_STR_SIZE);
+    HDmemcpy(wdata[2], "12345678", MISC16_STR_SIZE);
+    HDmemcpy(wdata[3], "\0\0\0\0\0\0\0\0", MISC16_STR_SIZE);
+
+    /* Create the file */
+    file = H5Fcreate(MISC16_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid = H5Screate_simple(MISC16_SPACE_RANK, dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Create a datatype to refer to: fixed-length strings of MISC16_STR_SIZE bytes */
+    tid = H5Tcopy(H5T_C_S1);
+    CHECK(tid, FAIL, "H5Tcopy");
+
+    ret = H5Tset_size(tid, (size_t)MISC16_STR_SIZE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    /*ret = H5Tset_strpad(tid,H5T_STR_NULLPAD);
+    CHECK(ret, FAIL, "H5Tset_strpad");*/
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(file, MISC16_DSET_NAME, tid, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read in (both length and content of each string) */
+    /* NOTE(review): the error messages say "VL data" but the strings here are
+     * fixed-length; the wording appears to be copied from a VL-string test. */
+    for (i = 0; i < MISC16_SPACE_DIM; i++) {
+        if (HDstrlen(wdata[i]) != HDstrlen(rdata[i])) {
+            TestErrPrintf(
+                "Line %u: VL data length don't match!, strlen(wdata[%d])=%d, strlen(rdata[%d])=%d\n",
+                (unsigned)__LINE__, (int)i, (int)HDstrlen(wdata[i]), (int)i, (int)HDstrlen(rdata[i]));
+            continue;
+        } /* end if */
+        if (HDstrcmp(wdata[i], rdata[i]) != 0) {
+            TestErrPrintf("Line %u: VL data values don't match!, wdata[%d]=%s, rdata[%d]=%s\n",
+                          (unsigned)__LINE__, (int)i, wdata[i], (int)i, rdata[i]);
+            continue;
+        } /* end if */
+    }     /* end for */
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_misc16() */
+
+/****************************************************************
+**
+** test_misc17(): Test array of characters. It creates a dataset
+** of ASCII characters, with dimensionality of MISC17_SPACE_DIM1
+** by MISC17_SPACE_DIM2.
+**
+****************************************************************/
+static void
+test_misc17(void)
+{
+    hid_t file; /* File ID */
+    herr_t ret; /* Generic return value */
+    char wdata[MISC17_SPACE_DIM1][MISC17_SPACE_DIM2];
+    char rdata[MISC17_SPACE_DIM1][MISC17_SPACE_DIM2]; /* Information read in */
+    hid_t dataset;  /* Dataset ID */
+    hid_t sid;      /* Dataspace ID */
+    hid_t tid;      /* Datatype ID */
+    hsize_t dims[] = {MISC17_SPACE_DIM1, MISC17_SPACE_DIM2};
+    int i;
+
+    HDmemset(wdata, 0, sizeof(wdata));
+    HDmemset(rdata, 0, sizeof(rdata));
+
+    /* Initialize the data */
+    /* (Note that these are supposed to stress the code, so are a little weird) */
+    /* NOTE(review): each HDmemcpy copies MISC17_SPACE_DIM2 bytes from string
+     * literals of 8-9 bytes; this assumes MISC17_SPACE_DIM2 is no larger than
+     * the literals (apparently 8) -- confirm against the macro definition. */
+    HDmemcpy(wdata[0], "1234567", MISC17_SPACE_DIM2);
+    HDmemcpy(wdata[1], "1234567\0", MISC17_SPACE_DIM2);
+    HDmemcpy(wdata[2], "12345678", MISC17_SPACE_DIM2);
+    HDmemcpy(wdata[3], "\0\0\0\0\0\0\0\0", MISC17_SPACE_DIM2);
+
+    /* Create the file */
+    file = H5Fcreate(MISC17_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file, FAIL, "H5Fcreate");
+
+    /* Create 2-D dataspace (the second dimension holds the characters) */
+    sid = H5Screate_simple(MISC17_SPACE_RANK, dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Create a datatype to refer to.  Unlike misc16, no H5Tset_size is done:
+     * elements are single characters and the 2nd dataspace dimension supplies
+     * the string length. */
+    tid = H5Tcopy(H5T_C_S1);
+    CHECK(tid, FAIL, "H5Tcopy");
+
+    ret = H5Tset_strpad(tid, H5T_STR_NULLPAD);
+    CHECK(ret, FAIL, "H5Tset_strpad");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(file, MISC17_DSET_NAME, tid, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data in the way of strings. */
+    /* NOTE(review): the error messages say "VL data" but the rows here are
+     * fixed-length char arrays; the wording appears copied from a VL test. */
+    for (i = 0; i < MISC17_SPACE_DIM1; i++) {
+        if (HDstrlen(wdata[i]) != HDstrlen(rdata[i])) {
+            TestErrPrintf(
+                "Line %u: VL data length don't match!, strlen(wdata[%d])=%d, strlen(rdata[%d])=%d\n",
+                (unsigned)__LINE__, (int)i, (int)HDstrlen(wdata[i]), (int)i, (int)HDstrlen(rdata[i]));
+            continue;
+        } /* end if */
+        if (HDstrcmp(wdata[i], rdata[i]) != 0) {
+            TestErrPrintf("Line %u: VL data values don't match!, wdata[%d]=%s, rdata[%d]=%s\n",
+                          (unsigned)__LINE__, (int)i, wdata[i], (int)i, rdata[i]);
+            continue;
+        } /* end if */
+    }     /* end for */
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_misc17() */
+
+/****************************************************************
+**
+** test_misc18(): Test new object header information in H5O_info_t
+** struct.  Creates two datasets, then attaches attributes to each
+** in a loop (flushing each time) and verifies the attribute counts
+** reported by H5Oget_info_by_name3().
+**
+****************************************************************/
+static void
+test_misc18(void)
+{
+    hid_t fid; /* File ID */
+    hid_t sid; /* Dataspace ID */
+    hid_t did1, did2; /* Dataset IDs */
+    hid_t aid; /* Attribute ID */
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+    H5O_info1_t old_oinfo; /* (deprecated) information about object */
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+#endif
+    H5O_info2_t oinfo; /* Data model information about object */
+#if 0
+    H5O_native_info_t ninfo; /* Native file format information about object */
+#endif
+    char attr_name[32]; /* Attribute name buffer */
+    unsigned u; /* Local index variable */
+    herr_t ret; /* Generic return value */
+
+    /* Create the file */
+    fid = H5Fcreate(MISC18_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create dataspace for attributes */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create first dataset */
+    did1 = H5Dcreate2(fid, MISC18_DSET1_NAME, H5T_STD_U32LE, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did1, FAIL, "H5Dcreate2");
+
+    /* Get object information */
+    ret = H5Oget_info_by_name3(fid, MISC18_DSET1_NAME, &oinfo, H5O_INFO_NUM_ATTRS, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_info_by_name");
+    VERIFY(oinfo.num_attrs, 0, "H5Oget_info_by_name");
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+    ret = H5Oget_info_by_name2(fid, MISC18_DSET1_NAME, &old_oinfo, H5O_INFO_HDR | H5O_INFO_NUM_ATTRS,
+                               H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.nmesgs, 6, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.nchunks, 1, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.space.total, 272, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.space.free, 152, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.num_attrs, 0, "H5Oget_info_by_name");
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+    ret = H5Oget_native_info_by_name(fid, MISC18_DSET1_NAME, &ninfo, H5O_NATIVE_INFO_HDR, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.nmesgs, 6, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.nchunks, 1, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.space.total, 272, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.space.free, 152, "H5Oget_native_info_by_name");
+#endif
+
+    /* Create second dataset */
+    did2 = H5Dcreate2(fid, MISC18_DSET2_NAME, H5T_STD_U32LE, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did2, FAIL, "H5Dcreate2");
+
+    /* Get object information */
+    ret = H5Oget_info_by_name3(fid, MISC18_DSET2_NAME, &oinfo, H5O_INFO_NUM_ATTRS, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_info_by_name");
+    VERIFY(oinfo.num_attrs, 0, "H5Oget_info_by_name");
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+    ret = H5Oget_info_by_name2(fid, MISC18_DSET2_NAME, &old_oinfo, H5O_INFO_HDR | H5O_INFO_NUM_ATTRS,
+                               H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.nmesgs, 6, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.nchunks, 1, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.space.total, 272, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.space.free, 152, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.num_attrs, 0, "H5Oget_info_by_name");
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+    ret = H5Oget_native_info_by_name(fid, MISC18_DSET2_NAME, &ninfo, H5O_NATIVE_INFO_HDR, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.nmesgs, 6, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.nchunks, 1, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.space.total, 272, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.space.free, 152, "H5Oget_native_info_by_name");
+#endif
+
+    /* Loop creating attributes on each dataset, flushing them to the file each time */
+    for (u = 0; u < 10; u++) {
+        /* Set up attribute name */
+        HDsnprintf(attr_name, sizeof(attr_name), "Attr %u", u);
+
+        /* Create & close attribute on first dataset */
+        aid = H5Acreate2(did1, attr_name, H5T_STD_U32LE, sid, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(aid, FAIL, "H5Acreate2");
+
+        ret = H5Aclose(aid);
+        CHECK(ret, FAIL, "H5Aclose");
+
+        /* Create & close attribute on second dataset */
+        aid = H5Acreate2(did2, attr_name, H5T_STD_U32LE, sid, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(aid, FAIL, "H5Acreate2");
+
+        ret = H5Aclose(aid);
+        CHECK(ret, FAIL, "H5Aclose");
+
+        /* Flush file, to 'fix' size of dataset object headers */
+        ret = H5Fflush(fid, H5F_SCOPE_GLOBAL);
+        CHECK(ret, FAIL, "H5Fflush");
+    } /* end for */
+
+    /* Get object information for dataset #1 now */
+    ret = H5Oget_info_by_name3(fid, MISC18_DSET1_NAME, &oinfo, H5O_INFO_NUM_ATTRS, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_info_by_name");
+    VERIFY(oinfo.num_attrs, 10, "H5Oget_info_by_name");
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+    ret = H5Oget_info_by_name2(fid, MISC18_DSET1_NAME, &old_oinfo, H5O_INFO_HDR | H5O_INFO_NUM_ATTRS,
+                               H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.nmesgs, 24, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.nchunks, 9, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.space.total, 888, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.space.free, 16, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.num_attrs, 10, "H5Oget_info_by_name");
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+    ret = H5Oget_native_info_by_name(fid, MISC18_DSET1_NAME, &ninfo, H5O_NATIVE_INFO_HDR, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.nmesgs, 24, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.nchunks, 9, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.space.total, 888, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.space.free, 16, "H5Oget_native_info_by_name");
+#endif
+
+    /* Get object information for dataset #2 now */
+    ret = H5Oget_info_by_name3(fid, MISC18_DSET2_NAME, &oinfo, H5O_INFO_NUM_ATTRS, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_info_by_name");
+    VERIFY(oinfo.num_attrs, 10, "H5Oget_info_by_name");
+#if 0
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+    ret = H5Oget_info_by_name2(fid, MISC18_DSET2_NAME, &old_oinfo, H5O_INFO_HDR | H5O_INFO_NUM_ATTRS,
+                               H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.nmesgs, 24, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.nchunks, 9, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.space.total, 888, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.hdr.space.free, 16, "H5Oget_info_by_name");
+    VERIFY(old_oinfo.num_attrs, 10, "H5Oget_info_by_name");
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+    ret = H5Oget_native_info_by_name(fid, MISC18_DSET2_NAME, &ninfo, H5O_NATIVE_INFO_HDR, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.nmesgs, 24, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.nchunks, 9, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.space.total, 888, "H5Oget_native_info_by_name");
+    VERIFY(ninfo.hdr.space.free, 16, "H5Oget_native_info_by_name");
+#endif
+
+    /* Close second dataset */
+    ret = H5Dclose(did2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close first dataset */
+    ret = H5Dclose(did1);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_misc18() */
+
+/****************************************************************
+**
+** test_misc19(): Test incrementing & decrementing ref count on IDs
+**
+** For each ID type (file, property list, property class, datatype,
+** dataspace, dataset, attribute, group, error class, error message,
+** error stack), the pattern is: create the object, verify refcount
+** is 1, increment to 2, close once (back to 1), close again via
+** H5Idec_ref (to 0), then verify a further close fails.
+**
+****************************************************************/
+static void
+test_misc19(void)
+{
+    hid_t fid = -1; /* File ID */
+    hid_t sid = -1; /* Dataspace ID */
+    hid_t did = -1; /* Dataset ID */
+    hid_t tid = -1; /* Datatype ID */
+    hid_t aid = -1; /* Attribute ID */
+    hid_t plid = -1; /* Property List ID */
+    hid_t pcid = -1; /* Property Class ID */
+    hid_t gid = -1; /* Group ID */
+    hid_t ecid = -1; /* Error Class ID */
+    hid_t emid = -1; /* Error Message ID */
+    hid_t esid = -1; /* Error Stack ID */
+#if 0
+    hid_t vfdid = -1; /* Virtual File Driver ID */
+    hid_t volid = -1; /* Virtual Object Layer ID */
+    H5FD_class_t *vfd_cls = NULL; /* VFD class */
+    H5VL_class_t *vol_cls = NULL; /* VOL class */
+#endif
+    int rc; /* Reference count */
+    herr_t ret; /* Generic return value */
+
+    /* Check H5I operations on files */
+
+    /* Create the file */
+    fid = H5Fcreate(MISC19_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(fid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Inc the reference count */
+    rc = H5Iinc_ref(fid);
+    VERIFY(rc, 2, "H5Iinc_ref");
+
+    /* Close the file normally */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(fid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Close the file by decrementing the reference count */
+    rc = H5Idec_ref(fid);
+    VERIFY(rc, 0, "H5Idec_ref");
+
+    /* Try closing the file again (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Fclose(fid);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Fclose");
+
+    /* Check H5I operations on property lists */
+
+    /* Create the property list */
+    plid = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(plid, FAIL, "H5Pcreate");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(plid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Inc the reference count */
+    rc = H5Iinc_ref(plid);
+    VERIFY(rc, 2, "H5Iinc_ref");
+
+    /* Close the property list normally */
+    ret = H5Pclose(plid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(plid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Close the property list by decrementing the reference count */
+    rc = H5Idec_ref(plid);
+    VERIFY(rc, 0, "H5Idec_ref");
+
+    /* Try closing the property list again (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Pclose(plid);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Pclose");
+
+    /* Check H5I operations on property classes */
+
+    /* Create a property class */
+    pcid = H5Pcreate_class(H5P_DATASET_CREATE, "foo", NULL, NULL, NULL, NULL, NULL, NULL);
+    CHECK(pcid, FAIL, "H5Pcreate_class");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(pcid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Inc the reference count */
+    rc = H5Iinc_ref(pcid);
+    VERIFY(rc, 2, "H5Iinc_ref");
+
+    /* Close the property class normally */
+    ret = H5Pclose_class(pcid);
+    CHECK(ret, FAIL, "H5Pclose_class");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(pcid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Close the property class by decrementing the reference count */
+    rc = H5Idec_ref(pcid);
+    VERIFY(rc, 0, "H5Idec_ref");
+
+    /* Try closing the property class again (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Pclose_class(pcid);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Pclose_class");
+
+    /* Check H5I operations on datatypes */
+
+    /* Create a datatype */
+    tid = H5Tcreate(H5T_OPAQUE, (size_t)16);
+    CHECK(tid, FAIL, "H5Tcreate");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(tid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Inc the reference count */
+    rc = H5Iinc_ref(tid);
+    VERIFY(rc, 2, "H5Iinc_ref");
+
+    /* Close the datatype normally */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(tid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Close the datatype by decrementing the reference count */
+    rc = H5Idec_ref(tid);
+    VERIFY(rc, 0, "H5Idec_ref");
+
+    /* Try closing the datatype again (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Tclose(tid);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Tclose");
+
+    /* Check H5I operations on dataspaces */
+
+    /* Create a dataspace */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(sid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Inc the reference count */
+    rc = H5Iinc_ref(sid);
+    VERIFY(rc, 2, "H5Iinc_ref");
+
+    /* Close the dataspace normally */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(sid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Close the dataspace by decrementing the reference count */
+    rc = H5Idec_ref(sid);
+    VERIFY(rc, 0, "H5Idec_ref");
+
+    /* Try closing the dataspace again (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Sclose(sid);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Sclose");
+
+    /* Check H5I operations on datasets */
+
+    /* Create a file */
+    fid = H5Fcreate(MISC19_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create a dataspace */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create a dataset */
+    did = H5Dcreate2(fid, MISC19_DSET_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dcreate2");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(did);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Inc the reference count */
+    rc = H5Iinc_ref(did);
+    VERIFY(rc, 2, "H5Iinc_ref");
+
+    /* Close the dataset normally */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(did);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Close the dataset by decrementing the reference count */
+    rc = H5Idec_ref(did);
+    VERIFY(rc, 0, "H5Idec_ref");
+
+    /* Try closing the dataset again (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Dclose(did);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Dclose");
+
+    /* Close the dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Check H5I operations on attributes */
+
+    /* Create a file */
+    fid = H5Fcreate(MISC19_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Open the root group */
+    gid = H5Gopen2(fid, "/", H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gopen2");
+
+    /* Create a dataspace */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, FAIL, "H5Screate");
+
+    /* Create an attribute */
+    aid = H5Acreate2(gid, MISC19_ATTR_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(aid, FAIL, "H5Acreate2");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(aid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Inc the reference count */
+    rc = H5Iinc_ref(aid);
+    VERIFY(rc, 2, "H5Iinc_ref");
+
+    /* Close the attribute normally */
+    ret = H5Aclose(aid);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(aid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Close the attribute by decrementing the reference count */
+    rc = H5Idec_ref(aid);
+    VERIFY(rc, 0, "H5Idec_ref");
+
+    /* Try closing the attribute again (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Aclose(aid);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Aclose");
+
+    /* Close the root group */
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Close the dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Check H5I operations on groups */
+
+    /* Create a file */
+    fid = H5Fcreate(MISC19_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create a group */
+    gid = H5Gcreate2(fid, MISC19_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, FAIL, "H5Gcreate2");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(gid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Inc the reference count */
+    rc = H5Iinc_ref(gid);
+    VERIFY(rc, 2, "H5Iinc_ref");
+
+    /* Close the group normally */
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(gid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Close the group by decrementing the reference count */
+    rc = H5Idec_ref(gid);
+    VERIFY(rc, 0, "H5Idec_ref");
+
+    /* Try closing the group again (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Gclose(gid);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Gclose");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Check H5I operations on error classes */
+
+    /* Create an error class */
+    ecid = H5Eregister_class("foo", "bar", "baz");
+    CHECK(ecid, FAIL, "H5Eregister_class");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(ecid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Inc the reference count */
+    rc = H5Iinc_ref(ecid);
+    VERIFY(rc, 2, "H5Iinc_ref");
+
+    /* Close the error class normally */
+    ret = H5Eunregister_class(ecid);
+    CHECK(ret, FAIL, "H5Eunregister_class");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(ecid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Close the error class by decrementing the reference count */
+    rc = H5Idec_ref(ecid);
+    VERIFY(rc, 0, "H5Idec_ref");
+
+    /* Try closing the error class again (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Eunregister_class(ecid);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Eunregister_class");
+
+    /* Check H5I operations on error messages */
+
+    /* Create an error class */
+    ecid = H5Eregister_class("foo", "bar", "baz");
+    CHECK(ecid, FAIL, "H5Eregister_class");
+
+    /* Create an error message */
+    emid = H5Ecreate_msg(ecid, H5E_MAJOR, "mumble");
+    CHECK(emid, FAIL, "H5Ecreate_msg");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(emid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Inc the reference count */
+    rc = H5Iinc_ref(emid);
+    VERIFY(rc, 2, "H5Iinc_ref");
+
+    /* Close the error message normally */
+    ret = H5Eclose_msg(emid);
+    CHECK(ret, FAIL, "H5Eclose_msg");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(emid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Close the error message by decrementing the reference count */
+    rc = H5Idec_ref(emid);
+    VERIFY(rc, 0, "H5Idec_ref");
+
+    /* Try closing the error message again (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Eclose_msg(emid);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Eclose_msg");
+
+    /* Close the error class */
+    ret = H5Eunregister_class(ecid);
+    CHECK(ret, FAIL, "H5Eunregister_class");
+
+    /* Check H5I operations on error stacks */
+
+    /* Create an error stack (H5Eget_current_stack returns a new stack ID) */
+    esid = H5Eget_current_stack();
+    CHECK(esid, FAIL, "H5Eget_current_stack");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(esid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Inc the reference count */
+    rc = H5Iinc_ref(esid);
+    VERIFY(rc, 2, "H5Iinc_ref");
+
+    /* Close the error stack normally */
+    ret = H5Eclose_stack(esid);
+    CHECK(ret, FAIL, "H5Eclose_stack");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(esid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Close the error stack by decrementing the reference count */
+    rc = H5Idec_ref(esid);
+    VERIFY(rc, 0, "H5Idec_ref");
+
+    /* Try closing the error stack again (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Eclose_stack(esid);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Eclose_stack");
+
+#if 0
+    /* Check H5I operations on virtual file drivers */
+
+    /* Get a VFD class to register */
+    vfd_cls = h5_get_dummy_vfd_class();
+    CHECK_PTR(vfd_cls, "h5_get_dummy_vfd_class");
+
+    /* Register a virtual file driver */
+    vfdid = H5FDregister(vfd_cls);
+    CHECK(vfdid, FAIL, "H5FDregister");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(vfdid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Increment the reference count */
+    rc = H5Iinc_ref(vfdid);
+    VERIFY(rc, 2, "H5Iinc_ref");
+
+    /* Unregister the VFD normally */
+    ret = H5FDunregister(vfdid);
+    CHECK(ret, FAIL, "H5FDunregister");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(vfdid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Unregister the VFD by decrementing the reference count */
+    rc = H5Idec_ref(vfdid);
+    VERIFY(rc, 0, "H5Idec_ref");
+
+    /* Try unregistering the VFD again (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5FDunregister(vfdid);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5FDunregister");
+
+    HDfree(vfd_cls);
+
+    /* Check H5I operations on virtual object connectors */
+
+    /* Get a VOL class to register */
+    vol_cls = h5_get_dummy_vol_class();
+    CHECK_PTR(vol_cls, "h5_get_dummy_vol_class");
+
+    /* Register a VOL connector */
+    volid = H5VLregister_connector(vol_cls, H5P_DEFAULT);
+    CHECK(volid, FAIL, "H5VLregister_connector");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(volid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Increment the reference count */
+    rc = H5Iinc_ref(volid);
+    VERIFY(rc, 2, "H5Iinc_ref");
+
+    /* Unregister the VOL connector normally */
+    ret = H5VLunregister_connector(volid);
+    CHECK(ret, FAIL, "H5VLunregister_connector");
+
+    /* Check the reference count */
+    rc = H5Iget_ref(volid);
+    VERIFY(rc, 1, "H5Iget_ref");
+
+    /* Unregister the VOL connector by decrementing the reference count */
+    rc = H5Idec_ref(volid);
+    VERIFY(rc, 0, "H5Idec_ref");
+
+    /* Try unregistering the VOL connector again (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5VLunregister_connector(volid);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5VLunregister_connector");
+
+    HDfree(vol_cls);
+#endif
+} /* end test_misc19() */
+
+/****************************************************************
+**
+** test_misc20(): Test problems with version 2 of storage layout
+** message truncating dimensions
+**
+****************************************************************/
+#if 0
+static void
+test_misc20(void)
+{
+    hid_t fid; /* File ID */
+    hid_t sid; /* Dataspace ID */
+    hid_t did; /* Dataset ID */
+    hid_t dcpl; /* Dataset creation property list ID */
+    int rank = MISC20_SPACE_RANK; /* Rank of dataspace */
+    hsize_t big_dims[MISC20_SPACE_RANK] = {MISC20_SPACE_DIM0, MISC20_SPACE_DIM1}; /* Large dimensions */
+    hsize_t small_dims[MISC20_SPACE_RANK] = {MISC20_SPACE2_DIM0, MISC20_SPACE2_DIM1}; /* Small dimensions */
+    unsigned version; /* Version of storage layout info */
+    hsize_t contig_size; /* Size of contiguous storage computed from layout info */
+    const char *testfile = H5_get_srcdir_filename(MISC20_FILE_OLD); /* Corrected test file name */
+    hbool_t driver_is_default_compatible;
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing large dimension truncation fix\n"));
+
+    ret = h5_driver_is_default_vfd_compatible(H5P_DEFAULT, &driver_is_default_compatible);
+    CHECK(ret, FAIL, "h5_driver_is_default_vfd_compatible");
+
+    if (!driver_is_default_compatible) {
+        HDprintf("-- SKIPPED --\n");
+        return;
+    }
+
+    /* Verify that chunks with dimensions that are too large get rejected */
+
+    /* Create a dataset creation property list */
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, FAIL, "H5Pcreate");
+
+    /* Try to use chunked storage for this dataset (expected to fail) */
+    ret = H5Pset_chunk(dcpl, rank, big_dims);
+    VERIFY(ret, FAIL, "H5Pset_chunk");
+
+    /* Verify that the storage for the dataset is the correct size and hasn't
+     * been truncated.
+     */
+
+    /* Create the file */
+    fid = H5Fcreate(MISC20_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create dataspace with _really_ big dimensions */
+    sid = H5Screate_simple(rank, big_dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Make certain that the dataset's storage doesn't get allocated :-) */
+    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_LATE);
+    CHECK(ret, FAIL, "H5Pset_alloc_time");
+
+    /* Create dataset with big dataspace */
+    did = H5Dcreate2(fid, MISC20_DSET_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dcreate2");
+
+    /* Close dataset */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Create dataspace with small dimensions */
+    sid = H5Screate_simple(rank, small_dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Create dataset with small dataspace */
+    did = H5Dcreate2(fid, MISC20_DSET2_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dcreate2");
+
+    /* Close dataset */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close dataset creation property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open the file */
+    fid = H5Fopen(MISC20_FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open dataset with big dimensions */
+    did = H5Dopen2(fid, MISC20_DSET_NAME, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dopen2");
+
+    /* Get the layout version */
+    ret = H5D__layout_version_test(did, &version);
+    CHECK(ret, FAIL, "H5D__layout_version_test");
+    VERIFY(version, 3, "H5D__layout_version_test");
+
+    /* Get the layout contiguous storage size */
+    ret = H5D__layout_contig_size_test(did, &contig_size);
+    CHECK(ret, FAIL, "H5D__layout_contig_size_test");
+    VERIFY(contig_size, (MISC20_SPACE_DIM0 * MISC20_SPACE_DIM1 * H5Tget_size(H5T_NATIVE_INT)),
+           "H5D__layout_contig_size_test");
+
+    /* Close dataset */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Open dataset with small dimensions */
+    did = H5Dopen2(fid, MISC20_DSET2_NAME, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dopen2");
+
+    /* Get the layout version */
+    ret = H5D__layout_version_test(did, &version);
+    CHECK(ret, FAIL, "H5D__layout_version_test");
+    VERIFY(version, 3, "H5D__layout_version_test");
+
+    /* Get the layout contiguous storage size */
+    ret = H5D__layout_contig_size_test(did, &contig_size);
+    CHECK(ret, FAIL, "H5D__layout_contig_size_test");
+    VERIFY(contig_size, (MISC20_SPACE2_DIM0 * MISC20_SPACE2_DIM1 * H5Tget_size(H5T_NATIVE_INT)),
+           "H5D__layout_contig_size_test");
+
+    /* Close dataset */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Verify that the storage size is computed correctly for older versions of layout info */
+
+    /*
+     * Open the old file and the dataset and get old settings.
+     */
+    fid = H5Fopen(testfile, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open dataset with small dimensions */
+    did = H5Dopen2(fid, MISC20_DSET_NAME, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dopen2");
+
+    /* Get the layout version (old file uses layout version 2) */
+    ret = H5D__layout_version_test(did, &version);
+    CHECK(ret, FAIL, "H5D__layout_version_test");
+    VERIFY(version, 2, "H5D__layout_version_test");
+
+    /* Get the layout contiguous storage size */
+    ret = H5D__layout_contig_size_test(did, &contig_size);
+    CHECK(ret, FAIL, "H5D__layout_contig_size_test");
+    VERIFY(contig_size, (MISC20_SPACE_DIM0 * MISC20_SPACE_DIM1 * H5Tget_size(H5T_STD_I32LE)),
+           "H5D__layout_contig_size_test");
+
+    /* Close dataset */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_misc20() */
+#endif
+
+/*
+ test_misc21 and test_misc22 should be executed when SZIP is present
+ and encoder is available.
+ EIP 2004/8/04
+*/
+#if defined(H5_HAVE_FILTER_SZIP) && !defined(H5_API_TEST_NO_FILTERS)
+
+/****************************************************************
+**
+** test_misc21(): Test that late allocation time is treated the same
+**      as incremental allocation time, for chunked datasets
+**      when overwriting entire dataset where the chunks
+**      don't exactly match the dataspace.
+**
+****************************************************************/
+static void
+test_misc21(void)
+{
+    hid_t fid, sid, dcpl, dsid;
+    char *buf;
+    hsize_t dims[2] = {MISC21_SPACE_DIM0, MISC21_SPACE_DIM1},
+            chunk_size[2] = {MISC21_CHUNK_DIM0, MISC21_CHUNK_DIM1};
+    herr_t ret; /* Generic return value */
+
+    /* Skip if the SZIP encoder is unavailable */
+    if (h5_szip_can_encode() != 1)
+        return;
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing late allocation time w/chunks & filters\n"));
+
+    /* Allocate space for the buffer */
+    buf = (char *)HDcalloc(MISC21_SPACE_DIM0 * MISC21_SPACE_DIM1, 1);
+    CHECK(buf, NULL, "HDcalloc");
+
+    /* Create the file */
+    fid = H5Fcreate(MISC21_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create the DCPL */
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, FAIL, "H5Pcreate");
+
+    /* Set custom DCPL properties */
+    ret = H5Pset_chunk(dcpl, MISC21_SPACE_RANK, chunk_size);
+    CHECK(ret, FAIL, "H5Pset_chunk");
+    ret = H5Pset_szip(dcpl, H5_SZIP_NN_OPTION_MASK, 8);
+    CHECK(ret, FAIL, "H5Pset_szip");
+    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_LATE);
+    CHECK(ret, FAIL, "H5Pset_alloc_time");
+
+    /* Create the dataspace for the dataset */
+    sid = H5Screate_simple(MISC21_SPACE_RANK, dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Create the dataset */
+    dsid = H5Dcreate2(fid, MISC21_DSET_NAME, H5T_NATIVE_UINT8, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dsid, FAIL, "H5Dcreate2");
+
+    /* Write out the whole dataset */
+    ret = H5Dwrite(dsid, H5T_NATIVE_UINT8, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close everything */
+    ret = H5Dclose(dsid);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    HDfree(buf);
+} /* end test_misc21() */
+
+/****************************************************************
+**
+** test_misc22(): Test SZIP bits-per-pixel parameter.
+** This should be set according to the datatype.
+** Tests for precision and offset combinations on
+** unsigned integer types of 8/16/32/64 bits: for each
+** valid (precision, offset) pair, creates a chunked
+** SZIP-compressed dataset and verifies that the filter
+** recorded the expected bits-per-pixel in cd_values[2].
+**
+****************************************************************/
+static void
+test_misc22(void)
+{
+ hid_t fid, sid, dcpl, dsid, dcpl2;
+ char *buf;
+ hsize_t dims[2] = {MISC22_SPACE_DIM0, MISC22_SPACE_DIM1},
+ chunk_size[2] = {MISC22_CHUNK_DIM0, MISC22_CHUNK_DIM1};
+ herr_t ret; /* Generic return value */
+ hid_t dtype;
+ /* should extend test to signed ints */
+ hid_t idts[4];
+ /* do the same for floats
+ hid_t fdts[2]={H5T_NATIVE_FLOAT32,
+ H5T_NATIVE_FLOAT64}
+ */
+ size_t prec[4] = {3, 11, 19, 27};
+ size_t offsets[5] = {0, 3, 11, 19, 27};
+ int i, j, k;
+ unsigned int flags;
+ size_t cd_nelmts = 32;
+ unsigned int cd_values[32];
+ size_t correct;
+
+ /* Skip the test entirely when this build can't encode SZIP */
+ if (h5_szip_can_encode() != 1)
+ return;
+ idts[0] = H5Tcopy(H5T_NATIVE_UINT8);
+ idts[1] = H5Tcopy(H5T_NATIVE_UINT16);
+ idts[2] = H5Tcopy(H5T_NATIVE_UINT32);
+ idts[3] = H5Tcopy(H5T_NATIVE_UINT64);
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing datatypes with SZIP filter\n"));
+
+ /* Allocate space for the buffer (8 bytes/element covers the widest type) */
+ buf = (char *)HDcalloc(MISC22_SPACE_DIM0 * MISC22_SPACE_DIM1, 8);
+ CHECK(buf, NULL, "HDcalloc");
+
+ /* Create the file */
+ fid = H5Fcreate(MISC22_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create the dataspace for the dataset */
+ sid = H5Screate_simple(MISC22_SPACE_RANK, dims, NULL);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ for (i = 0; i < 4; i++) {
+ for (j = 0; j < 4; j++) {
+ if (prec[j] > (H5Tget_size(idts[i]) * 8))
+ continue; /* skip irrelevant combination */
+ for (k = 0; k < 5; k++) {
+ if (offsets[k] > (H5Tget_size(idts[i]) * 8))
+ continue; /* skip irrelevant combinations */
+ if ((prec[j] + offsets[k]) > (H5Tget_size(idts[i]) * 8))
+ continue;
+
+ /* Note: use %u for the unsigned-cast offset (was %d, a
+ * printf format/argument mismatch) */
+ MESSAGE(5, (" Testing datatypes size=%zu precision=%u offset=%u\n", H5Tget_size(idts[i]),
+ (unsigned)prec[j], (unsigned)offsets[k]));
+
+ /* Create the DCPL */
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, FAIL, "H5Pcreate");
+
+ /* Set DCPL properties */
+ ret = H5Pset_chunk(dcpl, MISC22_SPACE_RANK, chunk_size);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+ /* Set custom DCPL properties */
+ ret = H5Pset_szip(dcpl, H5_SZIP_NN_OPTION_MASK, 32); /* vary the PPB */
+ CHECK(ret, FAIL, "H5Pset_szip");
+
+ /* set up the datatype according to the loop */
+ dtype = H5Tcopy(idts[i]);
+ CHECK(dtype, FAIL, "H5Tcopy");
+ ret = H5Tset_precision(dtype, prec[j]);
+ CHECK(ret, FAIL, "H5Tset_precision");
+ ret = H5Tset_offset(dtype, offsets[k]);
+ CHECK(ret, FAIL, "H5Tset_offset");
+
+ /* compute the correct PPB that should be set by SZIP:
+ * with a nonzero offset SZIP falls back to the full
+ * type width, and widths above 24 bits are rounded up
+ * to 32 or 64 */
+ if (offsets[k] == 0)
+ correct = prec[j];
+ else
+ correct = H5Tget_size(idts[i]) * 8;
+ if (correct > 24) {
+ if (correct <= 32)
+ correct = 32;
+ else if (correct <= 64)
+ correct = 64;
+ } /* end if */
+
+ /* Create the dataset */
+ dsid = H5Dcreate2(fid, MISC22_DSET_NAME, dtype, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dsid, FAIL, "H5Dcreate2");
+
+ /* Write out the whole dataset */
+ ret = H5Dwrite(dsid, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close everything */
+ ret = H5Dclose(dsid);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Tclose(dtype);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Re-open the dataset and read back the filter parameters
+ * actually recorded in its creation property list */
+ dsid = H5Dopen2(fid, MISC22_DSET_NAME, H5P_DEFAULT);
+ CHECK(dsid, FAIL, "H5Dopen2");
+
+ dcpl2 = H5Dget_create_plist(dsid);
+ CHECK(dcpl2, FAIL, "H5Dget_create_plist");
+
+ ret = H5Pget_filter_by_id2(dcpl2, H5Z_FILTER_SZIP, &flags, &cd_nelmts, cd_values, 0, NULL,
+ NULL);
+ CHECK(ret, FAIL, "H5Pget_filter_by_id2");
+
+ VERIFY(cd_values[2], (unsigned)correct, "SZIP filter returned value for precision");
+
+ ret = H5Dclose(dsid);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Remove the dataset so the next iteration can reuse the name */
+ ret = H5Ldelete(fid, MISC22_DSET_NAME, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Ldelete");
+
+ ret = H5Pclose(dcpl2);
+ CHECK(ret, FAIL, "H5Pclose");
+ }
+ }
+ }
+ ret = H5Tclose(idts[0]);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Tclose(idts[1]);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Tclose(idts[2]);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Tclose(idts[3]);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ HDfree(buf);
+} /* end test_misc22() */
+#endif /* H5_HAVE_FILTER_SZIP */
+
+/****************************************************************
+**
+** test_misc23(): Test intermediate group creation.
+** Verifies that the H5*create2/H5Tcommit2/H5L* APIs create
+** missing intermediate groups when the link creation property
+** list has the "create intermediate group" flag set, and that
+** creation through a missing path fails without the flag
+** (including via the deprecated H5Gcreate1/H5Dcreate1 APIs).
+**
+****************************************************************/
+static void
+test_misc23(void)
+{
+ hsize_t dims[] = {10};
+ hid_t file_id = 0, group_id = 0, type_id = 0, space_id = 0, tmp_id = 0, create_id = H5P_DEFAULT,
+ access_id = H5P_DEFAULT;
+#ifndef NO_OBJECT_GET_NAME
+ char objname[MISC23_NAME_BUF_SIZE]; /* Name of object */
+#endif
+ H5O_info2_t oinfo;
+ htri_t tri_status;
+#ifndef NO_OBJECT_GET_NAME
+ ssize_t namelen;
+#endif
+ herr_t status;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing intermediate group creation\n"));
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate(MISC23_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(file_id, FAIL, "H5Fcreate");
+
+ /* Build some infrastructure */
+ group_id = H5Gcreate2(file_id, "/A", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group_id, FAIL, "H5Gcreate2");
+
+ space_id = H5Screate_simple(1, dims, NULL);
+ CHECK(space_id, FAIL, "H5Screate_simple");
+
+ type_id = H5Tcopy(H5T_STD_I32BE);
+ CHECK(type_id, FAIL, "H5Tcopy");
+
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+ /**********************************************************************
+ * test the old APIs
+ **********************************************************************/
+
+ /* Creating through a missing intermediate group ("B00a") must fail,
+ * since the old APIs have no way to request intermediate creation */
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Gcreate1(file_id, "/A/B00a/grp", (size_t)0);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Gcreate1");
+
+ /* Make sure that size_hint values that can't fit into a 32-bit
+ * unsigned integer are rejected. Only necessary on systems where
+ * size_t is a 64-bit type.
+ */
+ if (SIZE_MAX > UINT32_MAX) {
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Gcreate1(file_id, "/size_hint_too_large", SIZE_MAX);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Gcreate1");
+ }
+
+ /* Make sure the largest size_hint value works */
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Gcreate1(file_id, "/largest_size_hint", UINT32_MAX);
+ }
+ H5E_END_TRY;
+ CHECK(tmp_id, FAIL, "H5Gcreate1");
+ status = H5Gclose(tmp_id);
+ CHECK(status, FAIL, "H5Gclose");
+
+ /* Creation with all intermediate groups present must succeed */
+ tmp_id = H5Gcreate1(file_id, "/A/grp", (size_t)0);
+ CHECK(tmp_id, FAIL, "H5Gcreate1");
+ status = H5Gclose(tmp_id);
+ CHECK(status, FAIL, "H5Gclose");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Dcreate1(file_id, "/A/B00c/dset", type_id, space_id, create_id);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Dcreate1");
+
+ tmp_id = H5Dcreate1(file_id, "/A/dset", type_id, space_id, create_id);
+ CHECK(tmp_id, FAIL, "H5Dcreate1");
+ status = H5Dclose(tmp_id);
+ CHECK(status, FAIL, "H5Dclose");
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+
+ /**********************************************************************
+ * test H5Gcreate2()
+ **********************************************************************/
+
+ /* Create link creation property list */
+ create_id = H5Pcreate(H5P_LINK_CREATE);
+ CHECK(create_id, FAIL, "H5Pcreate");
+
+ /* Set flag for intermediate group creation */
+ status = H5Pset_create_intermediate_group(create_id, TRUE);
+ CHECK(status, FAIL, "H5Pset_create_intermediate_group");
+
+ tmp_id = H5Gcreate2(file_id, "/A/B01/grp", create_id, H5P_DEFAULT, access_id);
+ CHECK(tmp_id, FAIL, "H5Gcreate2");
+#ifndef NO_OBJECT_GET_NAME
+ /* Query that the name of the new group is correct */
+ namelen = H5Iget_name(tmp_id, objname, (size_t)MISC23_NAME_BUF_SIZE);
+ CHECK(namelen, FAIL, "H5Iget_name");
+ VERIFY_STR(objname, "/A/B01/grp", "H5Iget_name");
+#endif
+ status = H5Gclose(tmp_id);
+ CHECK(status, FAIL, "H5Gclose");
+
+ /* Check that intermediate group is set up correctly
+ * (exactly one hard link should reference it) */
+ tmp_id = H5Gopen2(file_id, "/A/B01", H5P_DEFAULT);
+ CHECK(tmp_id, FAIL, "H5Gopen2");
+
+ status = H5Oget_info3(tmp_id, &oinfo, H5O_INFO_BASIC);
+ CHECK(status, FAIL, "H5Oget_info3");
+ VERIFY(oinfo.rc, 1, "H5Oget_info3");
+
+ status = H5Gclose(tmp_id);
+ CHECK(status, FAIL, "H5Gclose");
+
+ /* Multiple missing intermediate groups in one path */
+ tmp_id = H5Gcreate2(file_id, "/A/B02/C02/grp", create_id, H5P_DEFAULT, access_id);
+ CHECK(tmp_id, FAIL, "H5Gcreate2");
+
+ status = H5Gclose(tmp_id);
+ CHECK(status, FAIL, "H5Gclose");
+
+ /* Relative path (with trailing slash) from an existing group */
+ tmp_id = H5Gcreate2(group_id, "B03/grp/", create_id, H5P_DEFAULT, access_id);
+ CHECK(tmp_id, FAIL, "H5Gcreate2");
+
+ status = H5Gclose(tmp_id);
+ CHECK(status, FAIL, "H5Gclose");
+
+ /* Absolute path used with a non-root location ID */
+ tmp_id = H5Gcreate2(group_id, "/A/B04/grp/", create_id, H5P_DEFAULT, access_id);
+ CHECK(tmp_id, FAIL, "H5Gcreate2");
+
+ status = H5Gclose(tmp_id);
+ CHECK(status, FAIL, "H5Gclose");
+
+ /* Path whose leaf name repeats an ancestor name ("A") */
+ tmp_id = H5Gcreate2(file_id, "/A/B05/C05/A", create_id, H5P_DEFAULT, access_id);
+ CHECK(tmp_id, FAIL, "H5Gcreate2");
+
+ status = H5Gclose(tmp_id);
+ CHECK(status, FAIL, "H5Gclose");
+
+ status = H5Pclose(create_id);
+ CHECK(status, FAIL, "H5Pclose");
+
+ /**********************************************************************
+ * test H5Dcreate2()
+ **********************************************************************/
+
+ /* Create link creation property list */
+ create_id = H5Pcreate(H5P_LINK_CREATE);
+ CHECK(create_id, FAIL, "H5Pcreate");
+
+ /* Set flag for intermediate group creation */
+ status = H5Pset_create_intermediate_group(create_id, TRUE);
+ CHECK(status, FAIL, "H5Pset_create_intermediate_group");
+
+ tmp_id = H5Dcreate2(file_id, "/A/B06/dset", type_id, space_id, create_id, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(tmp_id, FAIL, "H5Dcreate2");
+
+ status = H5Dclose(tmp_id);
+ CHECK(status, FAIL, "H5Dclose");
+
+ tmp_id = H5Dcreate2(file_id, "/A/B07/B07/dset", type_id, space_id, create_id, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(tmp_id, FAIL, "H5Dcreate2");
+
+ status = H5Dclose(tmp_id);
+ CHECK(status, FAIL, "H5Dclose");
+
+ tmp_id = H5Dcreate2(group_id, "B08/dset", type_id, space_id, create_id, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(tmp_id, FAIL, "H5Dcreate2");
+
+ status = H5Dclose(tmp_id);
+ CHECK(status, FAIL, "H5Dclose");
+
+ tmp_id = H5Dcreate2(group_id, "/A/B09/dset", type_id, space_id, create_id, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(tmp_id, FAIL, "H5Dcreate2");
+
+ status = H5Dclose(tmp_id);
+ CHECK(status, FAIL, "H5Dclose");
+
+ tmp_id = H5Dcreate2(file_id, "/A/B10/C10/A/dset", type_id, space_id, create_id, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(tmp_id, FAIL, "H5Dcreate2");
+
+ status = H5Dclose(tmp_id);
+ CHECK(status, FAIL, "H5Dclose");
+
+ status = H5Tclose(type_id);
+ CHECK(status, FAIL, "H5Tclose");
+
+ status = H5Sclose(space_id);
+ CHECK(status, FAIL, "H5Sclose");
+
+ status = H5Pclose(create_id);
+ CHECK(status, FAIL, "H5Pclose");
+
+ /**********************************************************************
+ * test H5Tcommit2()
+ **********************************************************************/
+
+ /* Create link creation property list */
+ create_id = H5Pcreate(H5P_LINK_CREATE);
+ CHECK(create_id, FAIL, "H5Pcreate");
+
+ /* Set flag for intermediate group creation */
+ status = H5Pset_create_intermediate_group(create_id, TRUE);
+ CHECK(status, FAIL, "H5Pset_create_intermediate_group");
+
+ tmp_id = H5Tcopy(H5T_NATIVE_INT16);
+ CHECK(tmp_id, FAIL, "H5Tcopy");
+
+ status = H5Tcommit2(file_id, "/A/B11/dtype", tmp_id, create_id, H5P_DEFAULT, access_id);
+ CHECK(status, FAIL, "H5Tcommit2");
+
+ status = H5Tclose(tmp_id);
+ CHECK(status, FAIL, "H5Tclose");
+
+ tmp_id = H5Tcopy(H5T_NATIVE_INT32);
+ CHECK(tmp_id, FAIL, "H5Tcopy");
+
+ status = H5Tcommit2(file_id, "/A/B12/C12/dtype", tmp_id, create_id, H5P_DEFAULT, access_id);
+ CHECK(status, FAIL, "H5Tcommit2");
+
+ status = H5Tclose(tmp_id);
+ CHECK(status, FAIL, "H5Tclose");
+
+ tmp_id = H5Tcopy(H5T_NATIVE_INT64);
+ CHECK(tmp_id, FAIL, "H5Tcopy");
+
+ status = H5Tcommit2(group_id, "B13/C12/dtype", tmp_id, create_id, H5P_DEFAULT, access_id);
+ CHECK(status, FAIL, "H5Tcommit2");
+
+ status = H5Tclose(tmp_id);
+ CHECK(status, FAIL, "H5Tclose");
+
+ tmp_id = H5Tcopy(H5T_NATIVE_FLOAT);
+ CHECK(tmp_id, FAIL, "H5Tcopy");
+
+ status = H5Tcommit2(group_id, "/A/B14/dtype", tmp_id, create_id, H5P_DEFAULT, access_id);
+ CHECK(status, FAIL, "H5Tcommit2");
+
+ status = H5Tclose(tmp_id);
+ CHECK(status, FAIL, "H5Tclose");
+
+ tmp_id = H5Tcopy(H5T_NATIVE_DOUBLE);
+ CHECK(tmp_id, FAIL, "H5Tcopy");
+
+ status = H5Tcommit2(file_id, "/A/B15/C15/A/dtype", tmp_id, create_id, H5P_DEFAULT, access_id);
+ CHECK(status, FAIL, "H5Tcommit2");
+
+ status = H5Tclose(tmp_id);
+ CHECK(status, FAIL, "H5Tclose");
+
+ status = H5Pclose(create_id);
+ CHECK(status, FAIL, "H5Pclose");
+
+ /**********************************************************************
+ * test H5Lcopy()
+ **********************************************************************/
+
+ /* Create link creation property list */
+ create_id = H5Pcreate(H5P_LINK_CREATE);
+ CHECK(create_id, FAIL, "H5Pcreate");
+
+ /* Set flag for intermediate group creation */
+ status = H5Pset_create_intermediate_group(create_id, TRUE);
+ CHECK(status, FAIL, "H5Pset_create_intermediate_group");
+
+ status = H5Lcopy(file_id, "/A/B01/grp", file_id, "/A/B16/grp", create_id, access_id);
+ CHECK(status, FAIL, "H5Lcopy");
+
+ /* Both the copy and the original link must exist afterwards */
+ tri_status = H5Lexists(file_id, "/A/B16/grp", access_id);
+ VERIFY(tri_status, TRUE, "H5Lexists");
+
+ tri_status = H5Lexists(file_id, "/A/B01/grp", access_id);
+ VERIFY(tri_status, TRUE, "H5Lexists");
+
+ /**********************************************************************
+ * test H5Lmove()
+ **********************************************************************/
+
+ status = H5Lmove(file_id, "/A/B16/grp", file_id, "/A/B17/grp", create_id, access_id);
+ CHECK(status, FAIL, "H5Lmove");
+
+ /* After a move, only the destination link should exist */
+ tri_status = H5Lexists(file_id, "/A/B17/grp", access_id);
+ VERIFY(tri_status, TRUE, "H5Lexists");
+
+ tri_status = H5Lexists(file_id, "/A/B16/grp", access_id);
+ VERIFY(tri_status, FALSE, "H5Lexists");
+
+ /**********************************************************************
+ * test H5Lcreate_hard()
+ **********************************************************************/
+
+ status = H5Lcreate_hard(file_id, "/A/B01/grp", file_id, "/A/B18/grp", create_id, access_id);
+ CHECK(status, FAIL, "H5Lcreate_hard");
+
+ tri_status = H5Lexists(file_id, "/A/B18/grp", access_id);
+ VERIFY(tri_status, TRUE, "H5Lexists");
+
+ /**********************************************************************
+ * test H5Lcreate_soft()
+ **********************************************************************/
+
+ status = H5Lcreate_soft("/A/B01/grp", file_id, "/A/B19/grp", create_id, access_id);
+ CHECK(status, FAIL, "H5Lcreate_soft");
+
+ tri_status = H5Lexists(file_id, "/A/B19/grp", access_id);
+ VERIFY(tri_status, TRUE, "H5Lexists");
+
+ /**********************************************************************
+ * test H5Lcreate_external()
+ **********************************************************************/
+#ifndef NO_EXTERNAL_LINKS
+ status = H5Lcreate_external("fake_filename", "fake_path", file_id, "/A/B20/grp", create_id, access_id);
+ CHECK(status, FAIL, "H5Lcreate_external");
+
+ tri_status = H5Lexists(file_id, "/A/B20/grp", access_id);
+ VERIFY(tri_status, TRUE, "H5Lexists");
+#endif
+ /**********************************************************************
+ * test H5Lcreate_ud()
+ **********************************************************************/
+#ifndef NO_USER_DEFINED_LINKS
+ status =
+ H5Lcreate_ud(file_id, "/A/B21/grp", H5L_TYPE_EXTERNAL, "file\0obj", (size_t)9, create_id, access_id);
+ CHECK(status, FAIL, "H5Lcreate_ud");
+
+ tri_status = H5Lexists(file_id, "/A/B21/grp", access_id);
+ VERIFY(tri_status, TRUE, "H5Lexists");
+#endif
+ /**********************************************************************
+ * close
+ **********************************************************************/
+
+ status = H5Pclose(create_id);
+ CHECK(status, FAIL, "H5Pclose");
+
+ status = H5Gclose(group_id);
+ CHECK(status, FAIL, "H5Gclose");
+
+ status = H5Fclose(file_id);
+ CHECK(status, FAIL, "H5Fclose");
+
+} /* end test_misc23() */
+
+/****************************************************************
+**
+** test_misc24(): Test opening objects with inappropriate APIs.
+** Each object kind (group, dataset, named datatype) is opened
+** with the two wrong H5*open2 calls — both directly and via a
+** soft link, and both before and while the object is already
+** open through the correct API — and every attempt must fail.
+** NOTE(review): the body is compiled out (#if 0) because the
+** test currently causes problems in HDF5; the code is kept so
+** it can be re-enabled.
+**
+****************************************************************/
+static void
+test_misc24(void)
+{
+#if 0
+ hid_t file_id = 0, group_id = 0, type_id = 0, space_id = 0, dset_id = 0, tmp_id = 0;
+ herr_t ret; /* Generic return value */
+#endif
+
+ /* Output message about test being performed */
+ MESSAGE(5,
+ ("Testing opening objects with inappropriate APIs - SKIPPED due to causing problems in HDF5\n"));
+#if 0
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate(MISC24_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(file_id, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset */
+ space_id = H5Screate(H5S_SCALAR);
+ CHECK(space_id, FAIL, "H5Screate");
+
+ /* Create group, dataset & named datatype objects */
+ group_id = H5Gcreate2(file_id, MISC24_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group_id, FAIL, "H5Gcreate2");
+
+ dset_id = H5Dcreate2(file_id, MISC24_DATASET_NAME, H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dcreate2");
+
+ type_id = H5Tcopy(H5T_NATIVE_INT);
+ CHECK(type_id, FAIL, "H5Tcopy");
+
+ ret = H5Tcommit2(file_id, MISC24_DATATYPE_NAME, type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /* Create soft links to the objects created */
+ ret = H5Lcreate_soft(MISC24_GROUP_NAME, file_id, MISC24_GROUP_LINK, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lcreate_soft");
+
+ ret = H5Lcreate_soft(MISC24_DATASET_NAME, file_id, MISC24_DATASET_LINK, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lcreate_soft");
+
+ ret = H5Lcreate_soft(MISC24_DATATYPE_NAME, file_id, MISC24_DATATYPE_LINK, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lcreate_soft");
+
+ /* Close IDs for objects */
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Sclose(space_id);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Gclose(group_id);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ ret = H5Tclose(type_id);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Attempt to open each kind of object with wrong API, including using soft links */
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Dopen2(file_id, MISC24_GROUP_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Dopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Dopen2(file_id, MISC24_GROUP_LINK, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Dopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Topen2(file_id, MISC24_GROUP_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Topen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Topen2(file_id, MISC24_GROUP_LINK, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Topen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Gopen2(file_id, MISC24_DATASET_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Gopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Gopen2(file_id, MISC24_DATASET_LINK, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Gopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Topen2(file_id, MISC24_DATASET_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Topen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Topen2(file_id, MISC24_DATASET_LINK, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Topen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Gopen2(file_id, MISC24_DATATYPE_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Gopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Gopen2(file_id, MISC24_DATATYPE_LINK, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Gopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Dopen2(file_id, MISC24_DATATYPE_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Dopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Dopen2(file_id, MISC24_DATATYPE_LINK, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Dopen2");
+
+ /* Try again, with the object already open through valid call */
+ /* Open group */
+ group_id = H5Gopen2(file_id, MISC24_GROUP_NAME, H5P_DEFAULT);
+ CHECK(group_id, FAIL, "H5Gopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Dopen2(file_id, MISC24_GROUP_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Dopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Dopen2(file_id, MISC24_GROUP_LINK, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Dopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Topen2(file_id, MISC24_GROUP_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Topen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Topen2(file_id, MISC24_GROUP_LINK, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Topen2");
+
+ ret = H5Gclose(group_id);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Open dataset */
+ dset_id = H5Dopen2(file_id, MISC24_DATASET_NAME, H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Gopen2(file_id, MISC24_DATASET_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Gopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Gopen2(file_id, MISC24_DATASET_LINK, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Gopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Topen2(file_id, MISC24_DATASET_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Topen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Topen2(file_id, MISC24_DATASET_LINK, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Topen2");
+
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Open named datatype */
+ type_id = H5Topen2(file_id, MISC24_DATATYPE_NAME, H5P_DEFAULT);
+ /* Check the opened ID, not the stale 'ret' from the previous call */
+ CHECK(type_id, FAIL, "H5Topen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Gopen2(file_id, MISC24_DATATYPE_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Gopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Gopen2(file_id, MISC24_DATATYPE_LINK, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Gopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Dopen2(file_id, MISC24_DATATYPE_NAME, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Dopen2");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_id = H5Dopen2(file_id, MISC24_DATATYPE_LINK, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_id, FAIL, "H5Dopen2");
+
+ ret = H5Tclose(type_id);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close file */
+ ret = H5Fclose(file_id);
+ CHECK(ret, FAIL, "H5Fclose");
+#endif
+} /* end test_misc24() */
+
+/****************************************************************
+**
+** test_misc25a(): Exercise null object header message merge bug
+** with new file.
+** Repeatedly closes/reopens the file between attribute
+** create/delete operations of carefully chosen sizes so
+** that deleted attribute messages leave "null" header
+** messages that must be merged/reused correctly. The
+** exact sequence (and the attribute size relationships)
+** is the bug reproduction — do not reorder.
+**
+****************************************************************/
+static void
+test_misc25a(void)
+{
+ hid_t fid; /* File ID */
+ hid_t gid, gid2, gid3; /* Group IDs */
+ hid_t aid; /* Attribute ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t tid; /* Datatype ID */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Exercise null object header message bug\n"));
+
+ /* Create file */
+ fid = H5Fcreate(MISC25A_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create top group */
+ gid = H5Gcreate2(fid, MISC25A_GROUP0_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(gid, FAIL, "H5Gcreate2");
+
+ /* Close top group */
+ ret = H5Gclose(gid);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Create first group */
+ gid = H5Gcreate2(fid, MISC25A_GROUP1_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(gid, FAIL, "H5Gcreate2");
+
+ /* Close first group */
+ ret = H5Gclose(gid);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Create second group */
+ gid2 = H5Gcreate2(fid, MISC25A_GROUP2_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(gid2, FAIL, "H5Gcreate2");
+
+ /* Close second group */
+ ret = H5Gclose(gid2);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open file (forces the object headers to be re-read from disk) */
+ fid = H5Fopen(MISC25A_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Re-open first group */
+ gid = H5Gopen2(fid, MISC25A_GROUP1_NAME, H5P_DEFAULT);
+ CHECK(gid, FAIL, "H5Gopen2");
+
+ /* Create dataspace for attribute */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Create dataype for attribute (fixed-size string) */
+ tid = H5Tcopy(H5T_C_S1);
+ CHECK(tid, FAIL, "H5Tcopy");
+ ret = H5Tset_size(tid, (size_t)MISC25A_ATTR1_LEN);
+ CHECK(ret, FAIL, "H5Tset_size");
+
+ /* Add 1st attribute on first group */
+ aid = H5Acreate2(gid, MISC25A_ATTR1_NAME, tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(aid, FAIL, "H5Acreate2");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close datatype */
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close attribute */
+ ret = H5Aclose(aid);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Create dataspace for 2nd attribute */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Create dataype for attribute */
+ tid = H5Tcopy(H5T_C_S1);
+ CHECK(tid, FAIL, "H5Tcopy");
+ ret = H5Tset_size(tid, (size_t)MISC25A_ATTR2_LEN);
+ CHECK(ret, FAIL, "H5Tset_size");
+
+ /* Add 2nd attribute on first group */
+ aid = H5Acreate2(gid, MISC25A_ATTR2_NAME, tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(aid, FAIL, "H5Acreate2");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close datatype */
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close 2nd attribute */
+ ret = H5Aclose(aid);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close first group */
+ ret = H5Gclose(gid);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open file */
+ fid = H5Fopen(MISC25A_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Create third group */
+ gid3 = H5Gcreate2(fid, MISC25A_GROUP3_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(gid3, FAIL, "H5Gcreate2");
+
+ /* Close third group */
+ ret = H5Gclose(gid3);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Re-open first group */
+ gid = H5Gopen2(fid, MISC25A_GROUP1_NAME, H5P_DEFAULT);
+ CHECK(gid, FAIL, "H5Gopen2");
+
+ /* Delete 2nd attribute (leaves a null message in the object header) */
+ ret = H5Adelete(gid, MISC25A_ATTR2_NAME);
+ CHECK(ret, FAIL, "H5Adelete");
+
+ /* Close first group */
+ ret = H5Gclose(gid);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open file */
+ fid = H5Fopen(MISC25A_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Re-open first group */
+ gid = H5Gopen2(fid, MISC25A_GROUP1_NAME, H5P_DEFAULT);
+ CHECK(gid, FAIL, "H5Gopen2");
+
+ /* Create dataspace for 3rd attribute */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Create dataype for attribute */
+ tid = H5Tcopy(H5T_C_S1);
+ CHECK(tid, FAIL, "H5Tcopy");
+ ret = H5Tset_size(tid, (size_t)MISC25A_ATTR3_LEN);
+ CHECK(ret, FAIL, "H5Tset_size");
+
+ /* Add 3rd attribute on first group (smaller than 2nd attribute) */
+ aid = H5Acreate2(gid, MISC25A_ATTR3_NAME, tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(aid, FAIL, "H5Acreate2");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close datatype */
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close 3rd attribute */
+ ret = H5Aclose(aid);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close first group */
+ ret = H5Gclose(gid);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open file */
+ fid = H5Fopen(MISC25A_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Re-open first group */
+ gid = H5Gopen2(fid, MISC25A_GROUP1_NAME, H5P_DEFAULT);
+ CHECK(gid, FAIL, "H5Gopen2");
+
+ /* Delete 3rd attribute */
+ ret = H5Adelete(gid, MISC25A_ATTR3_NAME);
+ CHECK(ret, FAIL, "H5Adelete");
+
+ /* Create dataspace for 3rd attribute */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Create dataype for attribute */
+ tid = H5Tcopy(H5T_C_S1);
+ CHECK(tid, FAIL, "H5Tcopy");
+ ret = H5Tset_size(tid, (size_t)MISC25A_ATTR2_LEN);
+ CHECK(ret, FAIL, "H5Tset_size");
+
+ /* Re-create 2nd attribute on first group */
+ aid = H5Acreate2(gid, MISC25A_ATTR2_NAME, tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(aid, FAIL, "H5Acreate2");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close datatype */
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close 2nd attribute */
+ ret = H5Aclose(aid);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close first group */
+ ret = H5Gclose(gid);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open file */
+ fid = H5Fopen(MISC25A_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Re-open first group */
+ gid = H5Gopen2(fid, MISC25A_GROUP1_NAME, H5P_DEFAULT);
+ CHECK(gid, FAIL, "H5Gopen2");
+
+ /* Delete 2nd attribute */
+ ret = H5Adelete(gid, MISC25A_ATTR2_NAME);
+ CHECK(ret, FAIL, "H5Adelete");
+
+ /* Close first group */
+ ret = H5Gclose(gid);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open file */
+ fid = H5Fopen(MISC25A_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Re-open first group */
+ gid = H5Gopen2(fid, MISC25A_GROUP1_NAME, H5P_DEFAULT);
+ CHECK(gid, FAIL, "H5Gopen2");
+
+ /* Create dataspace for 3rd attribute */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate");
+
+ /* Create dataype for attribute */
+ tid = H5Tcopy(H5T_C_S1);
+ CHECK(tid, FAIL, "H5Tcopy");
+ ret = H5Tset_size(tid, (size_t)MISC25A_ATTR2_LEN);
+ CHECK(ret, FAIL, "H5Tset_size");
+
+ /* Re-create 2nd attribute on first group */
+ aid = H5Acreate2(gid, MISC25A_ATTR2_NAME, tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(aid, FAIL, "H5Acreate2");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close datatype */
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close 2nd attribute */
+ ret = H5Aclose(aid);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close first group */
+ ret = H5Gclose(gid);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* end test_misc25a() */
+
+/****************************************************************
+**
+** test_misc25b(): Exercise null object header message merge bug
+** with existing file (This test relies on
+** the file produced by test/gen_mergemsg.c)
+** NOTE(review): currently compiled out (#if 0); opening the
+** pre-generated group is itself the regression check, so the
+** body must be kept intact for re-enabling.
+**
+****************************************************************/
+#if 0
+static void
+test_misc25b(void)
+{
+ hid_t fid; /* File ID */
+ hid_t gid; /* Group ID */
+ const char *testfile = H5_get_srcdir_filename(MISC25B_FILE); /* Corrected test file name */
+ hbool_t driver_is_default_compatible;
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Exercise null object header message bug\n"));
+
+ /* Pre-generated files require a VFD compatible with the default one */
+ ret = h5_driver_is_default_vfd_compatible(H5P_DEFAULT, &driver_is_default_compatible);
+ CHECK(ret, FAIL, "h5_driver_is_default_vfd_compatible");
+
+ if (!driver_is_default_compatible) {
+ HDprintf("-- SKIPPED --\n");
+ return;
+ }
+
+ /* Open file */
+ fid = H5Fopen(testfile, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* Re-open group with object header messages that will merge */
+ gid = H5Gopen2(fid, MISC25B_GROUP, H5P_DEFAULT);
+ CHECK(gid, FAIL, "H5Gopen2");
+
+ /* Close first group */
+ ret = H5Gclose(gid);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* end test_misc25b() */
+#endif
+
+/****************************************************************
+**
+** test_misc25c(): Exercise another null object header message merge bug.
+**
+****************************************************************/
static void
test_misc25c(void)
{
    hid_t  fid;  /* File ID */
    hid_t  fapl; /* File access property list ID */
    hid_t  gcpl; /* Group creation property list ID */
    hid_t  sid;  /* Dataspace ID */
    hid_t  did;  /* Dataset ID */
    hid_t  gid;  /* Group ID */
    hid_t  gid2; /* Group ID */
    hid_t  aid;  /* Attribute ID */
    herr_t ret;  /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Exercise another null object header message bug\n"));

    /* NOTE(review): the exact sequence of creates/closes below is what
     * reproduces the null object header message merge bug -- do not
     * reorder these operations.
     */

    /* Compose file access property list */
    fapl = H5Pcreate(H5P_FILE_ACCESS);
    CHECK(fapl, FAIL, "H5Pcreate");
    /* Latest format is required to get the object header layout that
     * triggered the original failure */
    ret = H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
    CHECK(ret, FAIL, "H5Pset_libver_bounds");

    /* Create the file */
    fid = H5Fcreate(MISC25C_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(fid, FAIL, "H5Fcreate");

    /* Compose group creation property list */
    gcpl = H5Pcreate(H5P_GROUP_CREATE);
    CHECK(gcpl, FAIL, "H5Pcreate");
    ret = H5Pset_link_creation_order(gcpl, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED));
    CHECK(ret, FAIL, "H5Pset_link_creation_order");
    ret = H5Pset_attr_creation_order(gcpl, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED));
    CHECK(ret, FAIL, "H5Pset_attr_creation_order");
    /* Estimated link info values -- presumably tuned to produce the
     * header message sizes that trigger the bug; TODO confirm */
    ret = H5Pset_est_link_info(gcpl, 1, 18);
    CHECK(ret, FAIL, "H5Pset_est_link_info");

    /* Create a group for the dataset */
    gid = H5Gcreate2(fid, MISC25C_DSETGRPNAME, H5P_DEFAULT, gcpl, H5P_DEFAULT);
    CHECK(gid, FAIL, "H5Gcreate2");

    /* Create the dataspace (scalar, shared by the dataset and both attributes) */
    sid = H5Screate(H5S_SCALAR);
    CHECK(sid, FAIL, "H5Screate");

    /* Create the dataset */
    did = H5Dcreate2(gid, MISC25C_DSETNAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(did, FAIL, "H5Dcreate2");

    /* Create an extra group */
    gid2 = H5Gcreate2(fid, MISC25C_GRPNAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(gid2, FAIL, "H5Gcreate2");

    /* Close the extra group */
    ret = H5Gclose(gid2);
    CHECK(ret, FAIL, "H5Gclose");

    /* Add an attribute to the dataset group */
    aid = H5Acreate2(gid, MISC25C_ATTRNAME, H5T_NATIVE_CHAR, sid, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(aid, FAIL, "H5Acreate2");

    /* Close the attribute */
    ret = H5Aclose(aid);
    CHECK(ret, FAIL, "H5Aclose");

    /* Create a second extra group */
    gid2 = H5Gcreate2(fid, MISC25C_GRPNAME2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(gid2, FAIL, "H5Gcreate2");

    /* Close the second extra group */
    ret = H5Gclose(gid2);
    CHECK(ret, FAIL, "H5Gclose");

    /* Add second attribute to the dataset group */
    aid = H5Acreate2(gid, MISC25C_ATTRNAME2, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(aid, FAIL, "H5Acreate2");

    /* Close the attribute */
    ret = H5Aclose(aid);
    CHECK(ret, FAIL, "H5Aclose");

    /* Close the dataset */
    ret = H5Dclose(did);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close the dataset group */
    ret = H5Gclose(gid);
    CHECK(ret, FAIL, "H5Gclose");

    /* Close the dataspace */
    ret = H5Sclose(sid);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close the file */
    ret = H5Fclose(fid);
    CHECK(ret, FAIL, "H5Fclose");

    /* Close the property lists */
    ret = H5Pclose(fapl);
    CHECK(ret, FAIL, "H5Pclose");
    ret = H5Pclose(gcpl);
    CHECK(ret, FAIL, "H5Pclose");

    /* Re-open the file so the object headers are read back from disk
     * before being modified */
    fid = H5Fopen(MISC25C_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
    CHECK(fid, FAIL, "H5Fopen");

    /* Re-open the dataset group */
    gid = H5Gopen2(fid, MISC25C_DSETGRPNAME, H5P_DEFAULT);
    CHECK(gid, FAIL, "H5Gopen2");

    /* Rename the dataset */
    ret = H5Lmove(gid, MISC25C_DSETNAME, H5L_SAME_LOC, MISC25C_DSETNAME2, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Lmove");

    /* Delete the first attribute (creates the null header message that
     * exercised the merge bug) */
    ret = H5Adelete(gid, MISC25C_ATTRNAME);
    CHECK(ret, FAIL, "H5Adelete");

    /* Close the dataset group */
    ret = H5Gclose(gid);
    CHECK(ret, FAIL, "H5Gclose");

    /* Close the file */
    ret = H5Fclose(fid);
    CHECK(ret, FAIL, "H5Fclose");
} /* end test_misc25c() */
+
+/****************************************************************
+**
+** test_misc26(): Regression test: ensure that copying filter
+** pipelines works properly.
+**
+****************************************************************/
+static void
+test_misc26(void)
+{
+ hid_t fid; /* File ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t did; /* Dataset ID */
+ hid_t dcpl1, dcpl2, dcpl3; /* Property List IDs */
+ hsize_t dims[] = {1};
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Copying filter pipelines\n"));
+
+ /* Create the property list. It needs chunking so we can add filters */
+ dcpl1 = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK_I(dcpl1, "H5Pcreate");
+ ret = H5Pset_chunk(dcpl1, 1, dims);
+ CHECK_I(ret, "H5Pset_chunk");
+
+ /* Add a filter with a data value to the property list */
+ ret = H5Pset_deflate(dcpl1, 1);
+ CHECK_I(ret, "H5Pset_deflate");
+
+ /* Copy the property list */
+ dcpl2 = H5Pcopy(dcpl1);
+ CHECK_I(dcpl2, "H5Pcopy");
+
+ /* Add a filter with no data values to the copy */
+ ret = H5Pset_shuffle(dcpl2);
+ CHECK_I(ret, "H5Pset_shuffle");
+
+ /* Copy the copy */
+ dcpl3 = H5Pcopy(dcpl2);
+ CHECK_I(dcpl3, "H5Pcopy");
+
+ /* Add another filter */
+ ret = H5Pset_deflate(dcpl3, 2);
+ CHECK_I(ret, "H5Pset_deflate");
+
+ /* Create a new file and datasets within that file that use these
+ * property lists
+ */
+ fid = H5Fcreate(MISC26_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ sid = H5Screate_simple(1, dims, dims);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ did = H5Dcreate2(fid, "dataset1", H5T_NATIVE_FLOAT, sid, H5P_DEFAULT, dcpl1, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dcreate2");
+ ret = H5Dclose(did);
+ CHECK_I(ret, "H5Dclose");
+
+ did = H5Dcreate2(fid, "dataset2", H5T_NATIVE_FLOAT, sid, H5P_DEFAULT, dcpl2, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dcreate2");
+ ret = H5Dclose(did);
+ CHECK_I(ret, "H5Dclose");
+
+ did = H5Dcreate2(fid, "dataset3", H5T_NATIVE_FLOAT, sid, H5P_DEFAULT, dcpl3, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dcreate2");
+ ret = H5Dclose(did);
+ CHECK_I(ret, "H5Dclose");
+
+ /* Close the dataspace and file */
+ ret = H5Sclose(sid);
+ CHECK_I(ret, "H5Sclose");
+ ret = H5Fclose(fid);
+ CHECK_I(ret, "H5Fclose");
+
+ /* Close the property lists. */
+ ret = H5Pclose(dcpl1);
+ CHECK_I(ret, "H5Pclose");
+ ret = H5Pclose(dcpl2);
+ CHECK_I(ret, "H5Pclose");
+ ret = H5Pclose(dcpl3);
+ CHECK_I(ret, "H5Pclose");
+}
+
+/****************************************************************
+**
+** test_misc27(): Ensure that objects with incorrect # of object
+** header messages are handled appropriately.
+**
+** (Note that this test file is generated by the "gen_bad_ohdr.c" code)
+**
+****************************************************************/
#if 0
static void
test_misc27(void)
{
    hid_t       fid;                                            /* File ID */
    hid_t       gid;                                            /* Group ID */
    const char *testfile = H5_get_srcdir_filename(MISC27_FILE); /* Corrected test file name */
    hbool_t     driver_is_default_compatible;
    herr_t      ret; /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Corrupt object header handling\n"));

    /* The pre-generated corrupt file only makes sense with a VFD that
     * produces/reads the default file format */
    ret = h5_driver_is_default_vfd_compatible(H5P_DEFAULT, &driver_is_default_compatible);
    CHECK(ret, FAIL, "h5_driver_is_default_vfd_compatible");

    if (!driver_is_default_compatible) {
        HDprintf("-- SKIPPED --\n");
        return;
    }

    /* Open the file */
    fid = H5Fopen(testfile, H5F_ACC_RDONLY, H5P_DEFAULT);
    CHECK(fid, FAIL, "H5Fopen");

#ifdef H5_STRICT_FORMAT_CHECKS
    /* Open group with incorrect # of object header messages (should fail) */
    H5E_BEGIN_TRY
    {
        gid = H5Gopen2(fid, MISC27_GROUP, H5P_DEFAULT);
    }
    H5E_END_TRY;
    VERIFY(gid, FAIL, "H5Gopen2");
#else  /* H5_STRICT_FORMAT_CHECKS */
    /* Open group with incorrect # of object header messages
     * (without strict checks, the library tolerates the corruption) */
    gid = H5Gopen2(fid, MISC27_GROUP, H5P_DEFAULT);
    CHECK(gid, FAIL, "H5Gopen2");

    /* Close group */
    ret = H5Gclose(gid);
    CHECK(ret, FAIL, "H5Gclose");
#endif /* H5_STRICT_FORMAT_CHECKS */

    /* Close file */
    ret = H5Fclose(fid);
    CHECK(ret, FAIL, "H5Fclose");
} /* end test_misc27() */
#endif
+
+/****************************************************************
+**
+** test_misc28(): Ensure that the dataset chunk cache will hold
+** the correct number of chunks in cache without
+** evicting them.
+**
+****************************************************************/
static void
test_misc28(void)
{
    hid_t   fid;        /* File ID */
    hid_t   sidf;       /* File Dataspace ID */
    hid_t   sidm;       /* Memory Dataspace ID */
    hid_t   did;        /* Dataset ID */
    hid_t   dcpl, fapl; /* Property List IDs */
    hsize_t dims[]  = {MISC28_SIZE, MISC28_SIZE};
    hsize_t mdims[] = {MISC28_SIZE};
    hsize_t cdims[] = {1, 1};
    hsize_t start[] = {0, 0};
    hsize_t count[] = {MISC28_SIZE, 1};
#if 0
    size_t  nbytes_used;
    int     nused;
#endif
    char   buf[MISC28_SIZE];
    int    i;
    herr_t ret; /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Dataset chunk cache\n"));

    /* Create the fapl and set the cache size. Set nelmts to larger than the
     * file size so we can be guaranteed that no chunks will be evicted due to
     * a hash collision. Set nbytes to fit exactly 1 column of chunks (10
     * bytes). */
    fapl = H5Pcreate(H5P_FILE_ACCESS);
    CHECK(fapl, FAIL, "H5Pcreate");
    ret = H5Pset_cache(fapl, MISC28_NSLOTS, MISC28_NSLOTS, MISC28_SIZE, 0.75);
    CHECK(ret, FAIL, "H5Pset_cache");

    /* Create the dcpl and set the chunk size (1x1, so one chunk per element) */
    dcpl = H5Pcreate(H5P_DATASET_CREATE);
    CHECK(dcpl, FAIL, "H5Pcreate");
    ret = H5Pset_chunk(dcpl, 2, cdims);
    CHECK(ret, FAIL, "H5Pset_chunk");

    /* Create a new file and datasets within that file that use these
     * property lists
     */
    fid = H5Fcreate(MISC28_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(fid, FAIL, "H5Fcreate");

    sidf = H5Screate_simple(2, dims, NULL);
    CHECK(sidf, FAIL, "H5Screate_simple");

    did = H5Dcreate2(fid, "dataset", H5T_NATIVE_CHAR, sidf, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    CHECK(did, FAIL, "H5Dcreate2");
#if 0
    /* Verify that the chunk cache is empty */
    /* (Disabled: H5D__current_cache_size_test() is an internal test
     * routine not available to this build -- TODO confirm) */
    ret = H5D__current_cache_size_test(did, &nbytes_used, &nused);
    CHECK(ret, FAIL, "H5D__current_cache_size_test");
    VERIFY(nbytes_used, (size_t)0, "H5D__current_cache_size_test");
    VERIFY(nused, 0, "H5D__current_cache_size_test");
#endif
    /* Initialize write buffer */
    for (i = 0; i < MISC28_SIZE; i++)
        buf[i] = (char)i;

    /* Create memory dataspace and selection in file dataspace
     * (one full column of the 2-D dataset) */
    sidm = H5Screate_simple(1, mdims, NULL);
    CHECK(sidm, FAIL, "H5Screate_simple");

    ret = H5Sselect_hyperslab(sidf, H5S_SELECT_SET, start, NULL, count, NULL);
    CHECK(ret, FAIL, "H5Sselect_hyperslab");

    /* Write hyperslab */
    ret = H5Dwrite(did, H5T_NATIVE_CHAR, sidm, sidf, H5P_DEFAULT, buf);
    CHECK(ret, FAIL, "H5Dwrite");
#if 0
    /* Verify that all 10 chunks written have been cached */
    ret = H5D__current_cache_size_test(did, &nbytes_used, &nused);
    CHECK(ret, FAIL, "H5D__current_cache_size_test");
    VERIFY(nbytes_used, (size_t)MISC28_SIZE, "H5D__current_cache_size_test");
    VERIFY(nused, MISC28_SIZE, "H5D__current_cache_size_test");
#endif
    /* Initialize write buffer (reversed values, so the second column is
     * distinguishable from the first on read-back) */
    for (i = 0; i < MISC28_SIZE; i++)
        buf[i] = (char)(MISC28_SIZE - 1 - i);

    /* Select new hyperslab (second column) */
    start[1] = 1;
    ret      = H5Sselect_hyperslab(sidf, H5S_SELECT_SET, start, NULL, count, NULL);
    CHECK(ret, FAIL, "H5Sselect_hyperslab");

    /* Write hyperslab */
    ret = H5Dwrite(did, H5T_NATIVE_CHAR, sidm, sidf, H5P_DEFAULT, buf);
    CHECK(ret, FAIL, "H5Dwrite");
#if 0
    /* Verify that the size of the cache remains at 10 */
    ret = H5D__current_cache_size_test(did, &nbytes_used, &nused);
    CHECK(ret, FAIL, "H5D__current_cache_size_test");
    VERIFY(nbytes_used, (size_t)MISC28_SIZE, "H5D__current_cache_size_test");
    VERIFY(nused, MISC28_SIZE, "H5D__current_cache_size_test");
#endif
    /* Close dataset */
    ret = H5Dclose(did);
    CHECK(ret, FAIL, "H5Dclose");

    /* Re-open dataset (resets its chunk cache) */
    did = H5Dopen2(fid, "dataset", H5P_DEFAULT);
    CHECK(did, FAIL, "H5Dopen2");
#if 0
    /* Verify that the chunk cache is empty */
    ret = H5D__current_cache_size_test(did, &nbytes_used, &nused);
    CHECK(ret, FAIL, "H5D__current_cache_size_test");
    VERIFY(nbytes_used, (size_t)0, "H5D__current_cache_size_test");
    VERIFY(nused, 0, "H5D__current_cache_size_test");
#endif
    /* Select hyperslab for reading (first column) */
    start[1] = 0;
    ret      = H5Sselect_hyperslab(sidf, H5S_SELECT_SET, start, NULL, count, NULL);
    CHECK(ret, FAIL, "H5Sselect_hyperslab");

    /* Read hyperslab */
    ret = H5Dread(did, H5T_NATIVE_CHAR, sidm, sidf, H5P_DEFAULT, buf);
    CHECK(ret, FAIL, "H5Dread");

    /* Verify the data read */
    for (i = 0; i < MISC28_SIZE; i++)
        VERIFY(buf[i], i, "H5Dread");
#if 0
    /* Verify that all 10 chunks read have been cached */
    ret = H5D__current_cache_size_test(did, &nbytes_used, &nused);
    CHECK(ret, FAIL, "H5D__current_cache_size_test");
    VERIFY(nbytes_used, (size_t)MISC28_SIZE, "H5D__current_cache_size_test");
    VERIFY(nused, MISC28_SIZE, "H5D__current_cache_size_test");
#endif
    /* Select new hyperslab (second column) */
    start[1] = 1;
    ret      = H5Sselect_hyperslab(sidf, H5S_SELECT_SET, start, NULL, count, NULL);
    CHECK(ret, FAIL, "H5Sselect_hyperslab");

    /* Read hyperslab */
    ret = H5Dread(did, H5T_NATIVE_CHAR, sidm, sidf, H5P_DEFAULT, buf);
    CHECK(ret, FAIL, "H5Dread");

    /* Verify the data read */
    for (i = 0; i < MISC28_SIZE; i++)
        VERIFY(buf[i], MISC28_SIZE - 1 - i, "H5Dread");
#if 0
    /* Verify that the size of the cache remains at 10 */
    ret = H5D__current_cache_size_test(did, &nbytes_used, &nused);
    CHECK(ret, FAIL, "H5D__current_cache_size_test");
    VERIFY(nbytes_used, (size_t)MISC28_SIZE, "H5D__current_cache_size_test");
    VERIFY(nused, MISC28_SIZE, "H5D__current_cache_size_test");
#endif
    /* Close dataset */
    ret = H5Dclose(did);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close the dataspaces and file */
    ret = H5Sclose(sidf);
    CHECK_I(ret, "H5Sclose");
    ret = H5Sclose(sidm);
    CHECK_I(ret, "H5Sclose");
    ret = H5Fclose(fid);
    CHECK_I(ret, "H5Fclose");

    /* Close the property lists. */
    ret = H5Pclose(dcpl);
    CHECK_I(ret, "H5Pclose");
    ret = H5Pclose(fapl);
    CHECK_I(ret, "H5Pclose");
} /* end test_misc28() */
+
+/****************************************************************
+**
+** test_misc29(): Ensure that speculative metadata reads don't
+** get raw data into the metadata accumulator.
+**
+****************************************************************/
#if 0
static void
test_misc29(void)
{
    hbool_t driver_is_default_compatible;
    hid_t   fid; /* File ID */
    herr_t  ret; /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Speculative metadata reads\n"));

    /* The pre-generated data file requires a VFD compatible with the
     * default file format */
    ret = h5_driver_is_default_vfd_compatible(H5P_DEFAULT, &driver_is_default_compatible);
    CHECK(ret, FAIL, "h5_driver_is_default_vfd_compatible");

    if (!driver_is_default_compatible) {
        HDprintf("-- SKIPPED --\n");
        return;
    }

    /* Make a copy of the data file from svn.
     * (Copy, because the test modifies the file below.) */
    ret = h5_make_local_copy(MISC29_ORIG_FILE, MISC29_COPY_FILE);
    CHECK(ret, -1, "h5_make_local_copy");

    /* Open the copied file */
    fid = H5Fopen(MISC29_COPY_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
    CHECK(fid, FAIL, "H5Fopen");

    /* Delete the last dataset -- exercises the metadata accumulator path
     * that previously picked up raw data via speculative reads */
    ret = H5Ldelete(fid, MISC29_DSETNAME, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Ldelete");

    /* Close the file */
    ret = H5Fclose(fid);
    CHECK(ret, FAIL, "H5Fclose");
} /* end test_misc29() */
#endif
+
#if 0
/* Link-iteration callback: query basic object metadata for each link */
static int
test_misc30_get_info_cb(hid_t loc_id, const char *name, const H5L_info2_t H5_ATTR_UNUSED *info,
                        void H5_ATTR_UNUSED *op_data)
{
    H5O_info2_t object_info;
    int         status;

    /* Propagate the query's success/failure back to H5Literate2 */
    status = (int)H5Oget_info_by_name3(loc_id, name, &object_info, H5O_INFO_BASIC, H5P_DEFAULT);

    return status;
}

/* Iterate over all links under 'loc_id' in name order, fetching info for each */
static int
test_misc30_get_info(hid_t loc_id)
{
    return H5Literate2(loc_id, H5_INDEX_NAME, H5_ITER_INC, NULL, test_misc30_get_info_cb, NULL);
}
#endif
+
+/****************************************************************
+**
+** test_misc30(): Exercise local heap code that loads prefix
+** separately from data block, causing the free
+** block information to get lost.
+**
+****************************************************************/
#if 0
static void
test_misc30(void)
{
    hsize_t  file_size[] = {0, 0}; /* Sizes of file created (without/with get-info calls) */
    unsigned get_info;             /* Whether to perform the get info call */

    /* Output message about test being performed */
    MESSAGE(5, ("Local heap dropping free block info\n"));

    /* Run the same group-creation workload twice: once without and once
     * with the link iteration / get-info calls that force the local heap
     * prefix to be loaded separately from its data block.  If the free
     * block info is dropped, the second run wastes heap space and the
     * final file sizes differ.
     */
    for (get_info = FALSE; get_info <= TRUE; get_info++) {
        hid_t  fid; /* File ID */
        hid_t  gid; /* Group ID */
        int    i;   /* Local index counter */
        herr_t ret; /* Generic return value */

        fid = H5Fcreate(MISC30_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        CHECK(fid, FAIL, "H5Fcreate");
        gid = H5Gcreate2(fid, "/g0", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        CHECK(gid, FAIL, "H5Gcreate2");

        ret = H5Gclose(gid);
        CHECK(ret, FAIL, "H5Gclose");
        ret = H5Fclose(fid);
        CHECK(ret, FAIL, "H5Fclose");

        /* Re-open the file each iteration so the local heap is re-read
         * from disk before the next group is added */
        for (i = 0; i < 20; i++) {
            char gname[32];

            fid = H5Fopen(MISC30_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
            CHECK(fid, FAIL, "H5Fopen");

            if (get_info) {
                ret = test_misc30_get_info(fid);
                CHECK(ret, FAIL, "test_misc30_get_info");
            }

            HDsnprintf(gname, sizeof(gname), "/g0/group%d", i);
            gid = H5Gcreate2(fid, gname, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
            CHECK(gid, FAIL, "H5Gcreate2");

            ret = H5Gclose(gid);
            CHECK(ret, FAIL, "H5Gclose");
            ret = H5Fclose(fid);
            CHECK(ret, FAIL, "H5Fclose");
        }

        fid = H5Fopen(MISC30_FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
        CHECK(fid, FAIL, "H5Fopen");
        ret = H5Fget_filesize(fid, &file_size[get_info]);
        /* Fixed: previously checked 'fid' instead of 'ret' here */
        CHECK(ret, FAIL, "H5Fget_filesize");
        ret = H5Fclose(fid);
        CHECK(ret, FAIL, "H5Fclose");
    }

    /* Both runs must produce identically-sized files */
    VERIFY(file_size[0], file_size[1], "test_misc30");
} /* end test_misc30() */
#endif
+
+/****************************************************************
+**
+** test_misc31(): Test reentering library through deprecated
+**                routines that register an id after calling
+**                H5close().
+**
+****************************************************************/
#if 0
static void
test_misc31(void)
{
#ifndef H5_NO_DEPRECATED_SYMBOLS
    hid_t  file_id;  /* File id */
    hid_t  space_id; /* Dataspace id */
    hid_t  dset_id;  /* Dataset id */
    hid_t  attr_id;  /* Attribute id */
    hid_t  group_id; /* Group id */
    hid_t  dtype_id; /* Datatype id */
    herr_t ret;      /* Generic return value */
#endif /* H5_NO_DEPRECATED_SYMBOLS */

    /* Output message about test being performed */
    MESSAGE(5, ("Deprecated routines initialize after H5close()\n"));

    /* Each package below is exercised by: creating an object via a
     * deprecated (v1) routine, calling H5close() to shut the library
     * down, then calling another deprecated routine -- which must
     * transparently re-initialize the library.  Open IDs are deliberately
     * not closed before H5close(); the library cleans them up itself.
     */
#ifndef H5_NO_DEPRECATED_SYMBOLS
    file_id = H5Fcreate(MISC31_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(file_id, FAIL, "H5Fcreate");

    /* Test dataset package */
    space_id = H5Screate(H5S_SCALAR);
    CHECK(space_id, FAIL, "H5Screate");
    dset_id = H5Dcreate1(file_id, MISC31_DSETNAME, H5T_NATIVE_INT, space_id, H5P_DEFAULT);
    CHECK(dset_id, FAIL, "H5Dcreate1");
    ret = H5close();
    CHECK(ret, FAIL, "H5close");
    file_id = H5Fopen(MISC31_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
    CHECK(file_id, FAIL, "H5Fopen");
    dset_id = H5Dopen1(file_id, MISC31_DSETNAME);
    CHECK(dset_id, FAIL, "H5Dopen1");

    /* Test attribute package */
    space_id = H5Screate(H5S_SCALAR);
    CHECK(space_id, FAIL, "H5Screate");
    attr_id = H5Acreate1(dset_id, MISC31_ATTRNAME1, H5T_NATIVE_INT, space_id, H5P_DEFAULT);
    CHECK(attr_id, FAIL, "H5Acreate1");
    ret = H5close();
    CHECK(ret, FAIL, "H5close");
    file_id = H5Fopen(MISC31_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
    CHECK(file_id, FAIL, "H5Fopen");
    dset_id = H5Dopen1(file_id, MISC31_DSETNAME);
    CHECK(dset_id, FAIL, "H5Dopen1");
    space_id = H5Screate(H5S_SCALAR);
    CHECK(space_id, FAIL, "H5Screate");
    attr_id = H5Acreate1(dset_id, MISC31_ATTRNAME2, H5T_NATIVE_INT, space_id, H5P_DEFAULT);
    CHECK(attr_id, FAIL, "H5Acreate1");

    /* Test group package */
    group_id = H5Gcreate1(file_id, MISC31_GROUPNAME, 0);
    CHECK(group_id, FAIL, "H5Gcreate1");
    ret = H5close();
    CHECK(ret, FAIL, "H5close");
    file_id = H5Fopen(MISC31_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
    CHECK(file_id, FAIL, "H5Fopen");
    group_id = H5Gopen1(file_id, MISC31_GROUPNAME);
    CHECK(group_id, FAIL, "H5Gopen1");

    /* Test property list package */
    ret = H5Pregister1(H5P_OBJECT_CREATE, MISC31_PROPNAME, 0, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
    CHECK(ret, FAIL, "H5Pregister1");
    ret = H5close();
    CHECK(ret, FAIL, "H5close");
    ret = H5Pregister1(H5P_OBJECT_CREATE, MISC31_PROPNAME, 0, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
    CHECK(ret, FAIL, "H5Pregister1");
    ret = H5close();
    CHECK(ret, FAIL, "H5close");

    /* Test datatype package */
    file_id = H5Fopen(MISC31_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
    CHECK(file_id, FAIL, "H5Fopen");
    dtype_id = H5Tcopy(H5T_NATIVE_INT);
    CHECK(dtype_id, FAIL, "H5Tcopy");
    ret = H5Tcommit1(file_id, MISC31_DTYPENAME, dtype_id);
    CHECK(ret, FAIL, "H5Tcommit1");
    ret = H5close();
    CHECK(ret, FAIL, "H5close");
    file_id = H5Fopen(MISC31_FILE, H5F_ACC_RDWR, H5P_DEFAULT);
    CHECK(file_id, FAIL, "H5Fopen");
    dtype_id = H5Topen1(file_id, MISC31_DTYPENAME);
    /* Fixed: previously checked stale 'ret' instead of 'dtype_id' here */
    CHECK(dtype_id, FAIL, "H5Topen1");
    ret = H5Fclose(file_id);
    CHECK(ret, FAIL, "H5Fclose");
    ret = H5Tclose(dtype_id);
    CHECK(ret, FAIL, "H5Tclose");

#else  /* H5_NO_DEPRECATED_SYMBOLS */
    /* Output message about test being skipped */
    MESSAGE(5, (" ...Skipped"));
#endif /* H5_NO_DEPRECATED_SYMBOLS */
} /* end test_misc31() */
#endif
+
+/****************************************************************
+ *
+ * test_misc32(): Simple test of filter memory allocation
+ * functions.
+ *
+ ***************************************************************/
static void
test_misc32(void)
{
    void  *buffer;
    void  *resized;
    size_t size;

    /* Output message about test being performed */
    MESSAGE(5, ("Edge case test of filter memory allocation functions\n"));

    /* Test that the filter memory allocation functions behave correctly
     * at edge cases.
     */

    /* FREE */

    /* Test freeing a NULL pointer.
     * No real confirmation check here, but Valgrind will confirm no
     * shenanigans.
     */
    buffer = NULL;
    H5free_memory(buffer);

    /* ALLOCATE */

    /* Size zero returns NULL.
     * Also checks that a size of zero and setting the buffer clear flag
     * to TRUE can be used together.
     *
     * Note that we have asserts in the code, so only check when NDEBUG
     * is defined.
     */
#ifdef NDEBUG
    buffer = H5allocate_memory(0, FALSE);
    CHECK_PTR_NULL(buffer, "H5allocate_memory"); /*BAD*/
    buffer = H5allocate_memory(0, TRUE);
    CHECK_PTR_NULL(buffer, "H5allocate_memory"); /*BAD*/
#endif /* NDEBUG */

    /* RESIZE */

    /* Size zero returns NULL. Valgrind will confirm buffer is freed. */
    size    = 1024;
    buffer  = H5allocate_memory(size, TRUE);
    resized = H5resize_memory(buffer, 0);
    CHECK_PTR_NULL(resized, "H5resize_memory");
    /* (buffer was freed by the resize-to-zero call above; do not reuse it) */

    /* NULL input pointer returns new buffer */
    resized = H5resize_memory(NULL, 1024);
    CHECK_PTR(resized, "H5resize_memory");
    H5free_memory(resized);

    /* NULL input pointer and size zero returns NULL
     * (again guarded by NDEBUG because debug builds assert instead) */
#ifdef NDEBUG
    resized = H5resize_memory(NULL, 0);
    CHECK_PTR_NULL(resized, "H5resize_memory"); /*BAD*/
#endif /* NDEBUG */

} /* end test_misc32() */
+
+/****************************************************************
+**
+** test_misc33(): Test for H5FFV-10216
+** --verify that H5HL_offset_into() returns error if the
+** input parameter "offset" exceeds heap data block size.
+** --case (1), (2), (3) are scenarios that will traverse to the
+** the 3 locations in the file having bad offset values to
+** the heap. (See description in gen_bad_offset.c)
+**
+****************************************************************/
#if 0
static void
test_misc33(void)
{
    hid_t       fid      = -1;                                  /* File ID */
    const char *testfile = H5_get_srcdir_filename(MISC33_FILE); /* Corrected test file name */
    H5O_info2_t oinfo;                                          /* Structure for object metadata information */
    hbool_t     driver_is_default_compatible;
    herr_t      ret; /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing that bad offset into the heap returns error"));

    /* The pre-generated corrupt file requires a VFD compatible with the
     * default file format */
    ret = h5_driver_is_default_vfd_compatible(H5P_DEFAULT, &driver_is_default_compatible);
    CHECK(ret, FAIL, "h5_driver_is_default_vfd_compatible");

    if (!driver_is_default_compatible) {
        HDprintf("-- SKIPPED --\n");
        return;
    }

    /* Open the test file */
    fid = H5Fopen(testfile, H5F_ACC_RDONLY, H5P_DEFAULT);
    CHECK(fid, FAIL, "H5Fopen");

    /* Each case traverses to a different corrupted heap offset in the
     * file; all three lookups must fail cleanly instead of crashing. */

    /* Case (1) */
    H5E_BEGIN_TRY
    {
        ret = H5Oget_info_by_name3(fid, "/soft_two", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
    }
    H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Oget_info_by_name3");

    /* Case (2) */
    H5E_BEGIN_TRY
    {
        ret = H5Oget_info_by_name3(fid, "/dsetA", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
    }
    H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Oget_info_by_name3");

    /* Case (3) */
    H5E_BEGIN_TRY
    {
        ret = H5Oget_info_by_name3(fid, "/soft_one", &oinfo, H5O_INFO_BASIC, H5P_DEFAULT);
    }
    H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Oget_info_by_name3");

    /* Close the file */
    ret = H5Fclose(fid);
    /* Fixed: previously checked 'fid' instead of 'ret' here */
    CHECK(ret, FAIL, "H5Fclose");

} /* end test_misc33() */
#endif
+
+/****************************************************************
+**
+** test_misc34(): Ensure zero-size memory allocations work
+**
+****************************************************************/
+#if 0
static void
test_misc34(void)
{
    void  *mem = NULL; /* allocated buffer */
    char  *dup = NULL; /* 'duplicated' string */
    size_t sz  = 0;    /* buffer size */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing O and NULL behavior in H5MM API calls"));

    /* H5MM_xfree(): Ensure that passing NULL is allowed and returns NULL */
    mem = H5MM_xfree(mem);
    CHECK_PTR_NULL(mem, "H5MM_xfree");

    /* H5MM_realloc(): Check behavior:
     *
     * H5MM_realloc(NULL, size)    <==> H5MM_malloc(size)
     * H5MM_realloc(ptr, 0)        <==> H5MM_xfree(ptr)
     * H5MM_realloc(NULL, 0)       <==> NULL
     */
    mem = H5MM_xfree(mem);

    sz  = 1024;
    mem = H5MM_realloc(mem, sz);
    CHECK_PTR(mem, "H5MM_realloc (case 1)");
    /* Don't free mem here! (case 2 below frees it via realloc-to-zero) */

    sz  = 0;
    mem = H5MM_realloc(mem, sz);
    CHECK_PTR_NULL(mem, "H5MM_realloc (case 2)");
    mem = H5MM_xfree(mem);

    mem = H5MM_realloc(mem, sz);
    CHECK_PTR_NULL(mem, "H5MM_realloc (case 3)");
    mem = H5MM_xfree(mem);

    /* H5MM_xstrdup(): Ensure NULL returns NULL
     * (mem is guaranteed NULL at this point by the xfree above) */
    dup = H5MM_xstrdup((const char *)mem);
    CHECK_PTR_NULL(dup, "H5MM_xstrdup");
    dup = (char *)H5MM_xfree((void *)dup);

} /* end test_misc34() */
+
+/****************************************************************
+**
+** test_misc35(): Check operation of free-list routines
+**
+****************************************************************/
static void
test_misc35(void)
{
    hid_t   sid    = H5I_INVALID_HID;                                           /* Dataspace ID */
    hsize_t dims[] = {MISC35_SPACE_DIM1, MISC35_SPACE_DIM2, MISC35_SPACE_DIM3}; /* Dataspace dims */
    hsize_t coord[MISC35_NPOINTS][MISC35_SPACE_RANK] = /* Coordinates for point selection */
        {{0, 10, 5}, {1, 2, 7},  {2, 4, 9},  {0, 6, 11}, {1, 8, 13},
         {2, 12, 0}, {0, 14, 2}, {1, 0, 4},  {2, 1, 6},  {0, 3, 8}};
    size_t reg_size_start; /* Initial amount of regular memory allocated */
    size_t arr_size_start; /* Initial amount of array memory allocated */
    size_t blk_size_start; /* Initial amount of block memory allocated */
    size_t fac_size_start; /* Initial amount of factory memory allocated */
    size_t reg_size_final; /* Final amount of regular memory allocated */
    size_t arr_size_final; /* Final amount of array memory allocated */
    size_t blk_size_final; /* Final amount of block memory allocated */
    size_t fac_size_final; /* Final amount of factory memory allocated */
    herr_t ret;            /* Return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Free-list API calls"));

    /* Create dataspace */
    /* (Allocates array free-list nodes) */
    sid = H5Screate_simple(MISC35_SPACE_RANK, dims, NULL);
    CHECK(sid, H5I_INVALID_HID, "H5Screate_simple");

    /* Select sequence of ten points
     * (exercises additional free-list classes via the point selection code) */
    ret = H5Sselect_elements(sid, H5S_SELECT_SET, (size_t)MISC35_NPOINTS, (const hsize_t *)coord);
    CHECK(ret, FAIL, "H5Sselect_elements");

    /* Close dataspace (returns the nodes above to the free lists) */
    ret = H5Sclose(sid);
    CHECK(ret, FAIL, "H5Sclose");

    /* Retrieve initial free list values */
    ret = H5get_free_list_sizes(&reg_size_start, &arr_size_start, &blk_size_start, &fac_size_start);
    CHECK(ret, FAIL, "H5get_free_list_sizes");

#if !defined H5_NO_FREE_LISTS && !defined H5_USING_MEMCHECKER
    /* All the free list values should be >0 */
    CHECK(reg_size_start, 0, "H5get_free_list_sizes");
    CHECK(arr_size_start, 0, "H5get_free_list_sizes");
    CHECK(blk_size_start, 0, "H5get_free_list_sizes");
    CHECK(fac_size_start, 0, "H5get_free_list_sizes");
#else
    /* All the values should be == 0 (free lists disabled in this build) */
    VERIFY(reg_size_start, 0, "H5get_free_list_sizes");
    VERIFY(arr_size_start, 0, "H5get_free_list_sizes");
    VERIFY(blk_size_start, 0, "H5get_free_list_sizes");
    VERIFY(fac_size_start, 0, "H5get_free_list_sizes");
#endif

    /* Garbage collect the free lists */
    ret = H5garbage_collect();
    CHECK(ret, FAIL, "H5garbage_collect");

    /* Retrieve free list values again */
    ret = H5get_free_list_sizes(&reg_size_final, &arr_size_final, &blk_size_final, &fac_size_final);
    CHECK(ret, FAIL, "H5get_free_list_sizes");

    /* All the free list values should be <= previous values
     * (garbage collection can only shrink the lists) */
    if (reg_size_final > reg_size_start)
        ERROR("reg_size_final > reg_size_start");
    if (arr_size_final > arr_size_start)
        ERROR("arr_size_final > arr_size_start");
    if (blk_size_final > blk_size_start)
        ERROR("blk_size_final > blk_size_start");
    if (fac_size_final > fac_size_start)
        ERROR("fac_size_final > fac_size_start");

} /* end test_misc35() */
+#endif
+
+/* Context to pass to 'atclose' callbacks */
+static int test_misc36_context;
+
+/* 'atclose' callbacks for test_misc36 */
+static void
+test_misc36_cb1(void *_ctx)
+{
+ int *ctx = (int *)_ctx; /* Set up context pointer */
+ hbool_t is_terminating; /* Flag indicating the library is terminating */
+ herr_t ret; /* Return value */
+
+ /* Check whether the library thinks it's terminating */
+ is_terminating = FALSE;
+ ret = H5is_library_terminating(&is_terminating);
+ CHECK(ret, FAIL, "H5is_library_terminating");
+ VERIFY(is_terminating, TRUE, "H5is_library_terminating");
+
+ /* Verify correct ordering for 'atclose' callbacks */
+ if (0 != *ctx)
+ HDabort();
+
+ /* Update context value */
+ *ctx = 1;
+}
+
+static void
+test_misc36_cb2(void *_ctx)
+{
+ int *ctx = (int *)_ctx; /* Set up context pointer */
+ hbool_t is_terminating; /* Flag indicating the library is terminating */
+ herr_t ret; /* Return value */
+
+ /* Check whether the library thinks it's terminating */
+ is_terminating = FALSE;
+ ret = H5is_library_terminating(&is_terminating);
+ CHECK(ret, FAIL, "H5is_library_terminating");
+ VERIFY(is_terminating, TRUE, "H5is_library_terminating");
+
+ /* Verify correct ordering for 'atclose' callbacks */
+ if (1 != *ctx)
+ HDabort();
+
+ /* Update context value */
+ *ctx = 2;
+}
+
+/****************************************************************
+**
+** test_misc36(): Exercise H5atclose and H5is_library_terminating
+**
+****************************************************************/
static void
test_misc36(void)
{
    hbool_t is_terminating; /* Flag indicating the library is terminating */
    herr_t  ret;            /* Return value */

    /* Output message about test being performed */
    MESSAGE(5, ("H5atclose and H5is_library_terminating API calls"));

    /* Check whether the library thinks it's terminating
     * (it shouldn't -- the library is up and running here) */
    is_terminating = TRUE;
    ret            = H5is_library_terminating(&is_terminating);
    CHECK(ret, FAIL, "H5is_library_terminating");
    VERIFY(is_terminating, FALSE, "H5is_library_terminating");

    /* Shut the library down (no 'atclose' callbacks registered yet) */
    test_misc36_context = 0;
    H5close();

    /* Check whether the library thinks it's terminating
     * (the query itself re-initializes the library) */
    is_terminating = TRUE;
    ret            = H5is_library_terminating(&is_terminating);
    CHECK(ret, FAIL, "H5is_library_terminating");
    VERIFY(is_terminating, FALSE, "H5is_library_terminating");

    /* Check the close context was not changed (no callbacks ran) */
    VERIFY(test_misc36_context, 0, "H5atclose");

    /* Restart the library */
    H5open();

    /* Check whether the library thinks it's terminating */
    is_terminating = TRUE;
    ret            = H5is_library_terminating(&is_terminating);
    CHECK(ret, FAIL, "H5is_library_terminating");
    VERIFY(is_terminating, FALSE, "H5is_library_terminating");

    /* Register the 'atclose' callbacks */
    /* (Note that these will be called in reverse order, which is checked) */
    ret = H5atclose(&test_misc36_cb2, &test_misc36_context);
    CHECK(ret, FAIL, "H5atclose");
    ret = H5atclose(&test_misc36_cb1, &test_misc36_context);
    CHECK(ret, FAIL, "H5atclose");

    /* Shut the library down, invoking the callbacks */
    test_misc36_context = 0;
    H5close();

    /* Check the close context was changed correctly
     * (cb1 sets it to 1, then cb2 sets it to 2) */
    VERIFY(test_misc36_context, 2, "H5atclose");

    /* Restart the library */
    H5open();

    /* Close the library again -- callbacks are one-shot, so none should run */
    test_misc36_context = 0;
    H5close();

    /* Check the close context was not changed */
    VERIFY(test_misc36_context, 0, "H5atclose");
} /* end test_misc36() */
+
+#if 0
+/****************************************************************
+**
+** test_misc37():
+** Test for seg fault issue when closing the provided test file
+** which has an illegal file size in its cache image.
+** See HDFFV-11052/CVE-2020-10812 for details.
+**
+****************************************************************/
+static void
+test_misc37(void)
+{
+ const char *testfile = H5_get_srcdir_filename(CVE_2020_10812_FILENAME); /* pre-generated corrupt file from the source tree */
+ hbool_t driver_is_default_compatible;
+ hid_t fid;
+ herr_t ret;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Fix for HDFFV-11052/CVE-2020-10812"));
+
+ /* The corrupt test file is in native sec2 format; skip under incompatible VFDs */
+ ret = h5_driver_is_default_vfd_compatible(H5P_DEFAULT, &driver_is_default_compatible);
+ CHECK(ret, FAIL, "h5_driver_is_default_vfd_compatible");
+
+ if (!driver_is_default_compatible) {
+ HDprintf("-- SKIPPED --\n");
+ return;
+ }
+
+ fid = H5Fopen(testfile, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fopen");
+
+ /* This should fail due to the illegal file size.
+ It should fail gracefully and not seg fault */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Fclose(fid);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Fclose"); /* no retry on the failed close -- the test only guards against a crash */
+
+} /* end test_misc37() */
+#endif
+
+/****************************************************************
+**
+** test_misc(): Main misc. test routine.
+**
+****************************************************************/
+void
+test_misc(void)
+{
+ hbool_t default_driver = h5_using_default_driver(NULL); /* some sub-tests only make sense with the default (sec2) VFD */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Miscellaneous Routines\n"));
+
+ test_misc1(); /* Test unlinking a dataset & immediately re-using name */
+ test_misc2(); /* Test storing a VL-derived datatype in two different files */
+ test_misc3(); /* Test reading from chunked dataset with non-zero fill value */
+ test_misc4(); /* Test retrieving the fileno for various objects with H5Oget_info() */
+ test_misc5(); /* Test several level deep nested compound & VL datatypes */
+ test_misc6(); /* Test object header continuation code */
+#if 0 /* NOTE(review): "#if 0" groups below are presumably disabled because they depend on native-library internals/files unavailable to this API test build -- confirm */
+ test_misc7(); /* Test for sensible datatypes stored on disk */
+ test_misc8(); /* Test storage sizes of various types of dataset storage */
+#endif
+ test_misc9(); /* Test for opening (not creating) core files */
+#if 0
+ test_misc10(); /* Test for using dataset creation property lists from old files */
+#endif
+
+ if (default_driver) {
+ test_misc11(); /* Test for all properties of a file creation property list being stored */
+ }
+
+ test_misc12(); /* Test VL-strings in chunked datasets operating correctly */
+#if 0
+ if (default_driver) {
+ test_misc13(); /* Test that a user block can be insert in front of file contents */
+ }
+#endif
+ test_misc14(); /* Test that deleted dataset's data is removed from sieve buffer correctly */
+ test_misc15(); /* Test that checking a file's access property list more than once works */
+ test_misc16(); /* Test array of fixed-length string */
+ test_misc17(); /* Test array of ASCII character */
+ test_misc18(); /* Test new object header information in H5O_info2_t struct */
+ test_misc19(); /* Test incrementing & decrementing ref count on IDs */
+#if 0
+ test_misc20(); /* Test problems with truncated dimensions in version 2 of storage layout message */
+#endif
+#if defined(H5_HAVE_FILTER_SZIP) && !defined(H5_API_TEST_NO_FILTERS)
+ test_misc21(); /* Test that "late" allocation time is treated the same as "incremental", for chunked
+ datasets w/a filters */
+ test_misc22(); /* check szip bits per pixel */
+#endif /* H5_HAVE_FILTER_SZIP */
+ test_misc23(); /* Test intermediate group creation */
+ test_misc24(); /* Test inappropriate API opens of objects */
+ test_misc25a(); /* Exercise null object header message merge bug */
+#if 0
+ test_misc25b(); /* Exercise null object header message merge bug on existing file */
+#endif
+ test_misc25c(); /* Exercise another null object header message merge bug */
+ test_misc26(); /* Test closing property lists with long filter pipelines */
+#if 0
+ test_misc27(); /* Test opening file with object that has bad # of object header messages */
+#endif
+ test_misc28(); /* Test that chunks are cached appropriately */
+#if 0
+ test_misc29(); /* Test that speculative metadata reads are handled correctly */
+ test_misc30(); /* Exercise local heap loading bug where free lists were getting dropped */
+
+ if (default_driver) {
+ test_misc31(); /* Test Reentering library through deprecated routines after H5close() */
+ }
+#endif
+ test_misc32(); /* Test filter memory allocation functions */
+#if 0
+ test_misc33(); /* Test to verify that H5HL_offset_into() returns error if offset exceeds heap block */
+ test_misc34(); /* Test behavior of 0 and NULL in H5MM API calls */
+ test_misc35(); /* Test behavior of free-list & allocation statistics API calls */
+#endif
+ test_misc36(); /* Exercise H5atclose and H5is_library_terminating */
+#if 0
+ test_misc37(); /* Test for seg fault failure at file close */
+#endif
+} /* test_misc() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_misc
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Albert Cheng
+ * July 2, 1998
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_misc(void)
+{ /* H5Fdelete return values are intentionally ignored: files belonging to "#if 0"-disabled tests may never have been created */
+ H5Fdelete(MISC1_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC2_FILE_1, H5P_DEFAULT);
+ H5Fdelete(MISC2_FILE_2, H5P_DEFAULT);
+ H5Fdelete(MISC3_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC4_FILE_1, H5P_DEFAULT);
+ H5Fdelete(MISC4_FILE_2, H5P_DEFAULT);
+ H5Fdelete(MISC5_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC6_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC7_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC8_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC9_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC10_FILE_NEW, H5P_DEFAULT);
+ H5Fdelete(MISC11_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC12_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC13_FILE_1, H5P_DEFAULT);
+ H5Fdelete(MISC13_FILE_2, H5P_DEFAULT);
+ H5Fdelete(MISC14_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC15_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC16_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC17_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC18_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC19_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC20_FILE, H5P_DEFAULT);
+#if defined(H5_HAVE_FILTER_SZIP) && !defined(H5_API_TEST_NO_FILTERS)
+ H5Fdelete(MISC21_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC22_FILE, H5P_DEFAULT);
+#endif /* H5_HAVE_FILTER_SZIP */
+ H5Fdelete(MISC23_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC24_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC25A_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC25C_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC26_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC28_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC29_COPY_FILE, H5P_DEFAULT);
+ H5Fdelete(MISC30_FILE, H5P_DEFAULT);
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+ H5Fdelete(MISC31_FILE, H5P_DEFAULT);
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+} /* end cleanup_misc() */
diff --git a/test/API/trefer.c b/test/API/trefer.c
new file mode 100644
index 0000000..af0b11b
--- /dev/null
+++ b/test/API/trefer.c
@@ -0,0 +1,3641 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: trefer
+ *
+ * Test the Reference functionality
+ *
+ *************************************************************/
+
+#include "testhdf5.h"
+
+#define FILE_REF_PARAM "trefer_param.h5"
+#define FILE_REF_OBJ "trefer_obj.h5"
+#define FILE_REF_VL_OBJ "trefer_vl_obj.h5"
+#define FILE_REF_CMPND_OBJ "trefer_cmpnd_obj.h5"
+#define FILE_REF_REG "trefer_reg.h5"
+#define FILE_REF_REG_1D "trefer_reg_1d.h5"
+#define FILE_REF_OBJ_DEL "trefer_obj_del.h5"
+#define FILE_REF_GRP "trefer_grp.h5"
+#define FILE_REF_ATTR "trefer_attr.h5"
+#define FILE_REF_EXT1 "trefer_ext1.h5"
+#define FILE_REF_EXT2 "trefer_ext2.h5"
+#define FILE_REF_COMPAT "trefer_compat.h5"
+
+/* 1-D dataset with fixed dimensions */
+#define SPACE1_RANK 1
+#define SPACE1_DIM1 4
+
+/* 2-D dataset with fixed dimensions */
+#define SPACE2_RANK 2
+#define SPACE2_DIM1 10
+#define SPACE2_DIM2 10
+
+/* Larger 1-D dataset with fixed dimensions */
+#define SPACE3_RANK 1
+#define SPACE3_DIM1 100
+
+/* Element selection information */
+#define POINT1_NPOINTS 10
+
+/* Simple compound datatype; committed to the files as "Datatype1" by the tests below */
+typedef struct s1_t {
+ unsigned int a;
+ unsigned int b;
+ float c;
+} s1_t;
+
+/* Compound datatype holding four references plus a dimension index (presumably for the compound-reference test, FILE_REF_CMPND_OBJ -- usage is outside this view) */
+typedef struct s2_t {
+ H5R_ref_t ref0; /* reference field #0 */
+ H5R_ref_t ref1; /* reference field #1 */
+ H5R_ref_t ref2; /* reference field #2 */
+ H5R_ref_t ref3; /* reference field #3 */
+ unsigned int dim_idx; /* dimension index of the dataset */
+} s2_t;
+
+#define GROUPNAME "/group"
+#define GROUPNAME2 "group2"
+#define GROUPNAME3 "group3"
+#define DSETNAME "/dset"
+#define DSETNAME2 "dset2"
+#define NAME_SIZE 16
+
+#define MAX_ITER_CREATE 1000
+#define MAX_ITER_WRITE MAX_ITER_CREATE
+#define MAX_ITER_READ MAX_ITER_CREATE
+
+/****************************************************************
+**
+** test_reference_params(): Test basic H5R (reference) parameters
+** for correct processing
+**
+****************************************************************/
+static void
+test_reference_params(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset, /* Dataset ID */
+ dset2; /* Dereferenced dataset ID */
+ hid_t group; /* Group ID */
+ hid_t attr; /* Attribute ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1; /* Datatype ID */
+ hid_t aapl_id; /* Attribute access property list */
+ hid_t dapl_id; /* Dataset access property list */
+ hsize_t dims1[] = {SPACE1_DIM1};
+ H5R_ref_t *wbuf, /* buffer to write to disk */
+ *rbuf, /* buffer read from disk */
+ *tbuf; /* temp. buffer read from disk */
+ unsigned *obuf;
+ H5R_type_t type; /* Reference type */
+ unsigned int i; /* Counters */
+#if 0
+ const char *write_comment = "Foo!"; /* Comments for group */
+#endif
+ hid_t ret_id; /* Generic hid_t return value */
+ ssize_t name_size; /* Size of reference name */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Reference Parameters\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = (H5R_ref_t *)HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+ rbuf = (H5R_ref_t *)HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+ tbuf = (H5R_ref_t *)HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+ obuf = HDcalloc(sizeof(unsigned), SPACE1_DIM1);
+
+ for (i = 0; i < SPACE1_DIM1; i++)
+ obuf[i] = i * 3;
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE_REF_PARAM, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, H5I_INVALID_HID, "H5Fcreate");
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Create attribute access property list */
+ aapl_id = H5Pcreate(H5P_ATTRIBUTE_ACCESS);
+ CHECK(aapl_id, H5I_INVALID_HID, "H5Pcreate");
+
+ /* Create dataset access property list */
+ dapl_id = H5Pcreate(H5P_DATASET_ACCESS);
+ CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate");
+
+ /* Create a group */
+ group = H5Gcreate2(fid1, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group, H5I_INVALID_HID, "H5Gcreate2");
+#if 0
+ /* Set group's comment */
+ ret = H5Oset_comment(group, write_comment);
+ CHECK(ret, FAIL, "H5Oset_comment");
+#endif
+ /* Create a dataset (inside Group1) */
+ dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, obuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Create another dataset (inside Group1) */
+ dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Create an attribute for the dataset */
+ attr = H5Acreate2(dataset, "Attr", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(attr, H5I_INVALID_HID, "H5Acreate2");
+
+ /* Write attribute to disk */
+ ret = H5Awrite(attr, H5T_NATIVE_UINT, obuf);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close attribute */
+ ret = H5Aclose(attr);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Create a datatype to refer to */
+ tid1 = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+ CHECK(tid1, H5I_INVALID_HID, "H5Tcreate");
+
+ /* Insert fields */
+ ret = H5Tinsert(tid1, "a", HOFFSET(s1_t, a), H5T_NATIVE_INT); /* NOTE(review): field is unsigned but inserted as H5T_NATIVE_INT (matches legacy test); confirm intentional */
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(tid1, "b", HOFFSET(s1_t, b), H5T_NATIVE_INT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(tid1, "c", HOFFSET(s1_t, c), H5T_NATIVE_FLOAT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ /* Save datatype for later */
+ ret = H5Tcommit2(group, "Datatype1", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /* Close datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close group */
+ ret = H5Gclose(group);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, "Dataset3", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* was checking 'ret' (stale H5Gclose result), which would silently mask a failed H5Dcreate2 */
+
+ /* Test parameters to H5Rcreate_object */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcreate_object(fid1, "/Group1/Dataset1", H5P_DEFAULT, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcreate_object ref");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcreate_object(H5I_INVALID_HID, "/Group1/Dataset1", H5P_DEFAULT, &wbuf[0]);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcreate_object loc_id");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcreate_object(fid1, NULL, H5P_DEFAULT, &wbuf[0]);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcreate_object name");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcreate_object(fid1, "", H5P_DEFAULT, &wbuf[0]);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcreate_object null name");
+
+ /* Test parameters to H5Rcreate_region */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcreate_region(fid1, "/Group1/Dataset1", sid1, H5P_DEFAULT, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcreate_region ref");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcreate_region(H5I_INVALID_HID, "/Group1/Dataset1", sid1, H5P_DEFAULT, &wbuf[0]);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcreate_region loc_id");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcreate_region(fid1, NULL, sid1, H5P_DEFAULT, &wbuf[0]);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcreate_region name");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcreate_region(fid1, "/Group1/Dataset1", H5I_INVALID_HID, H5P_DEFAULT, &wbuf[0]);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcreate_region dataspace");
+
+ /* Test parameters to H5Rcreate_attr */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcreate_attr(fid1, "/Group1/Dataset2", "Attr", H5P_DEFAULT, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcreate_attr ref");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcreate_attr(H5I_INVALID_HID, "/Group1/Dataset2", "Attr", H5P_DEFAULT, &wbuf[0]);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcreate_attr loc_id");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcreate_attr(fid1, NULL, "Attr", H5P_DEFAULT, &wbuf[0]);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcreate_attr name");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcreate_attr(fid1, "/Group1/Dataset2", NULL, H5P_DEFAULT, &wbuf[0]);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcreate_attr attr_name");
+
+ /* Test parameters to H5Rdestroy */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rdestroy(NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rdestroy");
+
+ /* Test parameters to H5Rget_type */
+ H5E_BEGIN_TRY
+ {
+ type = H5Rget_type(NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(type, H5R_BADTYPE, "H5Rget_type ref");
+
+ /* Test parameters to H5Requal */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Requal(NULL, &rbuf[0]);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Requal ref1");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Requal(&rbuf[0], NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Requal ref2");
+
+ /* Test parameters to H5Rcopy */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcopy(NULL, &wbuf[0]);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcopy src_ref");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rcopy(&rbuf[0], NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rcopy dest_ref");
+
+ /* Test parameters to H5Ropen_object */
+ H5E_BEGIN_TRY
+ {
+ dset2 = H5Ropen_object(&rbuf[0], H5I_INVALID_HID, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+ VERIFY(dset2, H5I_INVALID_HID, "H5Ropen_object oapl_id");
+ H5E_BEGIN_TRY
+ {
+ dset2 = H5Ropen_object(NULL, H5P_DEFAULT, dapl_id);
+ }
+ H5E_END_TRY;
+ VERIFY(dset2, H5I_INVALID_HID, "H5Ropen_object ref");
+
+ /* Test parameters to H5Ropen_region */
+ H5E_BEGIN_TRY
+ {
+ ret_id = H5Ropen_region(NULL, H5I_INVALID_HID, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+ VERIFY(ret_id, H5I_INVALID_HID, "H5Ropen_region ref");
+
+ /* Test parameters to H5Ropen_attr */
+ H5E_BEGIN_TRY
+ {
+ ret_id = H5Ropen_attr(NULL, H5P_DEFAULT, aapl_id);
+ }
+ H5E_END_TRY;
+ VERIFY(ret_id, H5I_INVALID_HID, "H5Ropen_attr ref");
+
+ /* Test parameters to H5Rget_obj_type3 */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Rget_obj_type3(NULL, H5P_DEFAULT, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Rget_obj_type3 ref");
+
+ /* Test parameters to H5Rget_file_name */
+ H5E_BEGIN_TRY
+ {
+ name_size = H5Rget_file_name(NULL, NULL, 0);
+ }
+ H5E_END_TRY;
+ VERIFY(name_size, (-1), "H5Rget_file_name ref");
+
+ /* Test parameters to H5Rget_obj_name */
+ H5E_BEGIN_TRY
+ {
+ name_size = H5Rget_obj_name(NULL, H5P_DEFAULT, NULL, 0);
+ }
+ H5E_END_TRY;
+ VERIFY(name_size, (-1), "H5Rget_obj_name ref");
+
+ /* Test parameters to H5Rget_attr_name */
+ H5E_BEGIN_TRY
+ {
+ name_size = H5Rget_attr_name(NULL, NULL, 0);
+ }
+ H5E_END_TRY;
+ VERIFY(name_size, (-1), "H5Rget_attr_name ref");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close dataset access property list */
+ ret = H5Pclose(dapl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close attribute access property list */
+ ret = H5Pclose(aapl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers (no H5Rdestroy needed: every H5Rcreate_* call above was expected to fail) */
+ HDfree(wbuf);
+ HDfree(rbuf);
+ HDfree(tbuf);
+ HDfree(obuf);
+} /* test_reference_params() */
+
+/****************************************************************
+**
+** test_reference_obj(): Test basic H5R (reference) object reference code.
+** Tests references to various kinds of objects
+**
+****************************************************************/
+static void
+test_reference_obj(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset, /* Dataset ID */
+ dset2; /* Dereferenced dataset ID */
+ hid_t group; /* Group ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1; /* Datatype ID */
+ hsize_t dims1[] = {SPACE1_DIM1};
+ hid_t dapl_id; /* Dataset access property list */
+ H5R_ref_t *wbuf, /* buffer to write to disk */
+ *rbuf; /* buffer read from disk */
+ unsigned *ibuf, *obuf;
+ unsigned i, j; /* Counters */
+ H5O_type_t obj_type; /* Object type */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Object Reference Functions\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+ rbuf = HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+ ibuf = HDcalloc(sizeof(unsigned), SPACE1_DIM1);
+ obuf = HDcalloc(sizeof(unsigned), SPACE1_DIM1);
+
+ for (i = 0; i < SPACE1_DIM1; i++)
+ obuf[i] = i * 3;
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE_REF_OBJ, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, H5I_INVALID_HID, "H5Fcreate");
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Create dataset access property list */
+ dapl_id = H5Pcreate(H5P_DATASET_ACCESS);
+ CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate");
+
+ /* Create a group */
+ group = H5Gcreate2(fid1, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group, H5I_INVALID_HID, "H5Gcreate2");
+
+ /* Create a dataset (inside Group1) */
+ dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, obuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Create another dataset (inside Group1) */
+ dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2"); /* NOTE(review): FAIL equals H5I_INVALID_HID, but prefer H5I_INVALID_HID for hid_t checks (cf. Dataset1 above) */
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Create a datatype to refer to */
+ tid1 = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+ CHECK(tid1, H5I_INVALID_HID, "H5Tcreate");
+
+ /* Insert fields */
+ ret = H5Tinsert(tid1, "a", HOFFSET(s1_t, a), H5T_NATIVE_INT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(tid1, "b", HOFFSET(s1_t, b), H5T_NATIVE_INT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(tid1, "c", HOFFSET(s1_t, c), H5T_NATIVE_FLOAT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ /* Save datatype for later */
+ ret = H5Tcommit2(group, "Datatype1", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /* Close datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close group */
+ ret = H5Gclose(group);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Create a dataset (of reference type, to hold the references themselves) */
+ dataset = H5Dcreate2(fid1, "Dataset3", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Create reference to dataset */
+ ret = H5Rcreate_object(fid1, "/Group1/Dataset1", H5P_DEFAULT, &wbuf[0]);
+ CHECK(ret, FAIL, "H5Rcreate_object");
+ ret = H5Rget_obj_type3(&wbuf[0], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+ /* Create reference to dataset */
+ ret = H5Rcreate_object(fid1, "/Group1/Dataset2", H5P_DEFAULT, &wbuf[1]);
+ CHECK(ret, FAIL, "H5Rcreate_object");
+ ret = H5Rget_obj_type3(&wbuf[1], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+ /* Create reference to group */
+ ret = H5Rcreate_object(fid1, "/Group1", H5P_DEFAULT, &wbuf[2]);
+ CHECK(ret, FAIL, "H5Rcreate_object");
+ ret = H5Rget_obj_type3(&wbuf[2], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_GROUP, "H5Rget_obj_type3");
+
+ /* Create reference to named datatype */
+ ret = H5Rcreate_object(fid1, "/Group1/Datatype1", H5P_DEFAULT, &wbuf[3]);
+ CHECK(ret, FAIL, "H5Rcreate_object");
+ ret = H5Rget_obj_type3(&wbuf[3], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_NAMED_DATATYPE, "H5Rget_obj_type3");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open the file */
+ fid1 = H5Fopen(FILE_REF_OBJ, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid1, H5I_INVALID_HID, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid1, "/Dataset3", H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dopen2");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Open dataset object */
+ dset2 = H5Ropen_object(&rbuf[0], H5P_DEFAULT, dapl_id);
+ CHECK(dset2, H5I_INVALID_HID, "H5Ropen_object");
+
+ /* Check information in referenced dataset */
+ sid1 = H5Dget_space(dset2);
+ CHECK(sid1, H5I_INVALID_HID, "H5Dget_space");
+
+ ret = (int)H5Sget_simple_extent_npoints(sid1);
+ VERIFY(ret, SPACE1_DIM1, "H5Sget_simple_extent_npoints");
+
+ /* Read from disk (should be the obuf values written to Dataset1 above) */
+ ret = H5Dread(dset2, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ for (i = 0; i < SPACE1_DIM1; i++)
+ VERIFY(ibuf[i], i * 3, "Data");
+
+ /* Close dereferenced Dataset */
+ ret = H5Dclose(dset2);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Open group object. GAPL isn't supported yet. But it's harmless to pass in */
+ group = H5Ropen_object(&rbuf[2], H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group, H5I_INVALID_HID, "H5Ropen_object");
+
+ /* Close group */
+ ret = H5Gclose(group);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Open datatype object. TAPL isn't supported yet. But it's harmless to pass in */
+ tid1 = H5Ropen_object(&rbuf[3], H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(tid1, H5I_INVALID_HID, "H5Ropen_object");
+
+ /* Verify correct datatype (must match the 3-member compound committed as Datatype1) */
+ {
+ H5T_class_t tclass;
+
+ tclass = H5Tget_class(tid1);
+ VERIFY(tclass, H5T_COMPOUND, "H5Tget_class");
+
+ ret = H5Tget_nmembers(tid1);
+ VERIFY(ret, 3, "H5Tget_nmembers");
+ }
+
+ /* Close datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close dataset access property list */
+ ret = H5Pclose(dapl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Destroy references (both the created and the read-back copies own resources) */
+ for (j = 0; j < SPACE1_DIM1; j++) {
+ ret = H5Rdestroy(&wbuf[j]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ ret = H5Rdestroy(&rbuf[j]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ }
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+ HDfree(ibuf);
+ HDfree(obuf);
+} /* test_reference_obj() */
+
+/****************************************************************
+**
+** test_reference_vlen_obj(): Test basic H5R (reference) object reference
+** within a vlen type.
+** Tests references to various kinds of objects
+**
+****************************************************************/
+static void
+test_reference_vlen_obj(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset, /* Dataset ID */
+ dset2; /* Dereferenced dataset ID */
+ hid_t group; /* Group ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1; /* Datatype ID */
+ hsize_t dims1[] = {SPACE1_DIM1};
+ hsize_t vl_dims[] = {1};
+ hid_t dapl_id; /* Dataset access property list */
+ H5R_ref_t *wbuf, /* buffer to write to disk */
+ *rbuf = NULL; /* buffer read from disk */
+ unsigned *ibuf, *obuf;
+ unsigned i, j; /* Counters */
+ H5O_type_t obj_type; /* Object type */
+ herr_t ret; /* Generic return value */
+ hvl_t vl_wbuf = {0, NULL}, vl_rbuf = {0, NULL};
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Object Reference Functions within VLEN type\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+ ibuf = HDcalloc(sizeof(unsigned), SPACE1_DIM1);
+ obuf = HDcalloc(sizeof(unsigned), SPACE1_DIM1);
+
+ for (i = 0; i < SPACE1_DIM1; i++)
+ obuf[i] = i * 3;
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE_REF_VL_OBJ, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, H5I_INVALID_HID, "H5Fcreate");
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Create dataset access property list */
+ dapl_id = H5Pcreate(H5P_DATASET_ACCESS);
+ CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate");
+
+ /* Create a group */
+ group = H5Gcreate2(fid1, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group, H5I_INVALID_HID, "H5Gcreate2");
+
+ /* Create a dataset (inside Group1) */
+ dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, obuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Create another dataset (inside Group1) */
+ dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Create a datatype to refer to */
+ tid1 = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+ CHECK(tid1, H5I_INVALID_HID, "H5Tcreate");
+
+ /* Insert fields */
+ ret = H5Tinsert(tid1, "a", HOFFSET(s1_t, a), H5T_NATIVE_INT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(tid1, "b", HOFFSET(s1_t, b), H5T_NATIVE_INT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(tid1, "c", HOFFSET(s1_t, c), H5T_NATIVE_FLOAT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ /* Save datatype for later */
+ ret = H5Tcommit2(group, "Datatype1", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /* Close datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close group */
+ ret = H5Gclose(group);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Create vlen type */
+ tid1 = H5Tvlen_create(H5T_STD_REF);
+ CHECK(tid1, H5I_INVALID_HID, "H5Tvlen_create");
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, vl_dims, NULL);
+ CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, "Dataset3", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Create reference to dataset */
+ ret = H5Rcreate_object(fid1, "/Group1/Dataset1", H5P_DEFAULT, &wbuf[0]);
+ CHECK(ret, FAIL, "H5Rcreate_object");
+ ret = H5Rget_obj_type3(&wbuf[0], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+ /* Create reference to dataset */
+ ret = H5Rcreate_object(fid1, "/Group1/Dataset2", H5P_DEFAULT, &wbuf[1]);
+ CHECK(ret, FAIL, "H5Rcreate_object");
+ ret = H5Rget_obj_type3(&wbuf[1], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+ /* Create reference to group */
+ ret = H5Rcreate_object(fid1, "/Group1", H5P_DEFAULT, &wbuf[2]);
+ CHECK(ret, FAIL, "H5Rcreate_object");
+ ret = H5Rget_obj_type3(&wbuf[2], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_GROUP, "H5Rget_obj_type3");
+
+ /* Create reference to named datatype */
+ ret = H5Rcreate_object(fid1, "/Group1/Datatype1", H5P_DEFAULT, &wbuf[3]);
+ CHECK(ret, FAIL, "H5Rcreate_object");
+ ret = H5Rget_obj_type3(&wbuf[3], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_NAMED_DATATYPE, "H5Rget_obj_type3");
+
+ /* Store references into vlen */
+ vl_wbuf.len = SPACE1_DIM1;
+ vl_wbuf.p = wbuf;
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, &vl_wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open the file */
+ fid1 = H5Fopen(FILE_REF_VL_OBJ, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid1, H5I_INVALID_HID, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid1, "/Dataset3", H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dopen2");
+
+ tid1 = H5Dget_type(dataset);
+ CHECK(tid1, H5I_INVALID_HID, "H5Dget_type");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, &vl_rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ VERIFY(vl_rbuf.len, SPACE1_DIM1, "H5Dread");
+ rbuf = vl_rbuf.p;
+
+ /* Close datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Open dataset object */
+ dset2 = H5Ropen_object(&rbuf[0], H5P_DEFAULT, dapl_id);
+ CHECK(dset2, H5I_INVALID_HID, "H5Ropen_object");
+
+ /* Check information in referenced dataset */
+ sid1 = H5Dget_space(dset2);
+ CHECK(sid1, H5I_INVALID_HID, "H5Dget_space");
+
+ ret = (int)H5Sget_simple_extent_npoints(sid1);
+ VERIFY(ret, SPACE1_DIM1, "H5Sget_simple_extent_npoints");
+
+ /* Read from disk */
+ ret = H5Dread(dset2, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ for (i = 0; i < SPACE1_DIM1; i++)
+ VERIFY(ibuf[i], i * 3, "Data");
+
+ /* Close dereferenced Dataset */
+ ret = H5Dclose(dset2);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Open group object. GAPL isn't supported yet. But it's harmless to pass in */
+ group = H5Ropen_object(&rbuf[2], H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group, H5I_INVALID_HID, "H5Ropen_object");
+
+ /* Close group */
+ ret = H5Gclose(group);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Open datatype object. TAPL isn't supported yet. But it's harmless to pass in */
+ tid1 = H5Ropen_object(&rbuf[3], H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(tid1, H5I_INVALID_HID, "H5Ropen_object");
+
+ /* Verify correct datatype */
+ {
+ H5T_class_t tclass;
+
+ tclass = H5Tget_class(tid1);
+ VERIFY(tclass, H5T_COMPOUND, "H5Tget_class");
+
+ ret = H5Tget_nmembers(tid1);
+ VERIFY(ret, 3, "H5Tget_nmembers");
+ }
+
+ /* Close datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close dataset access property list */
+ ret = H5Pclose(dapl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Destroy references */
+ for (j = 0; j < SPACE1_DIM1; j++) {
+ ret = H5Rdestroy(&wbuf[j]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ ret = H5Rdestroy(&rbuf[j]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ }
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+ HDfree(ibuf);
+ HDfree(obuf);
+} /* test_reference_vlen_obj() */
+
+/****************************************************************
+**
+** test_reference_cmpnd_obj(): Test basic H5R (reference) object reference
+** within a compound type.
+** Tests references to various kinds of objects
+**
+****************************************************************/
+static void
+test_reference_cmpnd_obj(void)
+{
+    hid_t fid1;    /* HDF5 File IDs */
+    hid_t dataset, /* Dataset ID */
+        dset2;     /* Dereferenced dataset ID */
+    hid_t   group; /* Group ID */
+    hid_t   sid1;  /* Dataspace ID */
+    hid_t   tid1;  /* Datatype ID */
+    hsize_t dims1[]      = {SPACE1_DIM1};
+    hsize_t cmpnd_dims[] = {1};
+    hid_t   dapl_id; /* Dataset access property list */
+    unsigned *ibuf, *obuf;
+    unsigned   i;        /* Counter */
+    H5O_type_t obj_type; /* Object type */
+    herr_t     ret;      /* Generic return value */
+    s2_t       cmpnd_wbuf, cmpnd_rbuf;
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Object Reference Functions within compound type\n"));
+
+    /* Allocate write & read buffers */
+    ibuf = HDcalloc(sizeof(unsigned), SPACE1_DIM1);
+    obuf = HDcalloc(sizeof(unsigned), SPACE1_DIM1);
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        obuf[i] = i * 3;
+
+    /* Create file */
+    fid1 = H5Fcreate(FILE_REF_CMPND_OBJ, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple");
+
+    /* Create dataset access property list */
+    dapl_id = H5Pcreate(H5P_DATASET_ACCESS);
+    CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Create a group */
+    group = H5Gcreate2(fid1, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(group, H5I_INVALID_HID, "H5Gcreate2");
+
+    /* Create a dataset (inside Group1) */
+    dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dataset, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, obuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Create another dataset (inside Group1) */
+    dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    /* H5Dcreate2 returns an hid_t, so check against H5I_INVALID_HID (not FAIL),
+     * consistent with every other creation call in this test */
+    CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Create a datatype to refer to */
+    tid1 = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+    CHECK(tid1, H5I_INVALID_HID, "H5Tcreate");
+
+    /* Insert fields */
+    ret = H5Tinsert(tid1, "a", HOFFSET(s1_t, a), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    ret = H5Tinsert(tid1, "b", HOFFSET(s1_t, b), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    ret = H5Tinsert(tid1, "c", HOFFSET(s1_t, c), H5T_NATIVE_FLOAT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Save datatype for later */
+    ret = H5Tcommit2(group, "Datatype1", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close group */
+    ret = H5Gclose(group);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Create compound type */
+    tid1 = H5Tcreate(H5T_COMPOUND, sizeof(s2_t));
+    CHECK(tid1, H5I_INVALID_HID, "H5Tcreate");
+
+    /* Insert fields */
+    ret = H5Tinsert(tid1, "ref0", HOFFSET(s2_t, ref0), H5T_STD_REF);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    ret = H5Tinsert(tid1, "ref1", HOFFSET(s2_t, ref1), H5T_STD_REF);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    ret = H5Tinsert(tid1, "ref2", HOFFSET(s2_t, ref2), H5T_STD_REF);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    ret = H5Tinsert(tid1, "ref3", HOFFSET(s2_t, ref3), H5T_STD_REF);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    ret = H5Tinsert(tid1, "dim_idx", HOFFSET(s2_t, dim_idx), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Create dataspace for datasets */
+    sid1 = H5Screate_simple(SPACE1_RANK, cmpnd_dims, NULL);
+    CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, "Dataset3", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Reset buffer for writing */
+    HDmemset(&cmpnd_wbuf, 0, sizeof(cmpnd_wbuf));
+
+    /* Create reference to dataset */
+    ret = H5Rcreate_object(fid1, "/Group1/Dataset1", H5P_DEFAULT, &cmpnd_wbuf.ref0);
+    CHECK(ret, FAIL, "H5Rcreate_object");
+    ret = H5Rget_obj_type3(&cmpnd_wbuf.ref0, H5P_DEFAULT, &obj_type);
+    CHECK(ret, FAIL, "H5Rget_obj_type3");
+    VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+    /* Create reference to dataset */
+    ret = H5Rcreate_object(fid1, "/Group1/Dataset2", H5P_DEFAULT, &cmpnd_wbuf.ref1);
+    CHECK(ret, FAIL, "H5Rcreate_object");
+    ret = H5Rget_obj_type3(&cmpnd_wbuf.ref1, H5P_DEFAULT, &obj_type);
+    CHECK(ret, FAIL, "H5Rget_obj_type3");
+    VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+    /* Create reference to group */
+    ret = H5Rcreate_object(fid1, "/Group1", H5P_DEFAULT, &cmpnd_wbuf.ref2);
+    CHECK(ret, FAIL, "H5Rcreate_object");
+    ret = H5Rget_obj_type3(&cmpnd_wbuf.ref2, H5P_DEFAULT, &obj_type);
+    CHECK(ret, FAIL, "H5Rget_obj_type3");
+    VERIFY(obj_type, H5O_TYPE_GROUP, "H5Rget_obj_type3");
+
+    /* Create reference to named datatype */
+    ret = H5Rcreate_object(fid1, "/Group1/Datatype1", H5P_DEFAULT, &cmpnd_wbuf.ref3);
+    CHECK(ret, FAIL, "H5Rcreate_object");
+    ret = H5Rget_obj_type3(&cmpnd_wbuf.ref3, H5P_DEFAULT, &obj_type);
+    CHECK(ret, FAIL, "H5Rget_obj_type3");
+    VERIFY(obj_type, H5O_TYPE_NAMED_DATATYPE, "H5Rget_obj_type3");
+
+    /* Store dimensions */
+    cmpnd_wbuf.dim_idx = SPACE1_DIM1;
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, &cmpnd_wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open the file */
+    fid1 = H5Fopen(FILE_REF_CMPND_OBJ, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(fid1, H5I_INVALID_HID, "H5Fopen");
+
+    /* Open the dataset */
+    dataset = H5Dopen2(fid1, "/Dataset3", H5P_DEFAULT);
+    CHECK(dataset, H5I_INVALID_HID, "H5Dopen2");
+
+    tid1 = H5Dget_type(dataset);
+    CHECK(tid1, H5I_INVALID_HID, "H5Dget_type");
+
+    /* Read selection from disk */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, &cmpnd_rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    VERIFY(cmpnd_rbuf.dim_idx, SPACE1_DIM1, "H5Dread");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Open dataset object */
+    dset2 = H5Ropen_object(&cmpnd_rbuf.ref0, H5P_DEFAULT, dapl_id);
+    CHECK(dset2, H5I_INVALID_HID, "H5Ropen_object");
+
+    /* Check information in referenced dataset */
+    sid1 = H5Dget_space(dset2);
+    CHECK(sid1, H5I_INVALID_HID, "H5Dget_space");
+
+    ret = (int)H5Sget_simple_extent_npoints(sid1);
+    VERIFY(ret, SPACE1_DIM1, "H5Sget_simple_extent_npoints");
+
+    /* Read from disk */
+    ret = H5Dread(dset2, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        VERIFY(ibuf[i], i * 3, "Data");
+
+    /* Close dereferenced Dataset */
+    ret = H5Dclose(dset2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Open group object.  GAPL isn't supported yet.  But it's harmless to pass in */
+    group = H5Ropen_object(&cmpnd_rbuf.ref2, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(group, H5I_INVALID_HID, "H5Ropen_object");
+
+    /* Close group */
+    ret = H5Gclose(group);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Open datatype object.  TAPL isn't supported yet.  But it's harmless to pass in */
+    tid1 = H5Ropen_object(&cmpnd_rbuf.ref3, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(tid1, H5I_INVALID_HID, "H5Ropen_object");
+
+    /* Verify correct datatype */
+    {
+        H5T_class_t tclass;
+
+        tclass = H5Tget_class(tid1);
+        VERIFY(tclass, H5T_COMPOUND, "H5Tget_class");
+
+        ret = H5Tget_nmembers(tid1);
+        VERIFY(ret, 3, "H5Tget_nmembers");
+    }
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close dataset access property list */
+    ret = H5Pclose(dapl_id);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Destroy references */
+    ret = H5Rdestroy(&cmpnd_wbuf.ref0);
+    CHECK(ret, FAIL, "H5Rdestroy");
+    ret = H5Rdestroy(&cmpnd_wbuf.ref1);
+    CHECK(ret, FAIL, "H5Rdestroy");
+    ret = H5Rdestroy(&cmpnd_wbuf.ref2);
+    CHECK(ret, FAIL, "H5Rdestroy");
+    ret = H5Rdestroy(&cmpnd_wbuf.ref3);
+    CHECK(ret, FAIL, "H5Rdestroy");
+
+    ret = H5Rdestroy(&cmpnd_rbuf.ref0);
+    CHECK(ret, FAIL, "H5Rdestroy");
+    ret = H5Rdestroy(&cmpnd_rbuf.ref1);
+    CHECK(ret, FAIL, "H5Rdestroy");
+    ret = H5Rdestroy(&cmpnd_rbuf.ref2);
+    CHECK(ret, FAIL, "H5Rdestroy");
+    ret = H5Rdestroy(&cmpnd_rbuf.ref3);
+    CHECK(ret, FAIL, "H5Rdestroy");
+
+    /* Free memory buffers */
+    HDfree(ibuf);
+    HDfree(obuf);
+} /* test_reference_cmpnd_obj() */
+
+/****************************************************************
+**
+** test_reference_region(): Test basic H5R (reference) object reference code.
+** Tests references to various kinds of objects
+**
+** Note: The libver_low/libver_high parameters are added to create the file
+**       with the low and high bounds setting in fapl.
+**       Please see the RFC for "H5Sencode/H5Sdecode Format Change".
+**
+****************************************************************/
+static void
+test_reference_region(H5F_libver_t libver_low, H5F_libver_t libver_high)
+{
+    hid_t fid1; /* HDF5 File IDs */
+    hid_t fapl; /* File access property list */
+    hid_t dset1, /* Dataset ID */
+        dset2;   /* Dereferenced dataset ID */
+    hid_t sid1,  /* Dataspace ID #1 */
+        sid2;    /* Dataspace ID #2 */
+    hid_t   dapl_id; /* Dataset access property list */
+    hsize_t dims1[] = {SPACE1_DIM1}, dims2[] = {SPACE2_DIM1, SPACE2_DIM2};
+    hsize_t start[SPACE2_RANK];  /* Starting location of hyperslab */
+    hsize_t stride[SPACE2_RANK]; /* Stride of hyperslab */
+    hsize_t count[SPACE2_RANK];  /* Element count of hyperslab */
+    hsize_t block[SPACE2_RANK];  /* Block size of hyperslab */
+    hsize_t coord1[POINT1_NPOINTS][SPACE2_RANK]; /* Coordinates for point selection */
+    hsize_t *coords;             /* Coordinate buffer */
+    hsize_t low[SPACE2_RANK];    /* Selection bounds */
+    hsize_t high[SPACE2_RANK];   /* Selection bounds */
+    H5R_ref_t *wbuf = NULL,      /* buffer to write to disk */
+        *rbuf      = NULL;       /* buffer read from disk */
+    H5R_ref_t nvrbuf[3] = {{{{0}}}, {{{101}}}, {{{255}}}}; /* buffer with non-valid refs */
+    uint8_t *dwbuf = NULL,       /* Buffer for writing numeric data to disk */
+        *drbuf     = NULL;       /* Buffer for reading numeric data from disk */
+    uint8_t   *tu8;              /* Temporary pointer to uint8 data */
+    H5O_type_t obj_type;         /* Type of object */
+    int        i, j;             /* Counters */
+    hssize_t   hssize_ret;       /* hssize_t return value */
+    htri_t     tri_ret;          /* htri_t return value */
+    herr_t     ret;              /* Generic return value */
+    hid_t      dset_NA;          /* Dataset id for undefined reference */
+    hid_t      space_NA;         /* Dataspace id for undefined reference */
+    hsize_t    dims_NA[1] = {1}; /* Dims array for undefined reference */
+    H5R_ref_t  rdata_NA[1];      /* Read buffer */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Dataset Region Reference Functions\n"));
+
+    /* Allocate write & read buffers */
+    wbuf  = HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+    rbuf  = HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+    dwbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE2_DIM1 * SPACE2_DIM2));
+    drbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE2_DIM1 * SPACE2_DIM2));
+
+    for (tu8 = dwbuf, i = 0; i < (SPACE2_DIM1 * SPACE2_DIM2); i++)
+        *tu8++ = (uint8_t)(i * 3);
+
+    /* Create file access property list */
+    fapl = H5Pcreate(H5P_FILE_ACCESS);
+    CHECK(fapl, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Set the low/high version bounds in fapl */
+    ret = H5Pset_libver_bounds(fapl, libver_low, libver_high);
+    CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+    /* Create file with the fapl */
+    fid1 = H5Fcreate(FILE_REF_REG, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    CHECK(fid1, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+    CHECK(sid2, H5I_INVALID_HID, "H5Screate_simple");
+
+    /* Create dataset access property list */
+    dapl_id = H5Pcreate(H5P_DATASET_ACCESS);
+    CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Create a dataset */
+    dset2 = H5Dcreate2(fid1, "Dataset2", H5T_STD_U8LE, sid2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dset2, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dset2, H5T_STD_U8LE, H5S_ALL, H5S_ALL, H5P_DEFAULT, dwbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close Dataset */
+    ret = H5Dclose(dset2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Create dataspace for the reference dataset */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple");
+
+    /* Create a dataset */
+    H5E_BEGIN_TRY
+    {
+        dset1 = H5Dcreate2(fid1, "Dataset1", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+
+    if (dset1 < 0) {
+        VERIFY(libver_high <= H5F_LIBVER_V110, TRUE, "H5Dcreate2");
+
+        ret = H5Sclose(sid1);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        ret = H5Sclose(sid2);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        ret = H5Pclose(fapl);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        ret = H5Fclose(fid1);
+        CHECK(ret, FAIL, "H5Fclose");
+    }
+    else {
+
+        CHECK(dset1, H5I_INVALID_HID, "H5Dcreate2");
+
+        /* Create references */
+
+        /* Select 6x6 hyperslab for first reference */
+        start[0]  = 2;
+        start[1]  = 2;
+        stride[0] = 1;
+        stride[1] = 1;
+        count[0]  = 1;
+        count[1]  = 1;
+        block[0]  = 6;
+        block[1]  = 6;
+        ret       = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+        CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+        ret = (int)H5Sget_select_npoints(sid2);
+        VERIFY(ret, 36, "H5Sget_select_npoints");
+
+        /* Store first dataset region */
+        ret = H5Rcreate_region(fid1, "/Dataset2", sid2, H5P_DEFAULT, &wbuf[0]);
+        CHECK(ret, FAIL, "H5Rcreate_region");
+        ret = H5Rget_obj_type3(&wbuf[0], H5P_DEFAULT, &obj_type);
+        CHECK(ret, FAIL, "H5Rget_obj_type3");
+        VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+        /* Select sequence of ten points for second reference */
+        coord1[0][0] = 6;
+        coord1[0][1] = 9;
+        coord1[1][0] = 2;
+        coord1[1][1] = 2;
+        coord1[2][0] = 8;
+        coord1[2][1] = 4;
+        coord1[3][0] = 1;
+        coord1[3][1] = 6;
+        coord1[4][0] = 2;
+        coord1[4][1] = 8;
+        coord1[5][0] = 3;
+        coord1[5][1] = 2;
+        coord1[6][0] = 0;
+        coord1[6][1] = 4;
+        coord1[7][0] = 9;
+        coord1[7][1] = 0;
+        coord1[8][0] = 7;
+        coord1[8][1] = 1;
+        coord1[9][0] = 3;
+        coord1[9][1] = 3;
+        ret          = H5Sselect_elements(sid2, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord1);
+        CHECK(ret, FAIL, "H5Sselect_elements");
+
+        ret = (int)H5Sget_select_npoints(sid2);
+        VERIFY(ret, SPACE2_DIM2, "H5Sget_select_npoints");
+
+        /* Store second dataset region */
+        ret = H5Rcreate_region(fid1, "/Dataset2", sid2, H5P_DEFAULT, &wbuf[1]);
+        CHECK(ret, FAIL, "H5Rcreate_region");
+
+        /* Select unlimited hyperslab for third reference */
+        start[0]  = 1;
+        start[1]  = 8;
+        stride[0] = 4;
+        stride[1] = 1;
+        count[0]  = H5S_UNLIMITED;
+        count[1]  = 1;
+        block[0]  = 2;
+        block[1]  = 2;
+        ret       = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+        CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+        hssize_ret = H5Sget_select_npoints(sid2);
+        VERIFY(hssize_ret, (hssize_t)H5S_UNLIMITED, "H5Sget_select_npoints");
+
+        /* Store third dataset region */
+        ret = H5Rcreate_region(fid1, "/Dataset2", sid2, H5P_DEFAULT, &wbuf[2]);
+        CHECK(ret, FAIL, "H5Rcreate_region");
+
+        ret = H5Rget_obj_type3(&wbuf[2], H5P_DEFAULT, &obj_type);
+        CHECK(ret, FAIL, "H5Rget_obj_type3");
+        VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+        /* Store fourth dataset region */
+        ret = H5Rcreate_region(fid1, "/Dataset2", sid2, H5P_DEFAULT, &wbuf[3]);
+        CHECK(ret, FAIL, "H5Rcreate_region");
+
+        /* Write selection to disk */
+        ret = H5Dwrite(dset1, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf);
+        /* Check the write succeeded; a silent failure here would surface
+         * later as confusing read/verify errors */
+        CHECK(ret, FAIL, "H5Dwrite");
+
+        /*
+         * Store a dataset region reference which will not get written to disk
+         */
+
+        /* Create the dataspace of the region references */
+        space_NA = H5Screate_simple(1, dims_NA, NULL);
+        CHECK(space_NA, H5I_INVALID_HID, "H5Screate_simple");
+
+        /* Create the dataset and write the region references to it */
+        dset_NA = H5Dcreate2(fid1, "DS_NA", H5T_STD_REF, space_NA, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(dset_NA, H5I_INVALID_HID, "H5Dcreate");
+
+        /* Close and release resources for undefined region reference tests */
+        ret = H5Dclose(dset_NA);
+        CHECK(ret, FAIL, "H5Dclose");
+        ret = H5Sclose(space_NA);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        /* Close disk dataspace */
+        ret = H5Sclose(sid1);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        /* Close Dataset */
+        ret = H5Dclose(dset1);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* Close uint8 dataset dataspace */
+        ret = H5Sclose(sid2);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        /* Close file */
+        ret = H5Fclose(fid1);
+        CHECK(ret, FAIL, "H5Fclose");
+
+        /* Re-open the file */
+        fid1 = H5Fopen(FILE_REF_REG, H5F_ACC_RDWR, fapl);
+        CHECK(fid1, H5I_INVALID_HID, "H5Fopen");
+
+        /*
+         * Start the test of an undefined reference
+         */
+
+        /* Open the dataset of the undefined references */
+        dset_NA = H5Dopen2(fid1, "DS_NA", H5P_DEFAULT);
+        CHECK(dset_NA, H5I_INVALID_HID, "H5Dopen2");
+
+        /* Read the data */
+        ret = H5Dread(dset_NA, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata_NA);
+        CHECK(ret, FAIL, "H5Dread");
+
+        /*
+         * Dereference an undefined reference (should fail)
+         */
+        H5E_BEGIN_TRY
+        {
+            dset2 = H5Ropen_object(&rdata_NA[0], H5P_DEFAULT, H5P_DEFAULT);
+        }
+        H5E_END_TRY;
+        VERIFY(dset2, H5I_INVALID_HID, "H5Ropen_object");
+
+        /* Close and release resources. */
+        ret = H5Dclose(dset_NA);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* This close should fail since H5Ropen_object never created
+         * the id of the referenced object. */
+        H5E_BEGIN_TRY
+        {
+            ret = H5Dclose(dset2);
+        }
+        H5E_END_TRY;
+        VERIFY(ret, FAIL, "H5Dclose");
+
+        /*
+         * End the test of an undefined reference
+         */
+
+        /* Open the dataset */
+        dset1 = H5Dopen2(fid1, "/Dataset1", H5P_DEFAULT);
+        CHECK(dset1, H5I_INVALID_HID, "H5Dopen2");
+
+        /* Read selection from disk */
+        ret = H5Dread(dset1, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf);
+        CHECK(ret, FAIL, "H5Dread");
+
+        /* Try to open objects */
+        dset2 = H5Ropen_object(&rbuf[0], H5P_DEFAULT, dapl_id);
+        CHECK(dset2, H5I_INVALID_HID, "H5Ropen_object");
+
+        /* Check what H5Rget_obj_type3 function returns */
+        ret = H5Rget_obj_type3(&rbuf[0], H5P_DEFAULT, &obj_type);
+        CHECK(ret, FAIL, "H5Rget_obj_type3");
+        VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+        /* Check information in referenced dataset */
+        sid1 = H5Dget_space(dset2);
+        CHECK(sid1, H5I_INVALID_HID, "H5Dget_space");
+
+        ret = (int)H5Sget_simple_extent_npoints(sid1);
+        VERIFY(ret, (SPACE2_DIM1 * SPACE2_DIM2), "H5Sget_simple_extent_npoints");
+
+        /* Read from disk */
+        ret = H5Dread(dset2, H5T_STD_U8LE, H5S_ALL, H5S_ALL, H5P_DEFAULT, drbuf);
+        CHECK(ret, FAIL, "H5Dread");
+
+        for (tu8 = (uint8_t *)drbuf, i = 0; i < (SPACE2_DIM1 * SPACE2_DIM2); i++, tu8++)
+            VERIFY(*tu8, (uint8_t)(i * 3), "Data");
+
+        /* Get the hyperslab selection */
+        sid2 = H5Ropen_region(&rbuf[0], H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(sid2, H5I_INVALID_HID, "H5Ropen_region");
+
+        /* Verify correct hyperslab selected */
+        ret = (int)H5Sget_select_npoints(sid2);
+        VERIFY(ret, 36, "H5Sget_select_npoints");
+        ret = (int)H5Sget_select_hyper_nblocks(sid2);
+        VERIFY(ret, 1, "H5Sget_select_hyper_nblocks");
+
+        /* allocate space for the hyperslab blocks */
+        coords = (hsize_t *)HDmalloc((size_t)ret * SPACE2_RANK * sizeof(hsize_t) * 2);
+
+        ret = H5Sget_select_hyper_blocklist(sid2, (hsize_t)0, (hsize_t)ret, coords);
+        CHECK(ret, FAIL, "H5Sget_select_hyper_blocklist");
+        VERIFY(coords[0], 2, "Hyperslab Coordinates");
+        VERIFY(coords[1], 2, "Hyperslab Coordinates");
+        VERIFY(coords[2], 7, "Hyperslab Coordinates");
+        VERIFY(coords[3], 7, "Hyperslab Coordinates");
+        HDfree(coords);
+        ret = H5Sget_select_bounds(sid2, low, high);
+        CHECK(ret, FAIL, "H5Sget_select_bounds");
+        VERIFY(low[0], 2, "Selection Bounds");
+        VERIFY(low[1], 2, "Selection Bounds");
+        VERIFY(high[0], 7, "Selection Bounds");
+        VERIFY(high[1], 7, "Selection Bounds");
+
+        /* Close region space */
+        ret = H5Sclose(sid2);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        /* Get the element selection */
+        sid2 = H5Ropen_region(&rbuf[1], H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(sid2, H5I_INVALID_HID, "H5Ropen_region");
+
+        /* Verify correct elements selected */
+        ret = (int)H5Sget_select_npoints(sid2);
+        VERIFY(ret, SPACE2_DIM2, "H5Sget_select_npoints");
+        ret = (int)H5Sget_select_elem_npoints(sid2);
+        VERIFY(ret, SPACE2_DIM2, "H5Sget_select_elem_npoints");
+
+        /* allocate space for the element points */
+        coords = (hsize_t *)HDmalloc((size_t)ret * SPACE2_RANK * sizeof(hsize_t));
+
+        ret = H5Sget_select_elem_pointlist(sid2, (hsize_t)0, (hsize_t)ret, coords);
+        CHECK(ret, FAIL, "H5Sget_select_elem_pointlist");
+        VERIFY(coords[0], coord1[0][0], "Element Coordinates");
+        VERIFY(coords[1], coord1[0][1], "Element Coordinates");
+        VERIFY(coords[2], coord1[1][0], "Element Coordinates");
+        VERIFY(coords[3], coord1[1][1], "Element Coordinates");
+        VERIFY(coords[4], coord1[2][0], "Element Coordinates");
+        VERIFY(coords[5], coord1[2][1], "Element Coordinates");
+        VERIFY(coords[6], coord1[3][0], "Element Coordinates");
+        VERIFY(coords[7], coord1[3][1], "Element Coordinates");
+        VERIFY(coords[8], coord1[4][0], "Element Coordinates");
+        VERIFY(coords[9], coord1[4][1], "Element Coordinates");
+        VERIFY(coords[10], coord1[5][0], "Element Coordinates");
+        VERIFY(coords[11], coord1[5][1], "Element Coordinates");
+        VERIFY(coords[12], coord1[6][0], "Element Coordinates");
+        VERIFY(coords[13], coord1[6][1], "Element Coordinates");
+        VERIFY(coords[14], coord1[7][0], "Element Coordinates");
+        VERIFY(coords[15], coord1[7][1], "Element Coordinates");
+        VERIFY(coords[16], coord1[8][0], "Element Coordinates");
+        VERIFY(coords[17], coord1[8][1], "Element Coordinates");
+        VERIFY(coords[18], coord1[9][0], "Element Coordinates");
+        VERIFY(coords[19], coord1[9][1], "Element Coordinates");
+        HDfree(coords);
+        ret = H5Sget_select_bounds(sid2, low, high);
+        CHECK(ret, FAIL, "H5Sget_select_bounds");
+        VERIFY(low[0], 0, "Selection Bounds");
+        VERIFY(low[1], 0, "Selection Bounds");
+        VERIFY(high[0], 9, "Selection Bounds");
+        VERIFY(high[1], 9, "Selection Bounds");
+
+        /* Close region space */
+        ret = H5Sclose(sid2);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        /* Get the unlimited selection */
+        sid2 = H5Ropen_region(&rbuf[2], H5P_DEFAULT, H5P_DEFAULT);
+        CHECK(sid2, H5I_INVALID_HID, "H5Ropen_region");
+
+        /* Verify correct hyperslab selected */
+        hssize_ret = H5Sget_select_npoints(sid2);
+        VERIFY(hssize_ret, (hssize_t)H5S_UNLIMITED, "H5Sget_select_npoints");
+        tri_ret = H5Sis_regular_hyperslab(sid2);
+        CHECK(tri_ret, FAIL, "H5Sis_regular_hyperslab");
+        VERIFY(tri_ret, TRUE, "H5Sis_regular_hyperslab Result");
+        ret = H5Sget_regular_hyperslab(sid2, start, stride, count, block);
+        CHECK(ret, FAIL, "H5Sget_regular_hyperslab");
+        VERIFY(start[0], (hsize_t)1, "Hyperslab Coordinates");
+        VERIFY(start[1], (hsize_t)8, "Hyperslab Coordinates");
+        VERIFY(stride[0], (hsize_t)4, "Hyperslab Coordinates");
+        VERIFY(stride[1], (hsize_t)1, "Hyperslab Coordinates");
+        VERIFY(count[0], H5S_UNLIMITED, "Hyperslab Coordinates");
+        VERIFY(count[1], (hsize_t)1, "Hyperslab Coordinates");
+        VERIFY(block[0], (hsize_t)2, "Hyperslab Coordinates");
+        VERIFY(block[1], (hsize_t)2, "Hyperslab Coordinates");
+
+        /* Close region space */
+        ret = H5Sclose(sid2);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        /* Close first space */
+        ret = H5Sclose(sid1);
+        CHECK(ret, FAIL, "H5Sclose");
+
+        /* Close dereferenced Dataset */
+        ret = H5Dclose(dset2);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* Attempting to retrieve type of object using non-valid refs */
+        for (j = 0; j < 3; j++) {
+            H5E_BEGIN_TRY
+            {
+                ret = H5Rget_obj_type3(&nvrbuf[j], H5P_DEFAULT, &obj_type);
+            }
+            H5E_END_TRY;
+            VERIFY(ret, FAIL, "H5Rget_obj_type3");
+        } /* end for */
+
+        /* Close Dataset */
+        ret = H5Dclose(dset1);
+        CHECK(ret, FAIL, "H5Dclose");
+
+        /* Close dataset access property list */
+        ret = H5Pclose(dapl_id);
+        CHECK(ret, FAIL, "H5Pclose");
+
+        /* Close file */
+        ret = H5Fclose(fid1);
+        CHECK(ret, FAIL, "H5Fclose");
+
+        /* Destroy references */
+        for (j = 0; j < SPACE1_DIM1; j++) {
+            ret = H5Rdestroy(&wbuf[j]);
+            CHECK(ret, FAIL, "H5Rdestroy");
+            ret = H5Rdestroy(&rbuf[j]);
+            CHECK(ret, FAIL, "H5Rdestroy");
+        }
+    }
+
+    /* Free memory buffers */
+    HDfree(wbuf);
+    HDfree(rbuf);
+    HDfree(dwbuf);
+    HDfree(drbuf);
+
+} /* test_reference_region() */
+
+/****************************************************************
+**
+** test_reference_region_1D(): Test H5R (reference) object reference code.
+** Tests 1-D references to various kinds of objects
+**
+** Note: The libver_low/libver_high parameters are added to create the file
+** with the low and high bounds setting in fapl.
+** Please see the RFC for "H5Sencode/H5Sdecode Format Change".
+**
+****************************************************************/
+static void
+test_reference_region_1D(H5F_libver_t libver_low, H5F_libver_t libver_high)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t fapl; /* File access property list */
+ hid_t dset1, /* Dataset ID */
+ dset3; /* Dereferenced dataset ID */
+ hid_t sid1, /* Dataspace ID #1 */
+ sid3; /* Dataspace ID #3 */
+ hid_t dapl_id; /* Dataset access property list */
+ hsize_t dims1[] = {2}, /* Must be 2 */
+ dims3[] = {SPACE3_DIM1};
+ hsize_t start[SPACE3_RANK]; /* Starting location of hyperslab */
+ hsize_t stride[SPACE3_RANK]; /* Stride of hyperslab */
+ hsize_t count[SPACE3_RANK]; /* Element count of hyperslab */
+ hsize_t block[SPACE3_RANK]; /* Block size of hyperslab */
+ hsize_t coord1[POINT1_NPOINTS][SPACE3_RANK]; /* Coordinates for point selection */
+ hsize_t *coords; /* Coordinate buffer */
+ hsize_t low[SPACE3_RANK]; /* Selection bounds */
+ hsize_t high[SPACE3_RANK]; /* Selection bounds */
+ H5R_ref_t *wbuf = NULL, /* buffer to write to disk */
+ *rbuf = NULL; /* buffer read from disk */
+ uint8_t *dwbuf = NULL, /* Buffer for writing numeric data to disk */
+ *drbuf = NULL; /* Buffer for reading numeric data from disk */
+ uint8_t *tu8; /* Temporary pointer to uint8 data */
+ H5O_type_t obj_type; /* Object type */
+ int i; /* Counter */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing 1-D Dataset Region Reference Functions\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = HDcalloc(sizeof(H5R_ref_t), (size_t)SPACE1_DIM1);
+ rbuf = HDcalloc(sizeof(H5R_ref_t), (size_t)SPACE1_DIM1);
+ dwbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)SPACE3_DIM1);
+ drbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)SPACE3_DIM1);
+
+ for (tu8 = dwbuf, i = 0; i < SPACE3_DIM1; i++)
+ *tu8++ = (uint8_t)(i * 3);
+
+ /* Create the file access property list */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, H5I_INVALID_HID, "H5Pcreate");
+
+ /* Set the low/high version bounds in fapl */
+ ret = H5Pset_libver_bounds(fapl, libver_low, libver_high);
+ CHECK(ret, FAIL, "H5Pset_libver_bounds");
+
+ /* Create file with the fapl */
+ fid1 = H5Fcreate(FILE_REF_REG_1D, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(fid1, H5I_INVALID_HID, "H5Fcreate");
+
+ /* Create dataspace for datasets */
+ sid3 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+ CHECK(sid3, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Create dataset access property list */
+ dapl_id = H5Pcreate(H5P_DATASET_ACCESS);
+ CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate");
+
+ /* Create a dataset */
+ dset3 = H5Dcreate2(fid1, "Dataset2", H5T_STD_U8LE, sid3, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset3, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dset3, H5T_STD_U8LE, H5S_ALL, H5S_ALL, H5P_DEFAULT, dwbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close Dataset */
+ ret = H5Dclose(dset3);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Create dataspace for the reference dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Create a dataset */
+ H5E_BEGIN_TRY
+ {
+ dset1 = H5Dcreate2(fid1, "Dataset1", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ }
+ H5E_END_TRY;
+
+ if (dset1 < 0) {
+
+ VERIFY(libver_high <= H5F_LIBVER_V110, TRUE, "H5Dcreate2");
+
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(sid3);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+ }
+ else {
+
+ CHECK(ret, FAIL, "H5Dcreate2");
+
+ /* Create references */
+
+ /* Select 15 2x1 hyperslabs for first reference */
+ start[0] = 2;
+ stride[0] = 5;
+ count[0] = 15;
+ block[0] = 2;
+ ret = H5Sselect_hyperslab(sid3, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ ret = (int)H5Sget_select_npoints(sid3);
+ VERIFY(ret, (block[0] * count[0]), "H5Sget_select_npoints");
+
+ /* Store first dataset region */
+ ret = H5Rcreate_region(fid1, "/Dataset2", sid3, H5P_DEFAULT, &wbuf[0]);
+ CHECK(ret, FAIL, "H5Rcreate_region");
+ ret = H5Rget_obj_type3(&wbuf[0], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+ /* Select sequence of ten points for second reference */
+ coord1[0][0] = 16;
+ coord1[1][0] = 22;
+ coord1[2][0] = 38;
+ coord1[3][0] = 41;
+ coord1[4][0] = 52;
+ coord1[5][0] = 63;
+ coord1[6][0] = 70;
+ coord1[7][0] = 89;
+ coord1[8][0] = 97;
+ coord1[9][0] = 03;
+ ret = H5Sselect_elements(sid3, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ ret = (int)H5Sget_select_npoints(sid3);
+ VERIFY(ret, POINT1_NPOINTS, "H5Sget_select_npoints");
+
+ /* Store second dataset region */
+ ret = H5Rcreate_region(fid1, "/Dataset2", sid3, H5P_DEFAULT, &wbuf[1]);
+ CHECK(ret, FAIL, "H5Rcreate_region");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dset1, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dset1);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close uint8 dataset dataspace */
+ ret = H5Sclose(sid3);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open the file */
+ fid1 = H5Fopen(FILE_REF_REG_1D, H5F_ACC_RDWR, fapl);
+ CHECK(fid1, H5I_INVALID_HID, "H5Fopen");
+
+ /* Open the dataset */
+ dset1 = H5Dopen2(fid1, "/Dataset1", H5P_DEFAULT);
+ CHECK(dset1, H5I_INVALID_HID, "H5Dopen2");
+
+ /* Read selection from disk */
+ ret = H5Dread(dset1, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Try to open objects */
+ dset3 = H5Ropen_object(&rbuf[0], H5P_DEFAULT, dapl_id);
+ CHECK(dset3, H5I_INVALID_HID, "H5Ropen_object");
+
+ /* Check what H5Rget_obj_type3 function returns */
+ ret = H5Rget_obj_type3(&rbuf[0], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+ /* Check information in referenced dataset */
+ sid1 = H5Dget_space(dset3);
+ CHECK(sid1, H5I_INVALID_HID, "H5Dget_space");
+
+ ret = (int)H5Sget_simple_extent_npoints(sid1);
+ VERIFY(ret, SPACE3_DIM1, "H5Sget_simple_extent_npoints");
+
+ /* Read from disk */
+ ret = H5Dread(dset3, H5T_STD_U8LE, H5S_ALL, H5S_ALL, H5P_DEFAULT, drbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ for (tu8 = (uint8_t *)drbuf, i = 0; i < SPACE3_DIM1; i++, tu8++)
+ VERIFY(*tu8, (uint8_t)(i * 3), "Data");
+
+ /* Get the hyperslab selection */
+ sid3 = H5Ropen_region(&rbuf[0], H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(sid3, H5I_INVALID_HID, "H5Ropen_region");
+
+ /* Verify correct hyperslab selected */
+ ret = (int)H5Sget_select_npoints(sid3);
+ VERIFY(ret, 30, "H5Sget_select_npoints");
+ ret = (int)H5Sget_select_hyper_nblocks(sid3);
+ VERIFY(ret, 15, "H5Sget_select_hyper_nblocks");
+
+ /* allocate space for the hyperslab blocks */
+ coords = (hsize_t *)HDmalloc((size_t)ret * SPACE3_RANK * sizeof(hsize_t) * 2);
+
+ ret = H5Sget_select_hyper_blocklist(sid3, (hsize_t)0, (hsize_t)ret, coords);
+ CHECK(ret, FAIL, "H5Sget_select_hyper_blocklist");
+ VERIFY(coords[0], 2, "Hyperslab Coordinates");
+ VERIFY(coords[1], 3, "Hyperslab Coordinates");
+ VERIFY(coords[2], 7, "Hyperslab Coordinates");
+ VERIFY(coords[3], 8, "Hyperslab Coordinates");
+ VERIFY(coords[4], 12, "Hyperslab Coordinates");
+ VERIFY(coords[5], 13, "Hyperslab Coordinates");
+ VERIFY(coords[6], 17, "Hyperslab Coordinates");
+ VERIFY(coords[7], 18, "Hyperslab Coordinates");
+ VERIFY(coords[8], 22, "Hyperslab Coordinates");
+ VERIFY(coords[9], 23, "Hyperslab Coordinates");
+ VERIFY(coords[10], 27, "Hyperslab Coordinates");
+ VERIFY(coords[11], 28, "Hyperslab Coordinates");
+ VERIFY(coords[12], 32, "Hyperslab Coordinates");
+ VERIFY(coords[13], 33, "Hyperslab Coordinates");
+ VERIFY(coords[14], 37, "Hyperslab Coordinates");
+ VERIFY(coords[15], 38, "Hyperslab Coordinates");
+ VERIFY(coords[16], 42, "Hyperslab Coordinates");
+ VERIFY(coords[17], 43, "Hyperslab Coordinates");
+ VERIFY(coords[18], 47, "Hyperslab Coordinates");
+ VERIFY(coords[19], 48, "Hyperslab Coordinates");
+ VERIFY(coords[20], 52, "Hyperslab Coordinates");
+ VERIFY(coords[21], 53, "Hyperslab Coordinates");
+ VERIFY(coords[22], 57, "Hyperslab Coordinates");
+ VERIFY(coords[23], 58, "Hyperslab Coordinates");
+ VERIFY(coords[24], 62, "Hyperslab Coordinates");
+ VERIFY(coords[25], 63, "Hyperslab Coordinates");
+ VERIFY(coords[26], 67, "Hyperslab Coordinates");
+ VERIFY(coords[27], 68, "Hyperslab Coordinates");
+ VERIFY(coords[28], 72, "Hyperslab Coordinates");
+ VERIFY(coords[29], 73, "Hyperslab Coordinates");
+ HDfree(coords);
+ ret = H5Sget_select_bounds(sid3, low, high);
+ CHECK(ret, FAIL, "H5Sget_select_bounds");
+ VERIFY(low[0], 2, "Selection Bounds");
+ VERIFY(high[0], 73, "Selection Bounds");
+
+ /* Close region space */
+ ret = H5Sclose(sid3);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Get the element selection */
+ sid3 = H5Ropen_region(&rbuf[1], H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(sid3, H5I_INVALID_HID, "H5Ropen_region");
+
+ /* Verify correct elements selected */
+ ret = (int)H5Sget_select_npoints(sid3);
+ VERIFY(ret, 10, "H5Sget_select_npoints");
+ ret = (int)H5Sget_select_elem_npoints(sid3);
+ VERIFY(ret, 10, "H5Sget_select_elem_npoints");
+
+ /* allocate space for the element points */
+ coords = (hsize_t *)HDmalloc((size_t)ret * SPACE3_RANK * sizeof(hsize_t));
+
+ ret = H5Sget_select_elem_pointlist(sid3, (hsize_t)0, (hsize_t)ret, coords);
+ CHECK(ret, FAIL, "H5Sget_select_elem_pointlist");
+ VERIFY(coords[0], coord1[0][0], "Element Coordinates");
+ VERIFY(coords[1], coord1[1][0], "Element Coordinates");
+ VERIFY(coords[2], coord1[2][0], "Element Coordinates");
+ VERIFY(coords[3], coord1[3][0], "Element Coordinates");
+ VERIFY(coords[4], coord1[4][0], "Element Coordinates");
+ VERIFY(coords[5], coord1[5][0], "Element Coordinates");
+ VERIFY(coords[6], coord1[6][0], "Element Coordinates");
+ VERIFY(coords[7], coord1[7][0], "Element Coordinates");
+ VERIFY(coords[8], coord1[8][0], "Element Coordinates");
+ VERIFY(coords[9], coord1[9][0], "Element Coordinates");
+ HDfree(coords);
+ ret = H5Sget_select_bounds(sid3, low, high);
+ CHECK(ret, FAIL, "H5Sget_select_bounds");
+ VERIFY(low[0], 3, "Selection Bounds");
+ VERIFY(high[0], 97, "Selection Bounds");
+
+ /* Close region space */
+ ret = H5Sclose(sid3);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close first space */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close dereferenced Dataset */
+ ret = H5Dclose(dset3);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dset1);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close dataset access property list */
+ ret = H5Pclose(dapl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close file access property list */
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Destroy references */
+ for (i = 0; i < 2; i++) {
+ ret = H5Rdestroy(&wbuf[i]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ ret = H5Rdestroy(&rbuf[i]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ }
+ }
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+ HDfree(dwbuf);
+ HDfree(drbuf);
+
+} /* test_reference_region_1D() */
+
+/****************************************************************
+**
+**  test_reference_obj_deleted(): Test H5R (reference) object reference code.
+**      Tests for correct failures for deleted and non-existent objects
+**
+****************************************************************/
+static void
+test_reference_obj_deleted(void)
+{
+#ifndef NO_REFERENCE_TO_DELETED
+    hid_t fid1;    /* HDF5 File IDs */
+    hid_t dataset, /* Dataset ID */
+        dset2;     /* Dereferenced dataset ID */
+    hid_t      sid1;     /* Dataspace ID */
+    H5R_ref_t  oref;     /* Object Reference to test */
+    H5O_type_t obj_type; /* Object type */
+    herr_t     ret;      /* Generic return value */
+#endif
+    MESSAGE(5, ("Testing References to Deleted Objects - SKIPPED for now due to no support\n"));
+#ifndef NO_REFERENCE_TO_DELETED
+    /* Create file */
+    fid1 = H5Fcreate(FILE_REF_OBJ_DEL, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Create scalar dataspace for datasets */
+    sid1 = H5Screate_simple(0, NULL, NULL);
+    CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple");
+
+    /* Create a dataset to reference (deleted later) */
+    dataset = H5Dcreate2(fid1, "Dataset1", H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Create a dataset to hold the object reference */
+    dataset = H5Dcreate2(fid1, "Dataset2", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Create reference to dataset */
+    ret = H5Rcreate_object(fid1, "/Dataset1", H5P_DEFAULT, &oref);
+    CHECK(ret, FAIL, "H5Rcreate_object");
+    ret = H5Rget_obj_type3(&oref, H5P_DEFAULT, &obj_type);
+    CHECK(ret, FAIL, "H5Rget_obj_type3");
+    VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, &oref);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Delete referenced dataset, so the stored reference now dangles */
+    ret = H5Ldelete(fid1, "/Dataset1", H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Ldelete");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Destroy reference */
+    ret = H5Rdestroy(&oref);
+    CHECK(ret, FAIL, "H5Rdestroy");
+
+    /* Re-open the file */
+    fid1 = H5Fopen(FILE_REF_OBJ_DEL, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(fid1, H5I_INVALID_HID, "H5Fopen");
+
+    /* Open the dataset */
+    dataset = H5Dopen2(fid1, "/Dataset2", H5P_DEFAULT);
+    /* Bug fix: validate the returned dataset ID, not the stale 'ret' value */
+    CHECK(dataset, H5I_INVALID_HID, "H5Dopen2");
+
+    /* Read selection from disk */
+    ret = H5Dread(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, &oref);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Opening the deleted dataset object must fail (target is gone) */
+    dset2 = H5Ropen_object(&oref, H5P_DEFAULT, H5P_DEFAULT);
+    VERIFY(dset2, H5I_INVALID_HID, "H5Ropen_object");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Destroy reference */
+    ret = H5Rdestroy(&oref);
+    CHECK(ret, FAIL, "H5Rdestroy");
+#endif
+} /* test_reference_obj_deleted() */
+
+/****************************************************************
+**
+**  test_deref_iter_op(): Iterator callback for test_reference_group_iterate()
+**      test.
+**
+****************************************************************/
+static herr_t
+test_deref_iter_op(hid_t H5_ATTR_UNUSED group, const char *name, const H5L_info2_t H5_ATTR_UNUSED *info,
+                   void *op_data)
+{
+    int   *visit_count = (int *)op_data; /* Number of links visited so far */
+    herr_t ret_value;
+
+    /* Links are visited in increasing name order, so this callback expects
+     * exactly the sequence DSETNAME2, GROUPNAME2, GROUPNAME3; any other
+     * name, or a fourth invocation, is reported as an error.
+     */
+    switch (*visit_count) {
+        case 0:
+            ret_value = (HDstrcmp(name, DSETNAME2) == 0) ? 0 : -1;
+            break;
+        case 1:
+            ret_value = (HDstrcmp(name, GROUPNAME2) == 0) ? 0 : -1;
+            break;
+        case 2:
+            ret_value = (HDstrcmp(name, GROUPNAME3) == 0) ? 0 : -1;
+            break;
+        default:
+            ret_value = -1;
+            break;
+    }
+
+    (*visit_count)++;
+
+    return (ret_value);
+} /* end test_deref_iter_op() */
+
+/****************************************************************
+**
+**  test_reference_group(): Test H5R (reference) object reference code.
+**      Tests for correct behavior of various routines on dereferenced group
+**
+****************************************************************/
+static void
+test_reference_group(void)
+{
+    hid_t fid = H5I_INVALID_HID;                        /* File ID */
+    hid_t gid = H5I_INVALID_HID, gid2 = H5I_INVALID_HID; /* Group IDs */
+    hid_t       did;               /* Dataset ID */
+    hid_t       sid;               /* Dataspace ID */
+    H5R_ref_t   wref;              /* Reference to write */
+    H5R_ref_t   rref;              /* Reference to read */
+    H5G_info_t  ginfo;             /* Group info struct */
+    char        objname[NAME_SIZE]; /* Buffer to store name */
+    H5O_info2_t oinfo;             /* Object info struct */
+    int         count = 0;         /* Count within iterated group */
+    ssize_t     size;              /* Name length */
+    herr_t      ret;
+
+    /* Create file with a group and a dataset containing an object reference to the group */
+    fid = H5Fcreate(FILE_REF_GRP, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Create dataspace to use for dataset */
+    sid = H5Screate(H5S_SCALAR);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate");
+
+    /* Create group to refer to */
+    gid = H5Gcreate2(fid, GROUPNAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, H5I_INVALID_HID, "H5Gcreate2");
+
+    /* Create nested groups */
+    gid2 = H5Gcreate2(gid, GROUPNAME2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid2, H5I_INVALID_HID, "H5Gcreate2");
+    ret = H5Gclose(gid2);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    gid2 = H5Gcreate2(gid, GROUPNAME3, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid2, H5I_INVALID_HID, "H5Gcreate2");
+    ret = H5Gclose(gid2);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Create bottom dataset */
+    did = H5Dcreate2(gid, DSETNAME2, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did, H5I_INVALID_HID, "H5Dcreate2");
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Create dataset to hold the reference */
+    did = H5Dcreate2(fid, DSETNAME, H5T_STD_REF, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(did, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Create reference to group */
+    ret = H5Rcreate_object(fid, GROUPNAME, H5P_DEFAULT, &wref);
+    CHECK(ret, FAIL, "H5Rcreate_object");
+
+    /* Write reference to disk */
+    ret = H5Dwrite(did, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, &wref);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close objects */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Destroy reference */
+    ret = H5Rdestroy(&wref);
+    CHECK(ret, FAIL, "H5Rdestroy");
+
+    /* Re-open file */
+    fid = H5Fopen(FILE_REF_GRP, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(fid, H5I_INVALID_HID, "H5Fopen");
+
+    /* Re-open dataset */
+    did = H5Dopen2(fid, DSETNAME, H5P_DEFAULT);
+    CHECK(did, H5I_INVALID_HID, "H5Dopen2");
+
+    /* Read in the reference */
+    ret = H5Dread(did, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rref);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Dereference to get the group */
+    gid = H5Ropen_object(&rref, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(gid, H5I_INVALID_HID, "H5Ropen_object");
+
+    /* Iterate through objects in dereferenced group */
+    ret = H5Literate2(gid, H5_INDEX_NAME, H5_ITER_INC, NULL, test_deref_iter_op, &count);
+    /* Bug fix: failure message names the routine actually called (H5Literate2) */
+    CHECK(ret, FAIL, "H5Literate2");
+
+    /* Various queries on the group opened */
+    ret = H5Gget_info(gid, &ginfo);
+    CHECK(ret, FAIL, "H5Gget_info");
+    VERIFY(ginfo.nlinks, 3, "H5Gget_info");
+
+    size = H5Lget_name_by_idx(gid, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)0, objname, (size_t)NAME_SIZE,
+                              H5P_DEFAULT);
+    CHECK(size, (-1), "H5Lget_name_by_idx");
+    VERIFY_STR(objname, DSETNAME2, "H5Lget_name_by_idx");
+
+    ret = H5Oget_info_by_idx3(gid, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)0, &oinfo, H5O_INFO_BASIC,
+                              H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Oget_info_by_idx3");
+    VERIFY(oinfo.type, H5O_TYPE_DATASET, "H5Oget_info_by_idx3");
+
+    /* Unlink one of the objects in the dereferenced group */
+    ret = H5Ldelete(gid, GROUPNAME2, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Ldelete");
+
+    /* Delete dataset object in dereferenced group (with other dataset still open) */
+    ret = H5Ldelete(gid, DSETNAME2, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Ldelete");
+
+    /* Close objects */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+    ret = H5Gclose(gid);
+    CHECK(ret, FAIL, "H5Gclose");
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Destroy reference */
+    ret = H5Rdestroy(&rref);
+    CHECK(ret, FAIL, "H5Rdestroy");
+} /* test_reference_group() */
+
+/****************************************************************
+**
+**  test_reference_attr(): Test basic H5R (reference) attribute reference code.
+**      Tests references to attributes on various kinds of objects
+**
+****************************************************************/
+static void
+test_reference_attr(void)
+{
+    hid_t   fid;     /* HDF5 File ID */
+    hid_t   dataset; /* Dataset ID */
+    hid_t   group;   /* Group ID */
+    hid_t   attr;    /* Attribute ID */
+    hid_t   sid;     /* Dataspace ID */
+    hid_t   tid;     /* Datatype ID */
+    hsize_t dims[] = {SPACE1_DIM1};
+    hid_t   dapl_id;                 /* Dataset access property list */
+    H5R_ref_t ref_wbuf[SPACE1_DIM1], /* Buffer to write to disk */
+        ref_rbuf[SPACE1_DIM1];       /* Buffer read from disk */
+    unsigned   wbuf[SPACE1_DIM1], rbuf[SPACE1_DIM1];
+    unsigned   i;        /* Local index variables */
+    H5O_type_t obj_type; /* Object type */
+    herr_t     ret;      /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Attribute Reference Functions\n"));
+
+    /* Create file */
+    fid = H5Fcreate(FILE_REF_ATTR, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid = H5Screate_simple(SPACE1_RANK, dims, NULL);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate_simple");
+
+    /* Create dataset access property list */
+    dapl_id = H5Pcreate(H5P_DATASET_ACCESS);
+    CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Create a group */
+    group = H5Gcreate2(fid, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(group, H5I_INVALID_HID, "H5Gcreate2");
+
+    /* Create an attribute for the group (values (i*3)+1) */
+    attr = H5Acreate2(group, "Attr2", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, H5I_INVALID_HID, "H5Acreate2");
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        wbuf[i] = (i * 3) + 1;
+
+    /* Write attribute to disk */
+    ret = H5Awrite(attr, H5T_NATIVE_UINT, wbuf);
+    CHECK(ret, FAIL, "H5Awrite");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Create a dataset (inside Group1) */
+    dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Create an attribute for the dataset (values i*3) */
+    attr = H5Acreate2(dataset, "Attr1", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, H5I_INVALID_HID, "H5Acreate2");
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        wbuf[i] = i * 3;
+
+    /* Write attribute to disk */
+    ret = H5Awrite(attr, H5T_NATIVE_UINT, wbuf);
+    CHECK(ret, FAIL, "H5Awrite");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Create another dataset (inside Group1) */
+    dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Create a datatype to refer to */
+    tid = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+    CHECK(tid, H5I_INVALID_HID, "H5Tcreate");
+
+    /* Insert fields */
+    ret = H5Tinsert(tid, "a", HOFFSET(s1_t, a), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    ret = H5Tinsert(tid, "b", HOFFSET(s1_t, b), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    ret = H5Tinsert(tid, "c", HOFFSET(s1_t, c), H5T_NATIVE_FLOAT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Save datatype for later */
+    ret = H5Tcommit2(group, "Datatype1", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+
+    /* Create an attribute for the datatype (values (i*3)+2) */
+    attr = H5Acreate2(tid, "Attr3", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, H5I_INVALID_HID, "H5Acreate2");
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        wbuf[i] = (i * 3) + 2;
+
+    /* Write attribute to disk */
+    ret = H5Awrite(attr, H5T_NATIVE_UINT, wbuf);
+    CHECK(ret, FAIL, "H5Awrite");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close group */
+    ret = H5Gclose(group);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Create a dataset to hold the attribute references */
+    dataset = H5Dcreate2(fid, "Dataset3", H5T_STD_REF, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Create reference to dataset1 attribute */
+    ret = H5Rcreate_attr(fid, "/Group1/Dataset1", "Attr1", H5P_DEFAULT, &ref_wbuf[0]);
+    CHECK(ret, FAIL, "H5Rcreate_attr");
+    ret = H5Rget_obj_type3(&ref_wbuf[0], H5P_DEFAULT, &obj_type);
+    CHECK(ret, FAIL, "H5Rget_obj_type3");
+    VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+    /* Create reference to dataset2 attribute */
+    ret = H5Rcreate_attr(fid, "/Group1/Dataset2", "Attr1", H5P_DEFAULT, &ref_wbuf[1]);
+    CHECK(ret, FAIL, "H5Rcreate_attr");
+    ret = H5Rget_obj_type3(&ref_wbuf[1], H5P_DEFAULT, &obj_type);
+    CHECK(ret, FAIL, "H5Rget_obj_type3");
+    VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+    /* Create reference to group attribute */
+    ret = H5Rcreate_attr(fid, "/Group1", "Attr2", H5P_DEFAULT, &ref_wbuf[2]);
+    CHECK(ret, FAIL, "H5Rcreate_attr");
+    ret = H5Rget_obj_type3(&ref_wbuf[2], H5P_DEFAULT, &obj_type);
+    CHECK(ret, FAIL, "H5Rget_obj_type3");
+    VERIFY(obj_type, H5O_TYPE_GROUP, "H5Rget_obj_type3");
+
+    /* Create reference to named datatype attribute */
+    ret = H5Rcreate_attr(fid, "/Group1/Datatype1", "Attr3", H5P_DEFAULT, &ref_wbuf[3]);
+    CHECK(ret, FAIL, "H5Rcreate_attr");
+    ret = H5Rget_obj_type3(&ref_wbuf[3], H5P_DEFAULT, &obj_type);
+    CHECK(ret, FAIL, "H5Rget_obj_type3");
+    VERIFY(obj_type, H5O_TYPE_NAMED_DATATYPE, "H5Rget_obj_type3");
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, ref_wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open the file */
+    fid = H5Fopen(FILE_REF_ATTR, H5F_ACC_RDWR, H5P_DEFAULT);
+    /* Bug fix: file IDs fail as H5I_INVALID_HID, not FAIL */
+    CHECK(fid, H5I_INVALID_HID, "H5Fopen");
+
+    /* Open the dataset */
+    dataset = H5Dopen2(fid, "/Dataset3", H5P_DEFAULT);
+    /* Bug fix: validate the returned dataset ID, not the stale 'ret' value */
+    CHECK(dataset, H5I_INVALID_HID, "H5Dopen2");
+
+    /* Read selection from disk */
+    ret = H5Dread(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, ref_rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Open attribute on dataset object */
+    attr = H5Ropen_attr(&ref_rbuf[0], H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, H5I_INVALID_HID, "H5Ropen_attr");
+
+    /* Check information in referenced dataset */
+    sid = H5Aget_space(attr);
+    CHECK(sid, H5I_INVALID_HID, "H5Aget_space");
+
+    ret = (int)H5Sget_simple_extent_npoints(sid);
+    VERIFY(ret, SPACE1_DIM1, "H5Sget_simple_extent_npoints");
+
+    /* Read from disk */
+    ret = H5Aread(attr, H5T_NATIVE_UINT, rbuf);
+    CHECK(ret, FAIL, "H5Aread");
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        VERIFY(rbuf[i], i * 3, "Data");
+
+    /* Close dereferenced Dataset */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Open attribute on group object */
+    attr = H5Ropen_attr(&ref_rbuf[2], H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, H5I_INVALID_HID, "H5Ropen_attr");
+
+    /* Read from disk */
+    ret = H5Aread(attr, H5T_NATIVE_UINT, rbuf);
+    CHECK(ret, FAIL, "H5Aread");
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        VERIFY(rbuf[i], (i * 3) + 1, "Data");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Open attribute on named datatype object */
+    attr = H5Ropen_attr(&ref_rbuf[3], H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, H5I_INVALID_HID, "H5Ropen_attr");
+
+    /* Read from disk */
+    ret = H5Aread(attr, H5T_NATIVE_UINT, rbuf);
+    CHECK(ret, FAIL, "H5Aread");
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        VERIFY(rbuf[i], (i * 3) + 2, "Data");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close dataset access property list */
+    ret = H5Pclose(dapl_id);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free memory buffers
+     * NOTE(review): only ref_wbuf[0..3] are ever created above; this loop
+     * assumes SPACE1_DIM1 == 4 so no uninitialized reference is destroyed --
+     * verify against the SPACE1_DIM1 definition.
+     */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        ret = H5Rdestroy(&ref_wbuf[i]);
+        CHECK(ret, FAIL, "H5Rdestroy");
+        ret = H5Rdestroy(&ref_rbuf[i]);
+        CHECK(ret, FAIL, "H5Rdestroy");
+    }
+} /* test_reference_attr() */
+
+/****************************************************************
+**
+**  test_reference_external():
+**      Tests external references on various kinds of objects
+**
+****************************************************************/
+static void
+test_reference_external(void)
+{
+    hid_t   fid1, fid2; /* HDF5 File ID */
+    hid_t   dataset;    /* Dataset ID */
+    hid_t   group;      /* Group ID */
+    hid_t   attr;       /* Attribute ID */
+    hid_t   sid;        /* Dataspace ID */
+    hid_t   tid;        /* Datatype ID */
+    hsize_t dims[] = {SPACE1_DIM1};
+    hid_t   dapl_id;                 /* Dataset access property list */
+    H5R_ref_t ref_wbuf[SPACE1_DIM1], /* Buffer to write to disk */
+        ref_rbuf[SPACE1_DIM1];       /* Buffer read from disk */
+    unsigned   wbuf[SPACE1_DIM1], rbuf[SPACE1_DIM1];
+    unsigned   i;        /* Local index variables */
+    H5O_type_t obj_type; /* Object type */
+    herr_t     ret;      /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing External References Functions\n"));
+
+    /* Create first file, holding the referenced objects */
+    fid1 = H5Fcreate(FILE_REF_EXT1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid = H5Screate_simple(SPACE1_RANK, dims, NULL);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate_simple");
+
+    /* Create dataset access property list */
+    dapl_id = H5Pcreate(H5P_DATASET_ACCESS);
+    CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate");
+
+    /* Create a group */
+    group = H5Gcreate2(fid1, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(group, H5I_INVALID_HID, "H5Gcreate2");
+
+    /* Create an attribute for the group (values (i*3)+1) */
+    attr = H5Acreate2(group, "Attr2", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, H5I_INVALID_HID, "H5Acreate2");
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        wbuf[i] = (i * 3) + 1;
+
+    /* Write attribute to disk */
+    ret = H5Awrite(attr, H5T_NATIVE_UINT, wbuf);
+    CHECK(ret, FAIL, "H5Awrite");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Create a dataset (inside Group1) */
+    dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Create an attribute for the dataset (values i*3) */
+    attr = H5Acreate2(dataset, "Attr1", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, H5I_INVALID_HID, "H5Acreate2");
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        wbuf[i] = i * 3;
+
+    /* Write attribute to disk */
+    ret = H5Awrite(attr, H5T_NATIVE_UINT, wbuf);
+    CHECK(ret, FAIL, "H5Awrite");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Create another dataset (inside Group1) */
+    dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Create a datatype to refer to */
+    tid = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+    CHECK(tid, H5I_INVALID_HID, "H5Tcreate");
+
+    /* Insert fields */
+    ret = H5Tinsert(tid, "a", HOFFSET(s1_t, a), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    ret = H5Tinsert(tid, "b", HOFFSET(s1_t, b), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    ret = H5Tinsert(tid, "c", HOFFSET(s1_t, c), H5T_NATIVE_FLOAT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Save datatype for later */
+    ret = H5Tcommit2(group, "Datatype1", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+
+    /* Create an attribute for the datatype (values (i*3)+2) */
+    attr = H5Acreate2(tid, "Attr3", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, H5I_INVALID_HID, "H5Acreate2");
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        wbuf[i] = (i * 3) + 2;
+
+    /* Write attribute to disk */
+    ret = H5Awrite(attr, H5T_NATIVE_UINT, wbuf);
+    CHECK(ret, FAIL, "H5Awrite");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close group */
+    ret = H5Gclose(group);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    /* Create reference to dataset1 attribute */
+    ret = H5Rcreate_attr(fid1, "/Group1/Dataset1", "Attr1", H5P_DEFAULT, &ref_wbuf[0]);
+    CHECK(ret, FAIL, "H5Rcreate_attr");
+    ret = H5Rget_obj_type3(&ref_wbuf[0], H5P_DEFAULT, &obj_type);
+    CHECK(ret, FAIL, "H5Rget_obj_type3");
+    VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+    /* Create reference to dataset2 attribute */
+    ret = H5Rcreate_attr(fid1, "/Group1/Dataset2", "Attr1", H5P_DEFAULT, &ref_wbuf[1]);
+    CHECK(ret, FAIL, "H5Rcreate_attr");
+    ret = H5Rget_obj_type3(&ref_wbuf[1], H5P_DEFAULT, &obj_type);
+    CHECK(ret, FAIL, "H5Rget_obj_type3");
+    VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+    /* Create reference to group attribute */
+    ret = H5Rcreate_attr(fid1, "/Group1", "Attr2", H5P_DEFAULT, &ref_wbuf[2]);
+    CHECK(ret, FAIL, "H5Rcreate_attr");
+    ret = H5Rget_obj_type3(&ref_wbuf[2], H5P_DEFAULT, &obj_type);
+    CHECK(ret, FAIL, "H5Rget_obj_type3");
+    VERIFY(obj_type, H5O_TYPE_GROUP, "H5Rget_obj_type3");
+
+    /* Create reference to named datatype attribute */
+    ret = H5Rcreate_attr(fid1, "/Group1/Datatype1", "Attr3", H5P_DEFAULT, &ref_wbuf[3]);
+    CHECK(ret, FAIL, "H5Rcreate_attr");
+    ret = H5Rget_obj_type3(&ref_wbuf[3], H5P_DEFAULT, &obj_type);
+    CHECK(ret, FAIL, "H5Rget_obj_type3");
+    VERIFY(obj_type, H5O_TYPE_NAMED_DATATYPE, "H5Rget_obj_type3");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close first file; the references now point outside the file they live in */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Create second file, holding the reference dataset */
+    fid2 = H5Fcreate(FILE_REF_EXT2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid2, H5I_INVALID_HID, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid = H5Screate_simple(SPACE1_RANK, dims, NULL);
+    CHECK(sid, H5I_INVALID_HID, "H5Screate_simple");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid2, "Dataset3", H5T_STD_REF, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, ref_wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid2);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open the file */
+    fid2 = H5Fopen(FILE_REF_EXT2, H5F_ACC_RDWR, H5P_DEFAULT);
+    /* Bug fix: file IDs fail as H5I_INVALID_HID, not FAIL */
+    CHECK(fid2, H5I_INVALID_HID, "H5Fopen");
+
+    /* Open the dataset */
+    dataset = H5Dopen2(fid2, "/Dataset3", H5P_DEFAULT);
+    /* Bug fix: validate the returned dataset ID, not the stale 'ret' value */
+    CHECK(dataset, H5I_INVALID_HID, "H5Dopen2");
+
+    /* Read selection from disk */
+    ret = H5Dread(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, ref_rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Open attribute on dataset object */
+    attr = H5Ropen_attr(&ref_rbuf[0], H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, H5I_INVALID_HID, "H5Ropen_attr");
+
+    /* Check information in referenced dataset */
+    sid = H5Aget_space(attr);
+    CHECK(sid, H5I_INVALID_HID, "H5Aget_space");
+
+    ret = (int)H5Sget_simple_extent_npoints(sid);
+    VERIFY(ret, SPACE1_DIM1, "H5Sget_simple_extent_npoints");
+
+    /* Read from disk */
+    ret = H5Aread(attr, H5T_NATIVE_UINT, rbuf);
+    CHECK(ret, FAIL, "H5Aread");
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        VERIFY(rbuf[i], i * 3, "Data");
+
+    /* Close dereferenced Dataset */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Open attribute on group object */
+    attr = H5Ropen_attr(&ref_rbuf[2], H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, H5I_INVALID_HID, "H5Ropen_attr");
+
+    /* Read from disk */
+    ret = H5Aread(attr, H5T_NATIVE_UINT, rbuf);
+    CHECK(ret, FAIL, "H5Aread");
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        VERIFY(rbuf[i], (i * 3) + 1, "Data");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Open attribute on named datatype object */
+    attr = H5Ropen_attr(&ref_rbuf[3], H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(attr, H5I_INVALID_HID, "H5Ropen_attr");
+
+    /* Read from disk */
+    ret = H5Aread(attr, H5T_NATIVE_UINT, rbuf);
+    CHECK(ret, FAIL, "H5Aread");
+
+    for (i = 0; i < SPACE1_DIM1; i++)
+        VERIFY(rbuf[i], (i * 3) + 2, "Data");
+
+    /* Close attribute */
+    ret = H5Aclose(attr);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    /* Close dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close dataset access property list */
+    ret = H5Pclose(dapl_id);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid2);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free memory buffers
+     * NOTE(review): only ref_wbuf[0..3] are ever created above; this loop
+     * assumes SPACE1_DIM1 == 4 so no uninitialized reference is destroyed --
+     * verify against the SPACE1_DIM1 definition.
+     */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        ret = H5Rdestroy(&ref_wbuf[i]);
+        CHECK(ret, FAIL, "H5Rdestroy");
+        ret = H5Rdestroy(&ref_rbuf[i]);
+        CHECK(ret, FAIL, "H5Rdestroy");
+    }
+} /* test_reference_external() */
+
+/****************************************************************
+**
+** test_reference_compat_conv(): Test basic H5R (reference) object reference code.
+** Tests deprecated API routines and type conversion.
+**
+****************************************************************/
+#if 0
+static void
+test_reference_compat_conv(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset, dset2; /* Dataset ID */
+ hid_t group, group2; /* Group ID */
+ hid_t sid1, sid2, sid3; /* Dataspace IDs */
+ hid_t tid1, tid2; /* Datatype ID */
+ hsize_t dims1[] = {SPACE1_DIM1}, dims2[] = {SPACE2_DIM1, SPACE2_DIM2},
+ dims3[] = {SPACE1_DIM1}; /* Purposely set dimension larger to test NULL references */
+ hsize_t start[SPACE2_RANK]; /* Starting location of hyperslab */
+ hsize_t stride[SPACE2_RANK]; /* Stride of hyperslab */
+ hsize_t count[SPACE2_RANK]; /* Element count of hyperslab */
+ hsize_t block[SPACE2_RANK]; /* Block size of hyperslab */
+ hsize_t coord1[POINT1_NPOINTS][SPACE2_RANK]; /* Coordinates for point selection */
+ hobj_ref_t *wbuf_obj = NULL; /* Buffer to write to disk */
+ H5R_ref_t *rbuf_obj = NULL; /* Buffer read from disk */
+ hdset_reg_ref_t *wbuf_reg = NULL; /* Buffer to write to disk */
+ H5R_ref_t *rbuf_reg = NULL; /* Buffer read from disk */
+ H5O_type_t obj_type; /* Object type */
+ herr_t ret; /* Generic return value */
+ unsigned int i; /* Counter */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Deprecated Object Reference Functions\n"));
+
+ /* Allocate write & read buffers */
+ wbuf_obj = (hobj_ref_t *)HDcalloc(sizeof(hobj_ref_t), SPACE1_DIM1);
+ rbuf_obj = HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+ wbuf_reg = HDcalloc(sizeof(hdset_reg_ref_t), SPACE1_DIM1);
+ rbuf_reg = HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE_REF_COMPAT, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, H5I_INVALID_HID, "H5Fcreate");
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Create another dataspace for datasets */
+ sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+ CHECK(sid2, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Create another dataspace for datasets */
+ sid3 = H5Screate_simple(SPACE1_RANK, dims3, NULL);
+ CHECK(sid3, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Create a group */
+ group = H5Gcreate2(fid1, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group, H5I_INVALID_HID, "H5Gcreate2");
+
+ /* Create a dataset (inside Group1) */
+ dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Create another dataset (inside Group1) */
+ dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Create a datatype to refer to */
+ tid1 = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+ CHECK(tid1, H5I_INVALID_HID, "H5Tcreate");
+
+ /* Insert fields */
+ ret = H5Tinsert(tid1, "a", HOFFSET(s1_t, a), H5T_NATIVE_INT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(tid1, "b", HOFFSET(s1_t, b), H5T_NATIVE_INT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(tid1, "c", HOFFSET(s1_t, c), H5T_NATIVE_FLOAT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ /* Save datatype for later */
+ ret = H5Tcommit2(group, "Datatype1", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /* Close datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close group */
+ ret = H5Gclose(group);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Create a dataset with object reference datatype */
+ dataset = H5Dcreate2(fid1, "Dataset3", H5T_STD_REF_OBJ, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Create reference to dataset */
+ ret = H5Rcreate(&wbuf_obj[0], fid1, "/Group1/Dataset1", H5R_OBJECT, H5I_INVALID_HID);
+ CHECK(ret, FAIL, "H5Rcreate");
+
+ /* Create reference to dataset */
+ ret = H5Rcreate(&wbuf_obj[1], fid1, "/Group1/Dataset2", H5R_OBJECT, H5I_INVALID_HID);
+ CHECK(ret, FAIL, "H5Rcreate");
+
+ /* Create reference to group */
+ ret = H5Rcreate(&wbuf_obj[2], fid1, "/Group1", H5R_OBJECT, H5I_INVALID_HID);
+ CHECK(ret, FAIL, "H5Rcreate");
+
+ /* Create reference to named datatype */
+ ret = H5Rcreate(&wbuf_obj[3], fid1, "/Group1/Datatype1", H5R_OBJECT, H5I_INVALID_HID);
+ CHECK(ret, FAIL, "H5Rcreate");
+
+ /* Write references to disk */
+ ret = H5Dwrite(dataset, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf_obj);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Create a dataset with region reference datatype */
+ dataset = H5Dcreate2(fid1, "Dataset4", H5T_STD_REF_DSETREG, sid3, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Select 6x6 hyperslab for first reference */
+ start[0] = 2;
+ start[1] = 2;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = 6;
+ block[1] = 6;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create first dataset region */
+ ret = H5Rcreate(&wbuf_reg[0], fid1, "/Group1/Dataset1", H5R_DATASET_REGION, sid2);
+ CHECK(ret, FAIL, "H5Rcreate");
+
+ /* Select sequence of ten points for second reference */
+ coord1[0][0] = 6;
+ coord1[0][1] = 9;
+ coord1[1][0] = 2;
+ coord1[1][1] = 2;
+ coord1[2][0] = 8;
+ coord1[2][1] = 4;
+ coord1[3][0] = 1;
+ coord1[3][1] = 6;
+ coord1[4][0] = 2;
+ coord1[4][1] = 8;
+ coord1[5][0] = 3;
+ coord1[5][1] = 2;
+ coord1[6][0] = 0;
+ coord1[6][1] = 4;
+ coord1[7][0] = 9;
+ coord1[7][1] = 0;
+ coord1[8][0] = 7;
+ coord1[8][1] = 1;
+ coord1[9][0] = 3;
+ coord1[9][1] = 3;
+ ret = H5Sselect_elements(sid2, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Create second dataset region */
+ ret = H5Rcreate(&wbuf_reg[1], fid1, "/Group1/Dataset2", H5R_DATASET_REGION, sid2);
+ CHECK(ret, FAIL, "H5Rcreate");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_STD_REF_DSETREG, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf_reg);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close disk dataspaces */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(sid3);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open the file */
+ fid1 = H5Fopen(FILE_REF_COMPAT, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid1, H5I_INVALID_HID, "H5Fopen");
+
+ /* Open the object reference dataset */
+ dataset = H5Dopen2(fid1, "/Dataset3", H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dopen2");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf_obj);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Verify type of objects pointed at */
+ ret = H5Rget_obj_type3(&rbuf_obj[0], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+ ret = H5Rget_obj_type3(&rbuf_obj[1], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+ ret = H5Rget_obj_type3(&rbuf_obj[2], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_GROUP, "H5Rget_obj_type3");
+
+ ret = H5Rget_obj_type3(&rbuf_obj[3], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_NAMED_DATATYPE, "H5Rget_obj_type3");
+
+ /* Make sure the referenced objects can be opened */
+ dset2 = H5Ropen_object(&rbuf_obj[0], H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset2, H5I_INVALID_HID, "H5Ropen_object");
+
+ ret = H5Dclose(dset2);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ dset2 = H5Ropen_object(&rbuf_obj[1], H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset2, H5I_INVALID_HID, "H5Ropen_object");
+
+ ret = H5Dclose(dset2);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ group2 = H5Ropen_object(&rbuf_obj[2], H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group2, H5I_INVALID_HID, "H5Ropen_object");
+
+ ret = H5Gclose(group2);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ tid2 = H5Ropen_object(&rbuf_obj[3], H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(tid2, H5I_INVALID_HID, "H5Ropen_object");
+
+ ret = H5Tclose(tid2);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Open the dataset region reference dataset */
+ dataset = H5Dopen2(fid1, "/Dataset4", H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dopen2");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf_reg);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Verify type of objects pointed at */
+ ret = H5Rget_obj_type3(&rbuf_reg[0], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+ ret = H5Rget_obj_type3(&rbuf_reg[1], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+ /* Make sure the referenced objects can be opened */
+ dset2 = H5Ropen_object(&rbuf_reg[0], H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset2, H5I_INVALID_HID, "H5Ropen_object");
+
+ ret = H5Dclose(dset2);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ dset2 = H5Ropen_object(&rbuf_reg[1], H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset2, H5I_INVALID_HID, "H5Ropen_object");
+
+ ret = H5Dclose(dset2);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Destroy references */
+ for (i = 0; i < dims1[0]; i++) {
+ ret = H5Rdestroy(&rbuf_obj[i]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ }
+ for (i = 0; i < dims3[0]; i++) {
+ ret = H5Rdestroy(&rbuf_reg[i]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ }
+
+ /* Free memory buffers */
+ HDfree(wbuf_obj);
+ HDfree(rbuf_obj);
+ HDfree(wbuf_reg);
+ HDfree(rbuf_reg);
+} /* test_reference_compat_conv() */
+#endif
+
+/****************************************************************
+**
+** test_reference_perf(): Test basic H5R (reference) object reference
+** performance.
+**
+****************************************************************/
+static void
+test_reference_perf(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset, /* Dataset ID */
+ dset2; /* Dereferenced dataset ID */
+ hid_t group; /* Group ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1; /* Datatype ID */
+ hsize_t dims1[] = {1};
+ hid_t dapl_id; /* Dataset access property list */
+ H5R_ref_t *wbuf, /* buffer to write to disk */
+ *rbuf, /* buffer read from disk */
+ *tbuf; /* temp. buffer read from disk */
+ H5R_ref_t *wbuf_reg, /* buffer to write to disk */
+ *rbuf_reg; /* buffer read from disk */
+ hobj_ref_t *wbuf_deprec, /* deprecated references */
+ *rbuf_deprec; /* deprecated references */
+ hdset_reg_ref_t *wbuf_reg_deprec, /* deprecated references*/
+ *rbuf_reg_deprec; /* deprecated references*/
+ unsigned *ibuf, *obuf;
+ unsigned i, j; /* Counters */
+ H5O_type_t obj_type; /* Object type */
+ herr_t ret; /* Generic return value */
+ double t1, t2, t; /* Timers */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Object Reference Performance\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+ obuf = HDcalloc(sizeof(unsigned), SPACE1_DIM1);
+ ibuf = HDcalloc(sizeof(unsigned), SPACE1_DIM1);
+ wbuf_deprec = (hobj_ref_t *)HDcalloc(sizeof(hobj_ref_t), SPACE1_DIM1);
+ rbuf = (H5R_ref_t *)HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+ rbuf_deprec = (hobj_ref_t *)HDcalloc(sizeof(hobj_ref_t), SPACE1_DIM1);
+ tbuf = (H5R_ref_t *)HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+ wbuf_reg = (H5R_ref_t *)HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+ rbuf_reg = (H5R_ref_t *)HDcalloc(sizeof(H5R_ref_t), SPACE1_DIM1);
+ wbuf_reg_deprec = (hdset_reg_ref_t *)HDcalloc(sizeof(hdset_reg_ref_t), SPACE1_DIM1);
+ rbuf_reg_deprec = (hdset_reg_ref_t *)HDcalloc(sizeof(hdset_reg_ref_t), SPACE1_DIM1);
+
+ for (i = 0; i < SPACE1_DIM1; i++)
+ obuf[i] = i * 3;
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE_REF_OBJ, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, H5I_INVALID_HID, "H5Fcreate");
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Create dataset access property list */
+ dapl_id = H5Pcreate(H5P_DATASET_ACCESS);
+ CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate");
+
+ /* Create a group */
+ group = H5Gcreate2(fid1, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(group, H5I_INVALID_HID, "H5Gcreate2");
+
+ /* Create a dataset (inside Group1) */
+ dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, obuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+    /* Create another dataset (inside Group1) */
+    dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    /* H5Dcreate2 returns an hid_t: compare against H5I_INVALID_HID for
+     * consistency with every other H5Dcreate2 check in this function */
+    CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+ /* Create a datatype to refer to */
+ tid1 = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+ CHECK(tid1, H5I_INVALID_HID, "H5Tcreate");
+
+ /* Insert fields */
+ ret = H5Tinsert(tid1, "a", HOFFSET(s1_t, a), H5T_NATIVE_INT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(tid1, "b", HOFFSET(s1_t, b), H5T_NATIVE_INT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(tid1, "c", HOFFSET(s1_t, c), H5T_NATIVE_FLOAT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ /* Save datatype for later */
+ ret = H5Tcommit2(group, "Datatype1", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tcommit2");
+
+ /* Close datatype */
+ ret = H5Tclose(tid1);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Close group */
+ ret = H5Gclose(group);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, "Dataset3", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ t = 0;
+ for (i = 0; i < MAX_ITER_CREATE; i++) {
+ t1 = H5_get_time();
+ ret = H5Rcreate_object(fid1, "/Group1/Dataset1", H5P_DEFAULT, &wbuf[0]);
+ CHECK(ret, FAIL, "H5Rcreate_object");
+ t2 = H5_get_time();
+ t += t2 - t1;
+ ret = H5Rdestroy(&wbuf[0]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ }
+ if (VERBOSE_MED)
+ HDprintf("--- Object reference create time: %lfs\n", t / MAX_ITER_CREATE);
+
+ /* Create reference to dataset */
+ ret = H5Rcreate_object(fid1, "/Group1/Dataset1", H5P_DEFAULT, &wbuf[0]);
+ CHECK(ret, FAIL, "H5Rcreate_object");
+ ret = H5Rget_obj_type3(&wbuf[0], H5P_DEFAULT, &obj_type);
+ CHECK(ret, FAIL, "H5Rget_obj_type3");
+ VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3");
+
+ t = 0;
+ for (i = 0; i < MAX_ITER_WRITE; i++) {
+ t1 = H5_get_time();
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+ t2 = H5_get_time();
+ t += t2 - t1;
+ }
+ if (VERBOSE_MED)
+ HDprintf("--- Object reference write time: %lfs\n", t / MAX_ITER_WRITE);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+#if 0
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, "Dataset4", H5T_STD_REF_OBJ, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ t = 0;
+ for (i = 0; i < MAX_ITER_CREATE; i++) {
+ t1 = H5_get_time();
+ ret = H5Rcreate(&wbuf_deprec[0], fid1, "/Group1/Dataset1", H5R_OBJECT1, H5I_INVALID_HID);
+ CHECK(ret, FAIL, "H5Rcreate");
+ t2 = H5_get_time();
+ t += t2 - t1;
+ }
+ if (VERBOSE_MED)
+ HDprintf("--- Deprecated object reference create time: %lfs\n", t / MAX_ITER_CREATE);
+
+ /* Create reference to dataset */
+ ret = H5Rcreate(&wbuf_deprec[0], fid1, "/Group1/Dataset1", H5R_OBJECT1, H5I_INVALID_HID);
+ CHECK(ret, FAIL, "H5Rcreate");
+
+ t = 0;
+ for (i = 0; i < MAX_ITER_WRITE; i++) {
+ t1 = H5_get_time();
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf_deprec);
+ CHECK(ret, FAIL, "H5Dwrite");
+ t2 = H5_get_time();
+ t += t2 - t1;
+ }
+ if (VERBOSE_MED)
+ HDprintf("--- Deprecated object reference write time: %lfs\n", t / MAX_ITER_WRITE);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+#endif
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, "Dataset5", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ t = 0;
+ for (i = 0; i < MAX_ITER_CREATE; i++) {
+ t1 = H5_get_time();
+ /* Store first dataset region */
+ ret = H5Rcreate_region(fid1, "/Group1/Dataset1", sid1, H5P_DEFAULT, &wbuf_reg[0]);
+ CHECK(ret, FAIL, "H5Rcreate_region");
+ t2 = H5_get_time();
+ t += t2 - t1;
+ ret = H5Rdestroy(&wbuf_reg[0]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ }
+ if (VERBOSE_MED)
+ HDprintf("--- Region reference create time: %lfs\n", t / MAX_ITER_CREATE);
+
+ /* Store first dataset region */
+ ret = H5Rcreate_region(fid1, "/Group1/Dataset1", sid1, H5P_DEFAULT, &wbuf_reg[0]);
+ CHECK(ret, FAIL, "H5Rcreate_region");
+
+ t = 0;
+ for (i = 0; i < MAX_ITER_WRITE; i++) {
+ t1 = H5_get_time();
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf_reg);
+ CHECK(ret, FAIL, "H5Dwrite");
+ t2 = H5_get_time();
+ t += t2 - t1;
+ }
+ if (VERBOSE_MED)
+ HDprintf("--- Region reference write time: %lfs\n", t / MAX_ITER_WRITE);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+#if 0
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, "Dataset6", H5T_STD_REF_DSETREG, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2");
+
+ t = 0;
+ for (i = 0; i < MAX_ITER_CREATE; i++) {
+ t1 = H5_get_time();
+ /* Store first dataset region */
+ ret = H5Rcreate(&wbuf_reg_deprec[0], fid1, "/Group1/Dataset1", H5R_DATASET_REGION1, sid1);
+ CHECK(ret, FAIL, "H5Rcreate");
+ t2 = H5_get_time();
+ t += t2 - t1;
+ }
+ if (VERBOSE_MED)
+ HDprintf("--- Deprecated region reference create time: %lfs\n", t / MAX_ITER_CREATE);
+
+ t = 0;
+ for (i = 0; i < MAX_ITER_WRITE; i++) {
+ t1 = H5_get_time();
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_STD_REF_DSETREG, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf_reg_deprec);
+ CHECK(ret, FAIL, "H5Dwrite");
+ t2 = H5_get_time();
+ t += t2 - t1;
+ }
+ if (VERBOSE_MED)
+ HDprintf("--- Deprecated region reference write time: %lfs\n", t / MAX_ITER_WRITE);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+#endif
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open the file */
+ fid1 = H5Fopen(FILE_REF_OBJ, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(fid1, H5I_INVALID_HID, "H5Fopen");
+
+ /* Open the dataset */
+ dataset = H5Dopen2(fid1, "/Dataset3", H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dopen2");
+
+ t = 0;
+ for (i = 0; i < MAX_ITER_READ; i++) {
+ t1 = H5_get_time();
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+ t2 = H5_get_time();
+ t += t2 - t1;
+ ret = H5Rdestroy(&rbuf[0]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ }
+ if (VERBOSE_MED)
+ HDprintf("--- Object reference read time: %lfs\n", t / MAX_ITER_READ);
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Open dataset object */
+ dset2 = H5Ropen_object(&rbuf[0], H5P_DEFAULT, dapl_id);
+ CHECK(dset2, H5I_INVALID_HID, "H5Ropen_object");
+
+ /* Check information in referenced dataset */
+ sid1 = H5Dget_space(dset2);
+ CHECK(sid1, H5I_INVALID_HID, "H5Dget_space");
+
+ ret = (int)H5Sget_simple_extent_npoints(sid1);
+ VERIFY(ret, dims1[0], "H5Sget_simple_extent_npoints");
+
+ /* Read from disk */
+ ret = H5Dread(dset2, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ for (i = 0; i < dims1[0]; i++)
+ VERIFY(ibuf[i], i * 3, "Data");
+
+ /* Close dereferenced Dataset */
+ ret = H5Dclose(dset2);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+#if 0
+ /* Open the dataset */
+ dataset = H5Dopen2(fid1, "/Dataset4", H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dopen2");
+
+ t = 0;
+ for (i = 0; i < MAX_ITER_READ; i++) {
+ t1 = H5_get_time();
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf_deprec);
+ CHECK(ret, FAIL, "H5Dread");
+ t2 = H5_get_time();
+ t += t2 - t1;
+ }
+ if (VERBOSE_MED)
+ HDprintf("--- Deprecated object reference read time: %lfs\n", t / MAX_ITER_READ);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+#endif
+ /* Open the dataset */
+ dataset = H5Dopen2(fid1, "/Dataset5", H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dopen2");
+
+ t = 0;
+ for (i = 0; i < MAX_ITER_READ; i++) {
+ t1 = H5_get_time();
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf_reg);
+ CHECK(ret, FAIL, "H5Dread");
+ t2 = H5_get_time();
+ t += t2 - t1;
+ ret = H5Rdestroy(&rbuf_reg[0]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ }
+ if (VERBOSE_MED)
+ HDprintf("--- Region reference read time: %lfs\n", t / MAX_ITER_READ);
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf_reg);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+#if 0
+ /* Open the dataset */
+ dataset = H5Dopen2(fid1, "/Dataset6", H5P_DEFAULT);
+ CHECK(dataset, H5I_INVALID_HID, "H5Dopen2");
+
+ t = 0;
+ for (i = 0; i < MAX_ITER_READ; i++) {
+ t1 = H5_get_time();
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_STD_REF_DSETREG, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf_reg_deprec);
+ CHECK(ret, FAIL, "H5Dread");
+ t2 = H5_get_time();
+ t += t2 - t1;
+ }
+ if (VERBOSE_MED)
+ HDprintf("--- Deprecated region reference read time: %lfs\n", t / MAX_ITER_READ);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+#endif
+ /* Close dataset access property list */
+ ret = H5Pclose(dapl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Destroy references */
+ for (j = 0; j < dims1[0]; j++) {
+ ret = H5Rdestroy(&wbuf[j]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ ret = H5Rdestroy(&wbuf_reg[j]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ ret = H5Rdestroy(&rbuf[j]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ ret = H5Rdestroy(&rbuf_reg[j]);
+ CHECK(ret, FAIL, "H5Rdestroy");
+ }
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+ HDfree(wbuf_reg);
+ HDfree(rbuf_reg);
+ HDfree(wbuf_deprec);
+ HDfree(rbuf_deprec);
+ HDfree(wbuf_reg_deprec);
+ HDfree(rbuf_reg_deprec);
+ HDfree(tbuf);
+ HDfree(ibuf);
+ HDfree(obuf);
+} /* test_reference_perf() */
+
+/****************************************************************
+**
+** test_reference(): Main H5R reference testing routine.
+**
+****************************************************************/
+void
+test_reference(void)
+{
+ H5F_libver_t low, high; /* Low and high bounds */
+ const char *env_h5_drvr; /* File Driver value from environment */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing References\n"));
+
+ /* Get the VFD to use */
+ env_h5_drvr = HDgetenv(HDF5_DRIVER);
+ if (env_h5_drvr == NULL)
+ env_h5_drvr = "nomatch";
+
+ test_reference_params(); /* Test for correct parameter checking */
+ test_reference_obj(); /* Test basic H5R object reference code */
+ test_reference_vlen_obj(); /* Test reference within vlen */
+ test_reference_cmpnd_obj(); /* Test reference within compound type */
+
+ /* Loop through all the combinations of low/high version bounds */
+ for (low = H5F_LIBVER_EARLIEST; low < H5F_LIBVER_NBOUNDS; low++) {
+ for (high = H5F_LIBVER_EARLIEST; high < H5F_LIBVER_NBOUNDS; high++) {
+
+ /* Invalid combinations, just continue */
+ if (high == H5F_LIBVER_EARLIEST || high < low)
+ continue;
+
+ test_reference_region(low, high); /* Test basic H5R dataset region reference code */
+ test_reference_region_1D(low, high); /* Test H5R dataset region reference code for 1-D datasets */
+
+ } /* end high bound */
+ } /* end low bound */
+
+ /* The following test is currently broken with the Direct VFD */
+ if (HDstrcmp(env_h5_drvr, "direct") != 0) {
+ test_reference_obj_deleted(); /* Test H5R object reference code for deleted objects */
+ }
+
+ test_reference_group(); /* Test operations on dereferenced groups */
+ test_reference_attr(); /* Test attribute references */
+ test_reference_external(); /* Test external references */
+#if 0
+ test_reference_compat_conv(); /* Test operations with old types */
+#endif
+
+ test_reference_perf();
+
+} /* test_reference() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_reference
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Quincey Koziol
+ * September 8, 1998
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_reference(void)
+{
+ H5Fdelete(FILE_REF_PARAM, H5P_DEFAULT);
+ H5Fdelete(FILE_REF_OBJ, H5P_DEFAULT);
+ H5Fdelete(FILE_REF_VL_OBJ, H5P_DEFAULT);
+ H5Fdelete(FILE_REF_CMPND_OBJ, H5P_DEFAULT);
+ H5Fdelete(FILE_REF_REG, H5P_DEFAULT);
+ H5Fdelete(FILE_REF_REG_1D, H5P_DEFAULT);
+ H5Fdelete(FILE_REF_OBJ_DEL, H5P_DEFAULT);
+ H5Fdelete(FILE_REF_GRP, H5P_DEFAULT);
+ H5Fdelete(FILE_REF_ATTR, H5P_DEFAULT);
+ H5Fdelete(FILE_REF_EXT1, H5P_DEFAULT);
+ H5Fdelete(FILE_REF_EXT2, H5P_DEFAULT);
+ H5Fdelete(FILE_REF_COMPAT, H5P_DEFAULT);
+}
diff --git a/test/API/tselect.c b/test/API/tselect.c
new file mode 100644
index 0000000..a2f377d
--- /dev/null
+++ b/test/API/tselect.c
@@ -0,0 +1,16314 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: tselect
+ *
+ * Test the Dataspace selection functionality
+ *
+ *************************************************************/
+
+#define H5S_FRIEND /*suppress error about including H5Spkg */
+
+/* Define this macro to indicate that the testing APIs should be available */
+#define H5S_TESTING
+
+#include "testhdf5.h"
+#include "hdf5.h"
+/* #include "H5Spkg.h" */ /* Dataspaces */
+
+#define FILENAME "tselect.h5"
+
+/* 3-D dataset with fixed dimensions */
+#define SPACE1_NAME "Space1"
+#define SPACE1_RANK 3
+#define SPACE1_DIM1 3
+#define SPACE1_DIM2 15
+#define SPACE1_DIM3 13
+
+/* 2-D dataset with fixed dimensions */
+#define SPACE2_NAME "Space2"
+#define SPACE2_RANK 2
+#define SPACE2_DIM1 30
+#define SPACE2_DIM2 26
+#define SPACE2A_RANK 1
+#define SPACE2A_DIM1 (SPACE2_DIM1 * SPACE2_DIM2)
+
+/* 2-D dataset with fixed dimensions */
+#define SPACE3_NAME "Space3"
+#define SPACE3_RANK 2
+#define SPACE3_DIM1 15
+#define SPACE3_DIM2 26
+
+/* 3-D dataset with fixed dimensions */
+#define SPACE4_NAME "Space4"
+#define SPACE4_RANK 3
+#define SPACE4_DIM1 11
+#define SPACE4_DIM2 13
+#define SPACE4_DIM3 17
+
+/* Number of random hyperslabs to test */
+#define NHYPERSLABS 10
+
+/* Number of random hyperslab tests performed */
+#define NRAND_HYPER 100
+
+/* 5-D dataset with fixed dimensions */
+#define SPACE5_NAME "Space5"
+#define SPACE5_RANK 5
+#define SPACE5_DIM1 10
+#define SPACE5_DIM2 10
+#define SPACE5_DIM3 10
+#define SPACE5_DIM4 10
+#define SPACE5_DIM5 10
+
+/* 1-D dataset with same size as 5-D dataset */
+#define SPACE6_RANK 1
+#define SPACE6_DIM1 (SPACE5_DIM1 * SPACE5_DIM2 * SPACE5_DIM3 * SPACE5_DIM4 * SPACE5_DIM5)
+
+/* 2-D dataset with easy dimension sizes */
+#define SPACE7_NAME "Space7"
+#define SPACE7_RANK 2
+#define SPACE7_DIM1 10
+#define SPACE7_DIM2 10
+#define SPACE7_FILL 254
+#define SPACE7_CHUNK_DIM1 5
+#define SPACE7_CHUNK_DIM2 5
+#define SPACE7_NPOINTS 8
+
+/* 4-D dataset with fixed dimensions */
+#define SPACE8_NAME "Space8"
+#define SPACE8_RANK 4
+#define SPACE8_DIM1 11
+#define SPACE8_DIM2 13
+#define SPACE8_DIM3 17
+#define SPACE8_DIM4 19
+
+/* Another 2-D dataset with easy dimension sizes */
+#define SPACE9_RANK 2
+#define SPACE9_DIM1 12
+#define SPACE9_DIM2 12
+
+/* Element selection information */
+#define POINT1_NPOINTS 10
+
+/* Chunked dataset information */
+#define DATASETNAME "ChunkArray"
+#define NX_SUB 87 /* hyperslab dimensions */
+#define NY_SUB 61
+#define NZ_SUB 181
+#define NX 87 /* output buffer dimensions */
+#define NY 61
+#define NZ 181
+#define RANK_F 3 /* File dataspace rank */
+#define RANK_M 3 /* Memory dataspace rank */
+#define X 87 /* dataset dimensions */
+#define Y 61
+#define Z 181
+#define CHUNK_X 87 /* chunk dimensions */
+#define CHUNK_Y 61
+#define CHUNK_Z 181
+
+/* Basic chunk size */
+#define SPACE10_DIM1 180
+#define SPACE10_CHUNK_SIZE 12
+
+/* Information for bounds checking test */
+#define SPACE11_RANK 2
+#define SPACE11_DIM1 100
+#define SPACE11_DIM2 100
+#define SPACE11_NPOINTS 4
+
+/* Information for offsets w/chunks test #2 */
+#define SPACE12_RANK 1
+#define SPACE12_DIM0 25
+#define SPACE12_CHUNK_DIM0 5
+
+/* Information for Space rebuild test */
+#define SPACERE1_RANK 1
+#define SPACERE1_DIM0 20
+#define SPACERE2_RANK 2
+#define SPACERE2_DIM0 8
+#define SPACERE2_DIM1 12
+#define SPACERE3_RANK 3
+#define SPACERE3_DIM0 8
+#define SPACERE3_DIM1 12
+#define SPACERE3_DIM2 8
+#define SPACERE4_RANK 4
+#define SPACERE4_DIM0 8
+#define SPACERE4_DIM1 12
+#define SPACERE4_DIM2 8
+#define SPACERE4_DIM3 12
+#define SPACERE5_RANK 5
+#define SPACERE5_DIM0 8
+#define SPACERE5_DIM1 12
+#define SPACERE5_DIM2 8
+#define SPACERE5_DIM3 12
+#define SPACERE5_DIM4 8
+
+/* Information for Space update diminfo test */
+#define SPACEUD1_DIM0 20
+#define SPACEUD3_DIM0 9
+#define SPACEUD3_DIM1 12
+#define SPACEUD3_DIM2 13
+
+/* #defines for shape same / different rank tests */
+#define SS_DR_MAX_RANK 5
+
+/* Information for regular hyperslab query test */
+#define SPACE13_RANK 3
+#define SPACE13_DIM1 50
+#define SPACE13_DIM2 50
+#define SPACE13_DIM3 50
+#define SPACE13_NPOINTS 4
+
+/* Information for testing selection iterators */
+#define SEL_ITER_MAX_SEQ 256
+
/* Defines for test_hyper_io_1d() */
#define DNAME     "DSET_1D"
#define RANK      1
#define NUMCHUNKS 3
#define CHUNKSZ   20
/* Parenthesized so the macro expands safely inside any expression
 * (e.g. `x / NUM_ELEMENTS` would otherwise expand to `x / NUMCHUNKS * CHUNKSZ`) */
#define NUM_ELEMENTS (NUMCHUNKS * CHUNKSZ)
+
+/* Location comparison function */
+static int compare_size_t(const void *s1, const void *s2);
+
+static herr_t test_select_hyper_iter1(void *elem, hid_t type_id, unsigned ndim, const hsize_t *point,
+ void *operator_data);
+static herr_t test_select_point_iter1(void *elem, hid_t type_id, unsigned ndim, const hsize_t *point,
+ void *operator_data);
+static herr_t test_select_all_iter1(void *elem, hid_t type_id, unsigned ndim, const hsize_t *point,
+ void *operator_data);
+static herr_t test_select_none_iter1(void *elem, hid_t type_id, unsigned ndim, const hsize_t *point,
+ void *operator_data);
+static herr_t test_select_hyper_iter2(void *_elem, hid_t type_id, unsigned ndim, const hsize_t *point,
+ void *_operator_data);
+static herr_t test_select_hyper_iter3(void *elem, hid_t type_id, unsigned ndim, const hsize_t *point,
+ void *operator_data);
+
+/****************************************************************
+**
+** test_select_hyper_iter1(): Iterator for checking hyperslab iteration
+**
+****************************************************************/
+static herr_t
+test_select_hyper_iter1(void *_elem, hid_t H5_ATTR_UNUSED type_id, unsigned H5_ATTR_UNUSED ndim,
+ const hsize_t H5_ATTR_UNUSED *point, void *_operator_data)
+{
+ uint8_t *tbuf = (uint8_t *)_elem, /* temporary buffer pointer */
+ **tbuf2 = (uint8_t **)_operator_data; /* temporary buffer handle */
+
+ if (*tbuf != **tbuf2)
+ return (-1);
+ else {
+ (*tbuf2)++;
+ return (0);
+ }
+} /* end test_select_hyper_iter1() */
+
+/****************************************************************
+**
+** test_select_hyper(): Test basic H5S (dataspace) selection code.
+** Tests hyperslabs of various sizes and dimensionalities.
+**
+****************************************************************/
+static void
+test_select_hyper(hid_t xfer_plist)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {SPACE2_DIM1, SPACE2_DIM2};
+ hsize_t dims3[] = {SPACE3_DIM1, SPACE3_DIM2};
+ hsize_t start[SPACE1_RANK]; /* Starting location of hyperslab */
+ hsize_t stride[SPACE1_RANK]; /* Stride of hyperslab */
+ hsize_t count[SPACE1_RANK]; /* Element count of hyperslab */
+ hsize_t block[SPACE1_RANK]; /* Block size of hyperslab */
+ uint8_t *wbuf, /* buffer to write to disk */
+ *rbuf, /* buffer read from disk */
+ *tbuf; /* temporary buffer pointer */
+ int i, j; /* Counters */
+ herr_t ret; /* Generic return value */
+ H5S_class_t ext_type; /* Extent type */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Hyperslab Selection Functions\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE2_DIM1 * SPACE2_DIM2);
+ CHECK_PTR(wbuf, "HDmalloc");
+ rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE3_DIM1 * SPACE3_DIM2));
+ CHECK_PTR(rbuf, "HDcalloc");
+
+ /* Initialize write buffer */
+ for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
+ for (j = 0; j < SPACE2_DIM2; j++)
+ *tbuf++ = (uint8_t)((i * SPACE2_DIM2) + j);
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create dataspace for writing buffer */
+ sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Verify extent type */
+ ext_type = H5Sget_simple_extent_type(sid1);
+ VERIFY(ext_type, H5S_SIMPLE, "H5Sget_simple_extent_type");
+
+ /* Test selecting stride==0 to verify failure */
+ start[0] = 1;
+ start[1] = 0;
+ start[2] = 0;
+ stride[0] = 0;
+ stride[1] = 0;
+ stride[2] = 0;
+ count[0] = 2;
+ count[1] = 15;
+ count[2] = 13;
+ block[0] = 1;
+ block[1] = 1;
+ block[2] = 1;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Test selecting stride<block to verify failure */
+ start[0] = 1;
+ start[1] = 0;
+ start[2] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ stride[2] = 1;
+ count[0] = 2;
+ count[1] = 15;
+ count[2] = 13;
+ block[0] = 2;
+ block[1] = 2;
+ block[2] = 2;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Select 2x15x13 hyperslab for disk dataset */
+ start[0] = 1;
+ start[1] = 0;
+ start[2] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ stride[2] = 1;
+ count[0] = 2;
+ count[1] = 15;
+ count[2] = 13;
+ block[0] = 1;
+ block[1] = 1;
+ block[2] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Select 15x26 hyperslab for memory dataset */
+ start[0] = 15;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 15;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, SPACE2_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, xfer_plist, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Exercise checks for NULL buffer and valid selection */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, xfer_plist, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Dwrite");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, xfer_plist, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Dwrite");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Create dataspace for reading buffer */
+ sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 15x26 hyperslab for reading memory dataset */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 15;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Select 0x26 hyperslab to OR into current selection (should be a NOOP) */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 0;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, xfer_plist, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Exercise checks for NULL buffer and valid selection */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, xfer_plist, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Dread");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, xfer_plist, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Dread");
+
+ /* Check that the values match with a dataset iterator */
+ tbuf = wbuf + (15 * SPACE2_DIM2);
+ ret = H5Diterate(rbuf, H5T_NATIVE_UCHAR, sid2, test_select_hyper_iter1, &tbuf);
+ CHECK(ret, FAIL, "H5Diterate");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+} /* test_select_hyper() */
+
/* Operator data for test_select_point_iter1(): the saved point-selection
 * coordinates, the buffer they index into, and a cursor tracking which
 * point the iterator expects next */
struct pnt_iter {
    hsize_t  coord[POINT1_NPOINTS * 2][SPACE2_RANK]; /* Coordinates for point selection */
    uint8_t *buf;                                    /* Buffer the points are in */
    int      offset;                                 /* Which point we are looking at */
};
+
+/****************************************************************
+**
+** test_select_point_iter1(): Iterator for checking point iteration
+** (This is really ugly code, not a very good example of correct usage - QAK)
+**
+****************************************************************/
+static herr_t
+test_select_point_iter1(void *_elem, hid_t H5_ATTR_UNUSED type_id, unsigned H5_ATTR_UNUSED ndim,
+ const hsize_t H5_ATTR_UNUSED *point, void *_operator_data)
+{
+ uint8_t *elem = (uint8_t *)_elem; /* Pointer to the element to examine */
+ uint8_t *tmp; /* temporary ptr to element in operator data */
+ struct pnt_iter *pnt_info = (struct pnt_iter *)_operator_data;
+
+ tmp = pnt_info->buf + (pnt_info->coord[pnt_info->offset][0] * SPACE2_DIM2) +
+ pnt_info->coord[pnt_info->offset][1];
+ if (*elem != *tmp)
+ return (-1);
+ else {
+ pnt_info->offset++;
+ return (0);
+ }
+} /* end test_select_point_iter1() */
+
+/****************************************************************
+**
+** test_select_point(): Test basic H5S (dataspace) selection code.
+** Tests element selections between dataspaces of various sizes
+** and dimensionalities.
+**
+****************************************************************/
+static void
+test_select_point(hid_t xfer_plist)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {SPACE2_DIM1, SPACE2_DIM2};
+ hsize_t dims3[] = {SPACE3_DIM1, SPACE3_DIM2};
+ hsize_t coord1[POINT1_NPOINTS][SPACE1_RANK]; /* Coordinates for point selection */
+ hsize_t temp_coord1[POINT1_NPOINTS][SPACE1_RANK]; /* Coordinates for point selection */
+ hsize_t coord2[POINT1_NPOINTS][SPACE2_RANK]; /* Coordinates for point selection */
+ hsize_t temp_coord2[POINT1_NPOINTS][SPACE2_RANK]; /* Coordinates for point selection */
+ hsize_t coord3[POINT1_NPOINTS][SPACE3_RANK]; /* Coordinates for point selection */
+ hsize_t temp_coord3[POINT1_NPOINTS][SPACE3_RANK]; /* Coordinates for point selection */
+ uint8_t *wbuf, /* buffer to write to disk */
+ *rbuf, /* buffer read from disk */
+ *tbuf; /* temporary buffer pointer */
+ int i, j; /* Counters */
+ struct pnt_iter pi; /* Custom Pointer iterator struct */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Element Selection Functions\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE2_DIM1 * SPACE2_DIM2);
+ CHECK_PTR(wbuf, "HDmalloc");
+ rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE3_DIM1 * SPACE3_DIM2));
+ CHECK_PTR(rbuf, "HDcalloc");
+
+ /* Initialize write buffer */
+ for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
+ for (j = 0; j < SPACE2_DIM2; j++)
+ *tbuf++ = (uint8_t)((i * SPACE2_DIM2) + j);
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create dataspace for write buffer */
+ sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select sequence of ten points for disk dataset */
+ coord1[0][0] = 0;
+ coord1[0][1] = 10;
+ coord1[0][2] = 5;
+ coord1[1][0] = 1;
+ coord1[1][1] = 2;
+ coord1[1][2] = 7;
+ coord1[2][0] = 2;
+ coord1[2][1] = 4;
+ coord1[2][2] = 9;
+ coord1[3][0] = 0;
+ coord1[3][1] = 6;
+ coord1[3][2] = 11;
+ coord1[4][0] = 1;
+ coord1[4][1] = 8;
+ coord1[4][2] = 13;
+ coord1[5][0] = 2;
+ coord1[5][1] = 12;
+ coord1[5][2] = 0;
+ coord1[6][0] = 0;
+ coord1[6][1] = 14;
+ coord1[6][2] = 2;
+ coord1[7][0] = 1;
+ coord1[7][1] = 0;
+ coord1[7][2] = 4;
+ coord1[8][0] = 2;
+ coord1[8][1] = 1;
+ coord1[8][2] = 6;
+ coord1[9][0] = 0;
+ coord1[9][1] = 3;
+ coord1[9][2] = 8;
+ ret = H5Sselect_elements(sid1, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Verify correct elements selected */
+ H5Sget_select_elem_pointlist(sid1, (hsize_t)0, (hsize_t)POINT1_NPOINTS, (hsize_t *)temp_coord1);
+ for (i = 0; i < POINT1_NPOINTS; i++) {
+ VERIFY(temp_coord1[i][0], coord1[i][0], "H5Sget_select_elem_pointlist");
+ VERIFY(temp_coord1[i][1], coord1[i][1], "H5Sget_select_elem_pointlist");
+ VERIFY(temp_coord1[i][2], coord1[i][2], "H5Sget_select_elem_pointlist");
+ } /* end for */
+
+ ret = (int)H5Sget_select_npoints(sid1);
+ VERIFY(ret, 10, "H5Sget_select_npoints");
+
+ /* Append another sequence of ten points to disk dataset */
+ coord1[0][0] = 0;
+ coord1[0][1] = 2;
+ coord1[0][2] = 0;
+ coord1[1][0] = 1;
+ coord1[1][1] = 10;
+ coord1[1][2] = 8;
+ coord1[2][0] = 2;
+ coord1[2][1] = 8;
+ coord1[2][2] = 10;
+ coord1[3][0] = 0;
+ coord1[3][1] = 7;
+ coord1[3][2] = 12;
+ coord1[4][0] = 1;
+ coord1[4][1] = 3;
+ coord1[4][2] = 11;
+ coord1[5][0] = 2;
+ coord1[5][1] = 1;
+ coord1[5][2] = 1;
+ coord1[6][0] = 0;
+ coord1[6][1] = 13;
+ coord1[6][2] = 7;
+ coord1[7][0] = 1;
+ coord1[7][1] = 14;
+ coord1[7][2] = 6;
+ coord1[8][0] = 2;
+ coord1[8][1] = 2;
+ coord1[8][2] = 5;
+ coord1[9][0] = 0;
+ coord1[9][1] = 6;
+ coord1[9][2] = 13;
+ ret = H5Sselect_elements(sid1, H5S_SELECT_APPEND, (size_t)POINT1_NPOINTS, (const hsize_t *)coord1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Verify correct elements selected */
+ H5Sget_select_elem_pointlist(sid1, (hsize_t)POINT1_NPOINTS, (hsize_t)POINT1_NPOINTS,
+ (hsize_t *)temp_coord1);
+ for (i = 0; i < POINT1_NPOINTS; i++) {
+ VERIFY(temp_coord1[i][0], coord1[i][0], "H5Sget_select_elem_pointlist");
+ VERIFY(temp_coord1[i][1], coord1[i][1], "H5Sget_select_elem_pointlist");
+ VERIFY(temp_coord1[i][2], coord1[i][2], "H5Sget_select_elem_pointlist");
+ } /* end for */
+
+ ret = (int)H5Sget_select_npoints(sid1);
+ VERIFY(ret, 20, "H5Sget_select_npoints");
+
+ /* Select sequence of ten points for memory dataset */
+ coord2[0][0] = 12;
+ coord2[0][1] = 3;
+ coord2[1][0] = 15;
+ coord2[1][1] = 13;
+ coord2[2][0] = 7;
+ coord2[2][1] = 25;
+ coord2[3][0] = 0;
+ coord2[3][1] = 6;
+ coord2[4][0] = 13;
+ coord2[4][1] = 0;
+ coord2[5][0] = 24;
+ coord2[5][1] = 11;
+ coord2[6][0] = 12;
+ coord2[6][1] = 21;
+ coord2[7][0] = 29;
+ coord2[7][1] = 4;
+ coord2[8][0] = 8;
+ coord2[8][1] = 8;
+ coord2[9][0] = 19;
+ coord2[9][1] = 17;
+ ret = H5Sselect_elements(sid2, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord2);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Verify correct elements selected */
+ H5Sget_select_elem_pointlist(sid2, (hsize_t)0, (hsize_t)POINT1_NPOINTS, (hsize_t *)temp_coord2);
+ for (i = 0; i < POINT1_NPOINTS; i++) {
+ VERIFY(temp_coord2[i][0], coord2[i][0], "H5Sget_select_elem_pointlist");
+ VERIFY(temp_coord2[i][1], coord2[i][1], "H5Sget_select_elem_pointlist");
+ } /* end for */
+
+ /* Save points for later iteration */
+ /* (these are in the second half of the buffer, because we are prepending */
+ /* the next list of points to the beginning of the point selection list) */
+ HDmemcpy(((char *)pi.coord) + sizeof(coord2), coord2, sizeof(coord2));
+
+ ret = (int)H5Sget_select_npoints(sid2);
+ VERIFY(ret, 10, "H5Sget_select_npoints");
+
+ /* Append another sequence of ten points to memory dataset */
+ coord2[0][0] = 24;
+ coord2[0][1] = 0;
+ coord2[1][0] = 2;
+ coord2[1][1] = 25;
+ coord2[2][0] = 13;
+ coord2[2][1] = 17;
+ coord2[3][0] = 8;
+ coord2[3][1] = 3;
+ coord2[4][0] = 29;
+ coord2[4][1] = 4;
+ coord2[5][0] = 11;
+ coord2[5][1] = 14;
+ coord2[6][0] = 5;
+ coord2[6][1] = 22;
+ coord2[7][0] = 12;
+ coord2[7][1] = 2;
+ coord2[8][0] = 21;
+ coord2[8][1] = 12;
+ coord2[9][0] = 9;
+ coord2[9][1] = 18;
+ ret = H5Sselect_elements(sid2, H5S_SELECT_PREPEND, (size_t)POINT1_NPOINTS, (const hsize_t *)coord2);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Verify correct elements selected */
+ H5Sget_select_elem_pointlist(sid2, (hsize_t)0, (hsize_t)POINT1_NPOINTS, (hsize_t *)temp_coord2);
+ for (i = 0; i < POINT1_NPOINTS; i++) {
+ VERIFY(temp_coord2[i][0], coord2[i][0], "H5Sget_select_elem_pointlist");
+ VERIFY(temp_coord2[i][1], coord2[i][1], "H5Sget_select_elem_pointlist");
+ } /* end for */
+
+ ret = (int)H5Sget_select_npoints(sid2);
+ VERIFY(ret, 20, "H5Sget_select_npoints");
+
+ /* Save points for later iteration */
+ HDmemcpy(pi.coord, coord2, sizeof(coord2));
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, SPACE1_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, xfer_plist, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Create dataspace for reading buffer */
+ sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select sequence of points for read dataset */
+ coord3[0][0] = 0;
+ coord3[0][1] = 2;
+ coord3[1][0] = 4;
+ coord3[1][1] = 8;
+ coord3[2][0] = 13;
+ coord3[2][1] = 13;
+ coord3[3][0] = 14;
+ coord3[3][1] = 20;
+ coord3[4][0] = 7;
+ coord3[4][1] = 9;
+ coord3[5][0] = 2;
+ coord3[5][1] = 0;
+ coord3[6][0] = 9;
+ coord3[6][1] = 19;
+ coord3[7][0] = 1;
+ coord3[7][1] = 22;
+ coord3[8][0] = 12;
+ coord3[8][1] = 21;
+ coord3[9][0] = 11;
+ coord3[9][1] = 6;
+ ret = H5Sselect_elements(sid2, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord3);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Verify correct elements selected */
+ H5Sget_select_elem_pointlist(sid2, (hsize_t)0, (hsize_t)POINT1_NPOINTS, (hsize_t *)temp_coord3);
+ for (i = 0; i < POINT1_NPOINTS; i++) {
+ VERIFY(temp_coord3[i][0], coord3[i][0], "H5Sget_select_elem_pointlist");
+ VERIFY(temp_coord3[i][1], coord3[i][1], "H5Sget_select_elem_pointlist");
+ } /* end for */
+
+ ret = (int)H5Sget_select_npoints(sid2);
+ VERIFY(ret, 10, "H5Sget_select_npoints");
+
+ /* Append another sequence of ten points to disk dataset */
+ coord3[0][0] = 14;
+ coord3[0][1] = 25;
+ coord3[1][0] = 0;
+ coord3[1][1] = 0;
+ coord3[2][0] = 11;
+ coord3[2][1] = 11;
+ coord3[3][0] = 5;
+ coord3[3][1] = 14;
+ coord3[4][0] = 3;
+ coord3[4][1] = 5;
+ coord3[5][0] = 2;
+ coord3[5][1] = 2;
+ coord3[6][0] = 7;
+ coord3[6][1] = 13;
+ coord3[7][0] = 9;
+ coord3[7][1] = 16;
+ coord3[8][0] = 12;
+ coord3[8][1] = 22;
+ coord3[9][0] = 13;
+ coord3[9][1] = 9;
+ ret = H5Sselect_elements(sid2, H5S_SELECT_APPEND, (size_t)POINT1_NPOINTS, (const hsize_t *)coord3);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Verify correct elements selected */
+ H5Sget_select_elem_pointlist(sid2, (hsize_t)POINT1_NPOINTS, (hsize_t)POINT1_NPOINTS,
+ (hsize_t *)temp_coord3);
+ for (i = 0; i < POINT1_NPOINTS; i++) {
+ VERIFY(temp_coord3[i][0], coord3[i][0], "H5Sget_select_elem_pointlist");
+ VERIFY(temp_coord3[i][1], coord3[i][1], "H5Sget_select_elem_pointlist");
+ } /* end for */
+ ret = (int)H5Sget_select_npoints(sid2);
+ VERIFY(ret, 20, "H5Sget_select_npoints");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, xfer_plist, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check that the values match with a dataset iterator */
+ pi.buf = wbuf;
+ pi.offset = 0;
+ ret = H5Diterate(rbuf, H5T_NATIVE_UCHAR, sid2, test_select_point_iter1, &pi);
+ CHECK(ret, FAIL, "H5Diterate");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+} /* test_select_point() */
+
+/****************************************************************
+**
+** test_select_all_iter1(): Iterator for checking all iteration
+**
+**
+****************************************************************/
+static herr_t
+test_select_all_iter1(void *_elem, hid_t H5_ATTR_UNUSED type_id, unsigned H5_ATTR_UNUSED ndim,
+ const hsize_t H5_ATTR_UNUSED *point, void *_operator_data)
+{
+ uint8_t *tbuf = (uint8_t *)_elem, /* temporary buffer pointer */
+ **tbuf2 = (uint8_t **)_operator_data; /* temporary buffer handle */
+
+ if (*tbuf != **tbuf2)
+ return (-1);
+ else {
+ (*tbuf2)++;
+ return (0);
+ }
+} /* end test_select_all_iter1() */
+
+/****************************************************************
+**
+** test_select_none_iter1(): Iterator for checking none iteration
+** (This is never supposed to be called, so it always returns -1)
+**
+****************************************************************/
+static herr_t
+test_select_none_iter1(void H5_ATTR_UNUSED *_elem, hid_t H5_ATTR_UNUSED type_id, unsigned H5_ATTR_UNUSED ndim,
+ const hsize_t H5_ATTR_UNUSED *point, void H5_ATTR_UNUSED *_operator_data)
+{
+ return (-1);
+} /* end test_select_none_iter1() */
+
+/****************************************************************
+**
+** test_select_all(): Test basic H5S (dataspace) selection code.
+** Tests "all" selections.
+**
+****************************************************************/
+static void
+test_select_all(hid_t xfer_plist)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ hsize_t dims1[] = {SPACE4_DIM1, SPACE4_DIM2, SPACE4_DIM3};
+ uint8_t *wbuf, /* buffer to write to disk */
+ *rbuf, /* buffer read from disk */
+ *tbuf; /* temporary buffer pointer */
+ int i, j, k; /* Counters */
+ herr_t ret; /* Generic return value */
+ H5S_class_t ext_type; /* Extent type */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing 'All' Selection Functions\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE4_DIM1 * SPACE4_DIM2 * SPACE4_DIM3);
+ CHECK_PTR(wbuf, "HDmalloc");
+ rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE4_DIM1 * SPACE4_DIM2 * SPACE4_DIM3));
+ CHECK_PTR(rbuf, "HDcalloc");
+
+ /* Initialize write buffer */
+ for (i = 0, tbuf = wbuf; i < SPACE4_DIM1; i++)
+ for (j = 0; j < SPACE4_DIM2; j++)
+ for (k = 0; k < SPACE4_DIM3; k++)
+ *tbuf++ = (uint8_t)((((i * SPACE4_DIM2) + j) * SPACE4_DIM3) + k);
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE4_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Verify extent type */
+ ext_type = H5Sget_simple_extent_type(sid1);
+ VERIFY(ext_type, H5S_SIMPLE, "H5Sget_simple_extent_type");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, SPACE4_NAME, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, xfer_plist, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, xfer_plist, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check that the values match with a dataset iterator */
+ tbuf = wbuf;
+ ret = H5Diterate(rbuf, H5T_NATIVE_UCHAR, sid1, test_select_all_iter1, &tbuf);
+ CHECK(ret, FAIL, "H5Diterate");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+} /* test_select_all() */
+
/****************************************************************
**
**  test_select_all_hyper(): Test basic H5S (dataspace) selection code.
**      Tests "all" and hyperslab selections used together, including
**      the expected failure of a read when the file dataspace has a
**      "none" selection.
**
****************************************************************/
static void
test_select_all_hyper(hid_t xfer_plist)
{
    hid_t   fid1;       /* HDF5 File IDs */
    hid_t   dataset;    /* Dataset ID */
    hid_t   sid1, sid2; /* Dataspace ID */
    hsize_t dims1[] = {SPACE3_DIM1, SPACE3_DIM2};
    hsize_t dims2[] = {SPACE2_DIM1, SPACE2_DIM2};
    hsize_t dims3[] = {SPACE3_DIM1, SPACE3_DIM2};
    hsize_t start[SPACE1_RANK];  /* Starting location of hyperslab */
    hsize_t stride[SPACE1_RANK]; /* Stride of hyperslab */
    hsize_t count[SPACE1_RANK];  /* Element count of hyperslab */
    hsize_t block[SPACE1_RANK];  /* Block size of hyperslab */
    uint8_t *wbuf,               /* buffer to write to disk */
        *rbuf,                   /* buffer read from disk */
        *tbuf;                   /* temporary buffer pointer */
    int         i, j;     /* Counters */
    herr_t      ret;      /* Generic return value */
    H5S_class_t ext_type; /* Extent type */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing 'All' Selection Functions\n"));

    /* Allocate write & read buffers */
    wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE2_DIM1 * SPACE2_DIM2);
    CHECK_PTR(wbuf, "HDmalloc");
    rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE3_DIM1 * SPACE3_DIM2));
    CHECK_PTR(rbuf, "HDcalloc");

    /* Initialize write buffer with a linear byte pattern */
    for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
        for (j = 0; j < SPACE2_DIM2; j++)
            *tbuf++ = (uint8_t)((i * SPACE2_DIM2) + j);

    /* Create file */
    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(fid1, FAIL, "H5Fcreate");

    /* Create dataspace for dataset */
    sid1 = H5Screate_simple(SPACE3_RANK, dims1, NULL);
    CHECK(sid1, FAIL, "H5Screate_simple");

    /* Create dataspace for writing buffer */
    sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
    CHECK(sid2, FAIL, "H5Screate_simple");

    /* Verify extent type */
    ext_type = H5Sget_simple_extent_type(sid1);
    VERIFY(ext_type, H5S_SIMPLE, "H5Sget_simple_extent_type");

    /* Select entire 15x26 extent for disk dataset */
    ret = H5Sselect_all(sid1);
    CHECK(ret, FAIL, "H5Sselect_all");

    /* Select 15x26 hyperslab for memory dataset */
    /* (start[0]==15 picks the second half of the 30x26 write buffer) */
    start[0]  = 15;
    start[1]  = 0;
    stride[0] = 1;
    stride[1] = 1;
    count[0]  = 15;
    count[1]  = 26;
    block[0]  = 1;
    block[1]  = 1;
    ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
    CHECK(ret, FAIL, "H5Sselect_hyperslab");

    /* Create a dataset */
    dataset = H5Dcreate2(fid1, SPACE3_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dcreate2");

    /* Write selection to disk */
    ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, xfer_plist, wbuf);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Close memory dataspace */
    ret = H5Sclose(sid2);
    CHECK(ret, FAIL, "H5Sclose");

    /* Create dataspace for reading buffer */
    sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
    CHECK(sid2, FAIL, "H5Screate_simple");

    /* Select 15x26 hyperslab for reading memory dataset */
    start[0]  = 0;
    start[1]  = 0;
    stride[0] = 1;
    stride[1] = 1;
    count[0]  = 15;
    count[1]  = 26;
    block[0]  = 1;
    block[1]  = 1;
    ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
    CHECK(ret, FAIL, "H5Sselect_hyperslab");

    /* Select no extent for disk dataset */
    ret = H5Sselect_none(sid1);
    CHECK(ret, FAIL, "H5Sselect_none");

    /* Read selection from disk (should fail with no selection defined) */
    /* (element counts of the memory and file selections don't match) */
    H5E_BEGIN_TRY
    {
        ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, xfer_plist, rbuf);
    }
    H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Dread");

    /* Select entire 15x26 extent for disk dataset */
    ret = H5Sselect_all(sid1);
    CHECK(ret, FAIL, "H5Sselect_all");

    /* Read selection from disk (should work now) */
    ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, xfer_plist, rbuf);
    CHECK(ret, FAIL, "H5Dread");

    /* Check that the values match with a dataset iterator */
    /* (compare against the second half of the write buffer, which was written) */
    tbuf = wbuf + (15 * SPACE2_DIM2);
    ret  = H5Diterate(rbuf, H5T_NATIVE_UCHAR, sid2, test_select_all_iter1, &tbuf);
    CHECK(ret, FAIL, "H5Diterate");

    /* A quick check to make certain that iterating through a "none" selection works */
    /* (the iterator callback returns failure if invoked, so it must never be called) */
    ret = H5Sselect_none(sid2);
    CHECK(ret, FAIL, "H5Sselect_none");
    ret = H5Diterate(rbuf, H5T_NATIVE_UCHAR, sid2, test_select_none_iter1, &tbuf);
    CHECK(ret, FAIL, "H5Diterate");

    /* Close memory dataspace */
    ret = H5Sclose(sid2);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close disk dataspace */
    ret = H5Sclose(sid1);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close file */
    ret = H5Fclose(fid1);
    CHECK(ret, FAIL, "H5Fclose");

    /* Free memory buffers */
    HDfree(wbuf);
    HDfree(rbuf);
} /* test_select_all_hyper() */
+
+/****************************************************************
+**
+** test_select_combo(): Test basic H5S (dataspace) selection code.
+** Tests combinations of element and hyperslab selections between
+** dataspaces of various sizes and dimensionalities.
+**
+****************************************************************/
+static void
+test_select_combo(void)
+{
+    hid_t   fid1;    /* HDF5 File IDs */
+    hid_t   dataset; /* Dataset ID */
+    hid_t   sid1, sid2; /* Dataspace ID */
+    hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+    hsize_t dims2[] = {SPACE2_DIM1, SPACE2_DIM2};
+    hsize_t dims3[] = {SPACE3_DIM1, SPACE3_DIM2};
+    hsize_t coord1[POINT1_NPOINTS][SPACE1_RANK]; /* Coordinates for point selection */
+    hsize_t start[SPACE1_RANK];                  /* Starting location of hyperslab */
+    hsize_t stride[SPACE1_RANK];                 /* Stride of hyperslab */
+    hsize_t count[SPACE1_RANK];                  /* Element count of hyperslab */
+    hsize_t block[SPACE1_RANK];                  /* Block size of hyperslab */
+    uint8_t *wbuf,       /* buffer to write to disk */
+        *rbuf,           /* buffer read from disk */
+        *tbuf,           /* temporary buffer pointer */
+        *tbuf2;          /* temporary buffer pointer */
+    int    i, j;         /* Counters */
+    herr_t ret;          /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Combination of Hyperslab & Element Selection Functions\n"));
+
+    /* Allocate write & read buffers */
+    wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE2_DIM1 * SPACE2_DIM2);
+    CHECK_PTR(wbuf, "HDmalloc");
+    rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE3_DIM1 * SPACE3_DIM2));
+    CHECK_PTR(rbuf, "HDcalloc");
+
+    /* Initialize write buffer with sequential values, row-major over the 2-D buffer */
+    for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
+        for (j = 0; j < SPACE2_DIM2; j++)
+            *tbuf++ = (uint8_t)((i * SPACE2_DIM2) + j);
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create 3-D dataspace for disk dataset */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create 2-D dataspace for write buffer */
+    sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select sequence of ten points for disk dataset
+     * (element count matches the 1x10 memory hyperslab selected below, as
+     * required for an H5Dwrite between differently-shaped selections)
+     */
+    coord1[0][0] = 0;
+    coord1[0][1] = 10;
+    coord1[0][2] = 5;
+    coord1[1][0] = 1;
+    coord1[1][1] = 2;
+    coord1[1][2] = 7;
+    coord1[2][0] = 2;
+    coord1[2][1] = 4;
+    coord1[2][2] = 9;
+    coord1[3][0] = 0;
+    coord1[3][1] = 6;
+    coord1[3][2] = 11;
+    coord1[4][0] = 1;
+    coord1[4][1] = 8;
+    coord1[4][2] = 13;
+    coord1[5][0] = 2;
+    coord1[5][1] = 12;
+    coord1[5][2] = 0;
+    coord1[6][0] = 0;
+    coord1[6][1] = 14;
+    coord1[6][2] = 2;
+    coord1[7][0] = 1;
+    coord1[7][1] = 0;
+    coord1[7][2] = 4;
+    coord1[8][0] = 2;
+    coord1[8][1] = 1;
+    coord1[8][2] = 6;
+    coord1[9][0] = 0;
+    coord1[9][1] = 3;
+    coord1[9][2] = 8;
+    ret = H5Sselect_elements(sid1, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord1);
+    CHECK(ret, FAIL, "H5Sselect_elements");
+
+    /* Select 1x10 hyperslab for writing memory dataset
+     * (i.e. the first ten elements of wbuf, contiguous in row 0)
+     */
+    start[0]  = 0;
+    start[1]  = 0;
+    stride[0] = 1;
+    stride[1] = 1;
+    count[0]  = 1;
+    count[1]  = 10;
+    block[0]  = 1;
+    block[1]  = 1;
+    ret       = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, SPACE1_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Create dataspace for reading buffer */
+    sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+    CHECK(sid2, FAIL, "H5Sselect_simple" == NULL ? "" : "H5Screate_simple");
+
+    /* Select 10x1 hyperslab for reading memory dataset
+     * (one element at the start of each of the first ten rows of rbuf)
+     */
+    start[0]  = 0;
+    start[1]  = 0;
+    stride[0] = 1;
+    stride[1] = 1;
+    count[0]  = 10;
+    count[1]  = 1;
+    block[0]  = 1;
+    block[1]  = 1;
+    ret       = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Read selection from disk */
+    ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read with data written out: the i-th written element
+     * (wbuf[i]) must appear at the start of the i-th row of the read buffer
+     * (row pitch SPACE3_DIM2), per the 10x1 memory selection above.
+     */
+    for (i = 0; i < POINT1_NPOINTS; i++) {
+        tbuf  = wbuf + i;
+        tbuf2 = rbuf + (i * SPACE3_DIM2);
+        if (*tbuf != *tbuf2)
+            TestErrPrintf("element values don't match!, i=%d\n", i);
+    } /* end for */
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free memory buffers */
+    HDfree(wbuf);
+    HDfree(rbuf);
+} /* test_select_combo() */
+
+/* qsort() comparator for size_t values: returns -1, 0, or 1 as *s1 is
+ * less than, equal to, or greater than *s2 (avoids the overflow risk of
+ * a subtraction-based comparator).
+ */
+static int
+compare_size_t(const void *s1, const void *s2)
+{
+    if (*(const size_t *)s1 < *(const size_t *)s2)
+        return (-1);
+    else if (*(const size_t *)s1 > *(const size_t *)s2)
+        return (1);
+    else
+        return (0);
+}
+
+/****************************************************************
+**
+** test_select_hyper_stride(): Test H5S (dataspace) selection code.
+** Tests strided hyperslabs of various sizes and dimensionalities.
+**
+****************************************************************/
+static void
+test_select_hyper_stride(hid_t xfer_plist)
+{
+    hid_t   fid1;    /* HDF5 File IDs */
+    hid_t   dataset; /* Dataset ID */
+    hid_t   sid1, sid2; /* Dataspace ID */
+    hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+    hsize_t dims2[] = {SPACE2_DIM1, SPACE2_DIM2};
+    hsize_t dims3[] = {SPACE3_DIM1, SPACE3_DIM2};
+    hsize_t start[SPACE1_RANK];  /* Starting location of hyperslab */
+    hsize_t stride[SPACE1_RANK]; /* Stride of hyperslab */
+    hsize_t count[SPACE1_RANK];  /* Element count of hyperslab */
+    hsize_t block[SPACE1_RANK];  /* Block size of hyperslab */
+    uint16_t *wbuf,  /* buffer to write to disk */
+        *rbuf,       /* buffer read from disk */
+        *tbuf,       /* temporary buffer pointer */
+        *tbuf2;      /* temporary buffer pointer */
+    /* Linear element offsets into wbuf of the 72 elements (8 blocks of 3x3)
+     * covered by the memory write selection below; used to verify values. */
+    size_t loc1[72] = {
+        /* Gruesomely ugly way to make certain hyperslab locations are checked correctly */
+        27,  28,  29,  53,  54,  55,  79,  80,  81,  /* Block #1 */
+        32,  33,  34,  58,  59,  60,  84,  85,  86,  /* Block #2 */
+        157, 158, 159, 183, 184, 185, 209, 210, 211, /* Block #3 */
+        162, 163, 164, 188, 189, 190, 214, 215, 216, /* Block #4 */
+        287, 288, 289, 313, 314, 315, 339, 340, 341, /* Block #5 */
+        292, 293, 294, 318, 319, 320, 344, 345, 346, /* Block #6 */
+        417, 418, 419, 443, 444, 445, 469, 470, 471, /* Block #7 */
+        422, 423, 424, 448, 449, 450, 474, 475, 476, /* Block #8 */
+    };
+    /* Linear element offsets into rbuf of the 72 elements (12 blocks of 2x3)
+     * covered by the memory read selection below. */
+    size_t loc2[72] = {
+        0,   1,   2,   26,  27,  28,  /* Block #1 */
+        4,   5,   6,   30,  31,  32,  /* Block #2 */
+        8,   9,   10,  34,  35,  36,  /* Block #3 */
+        12,  13,  14,  38,  39,  40,  /* Block #4 */
+        104, 105, 106, 130, 131, 132, /* Block #5 */
+        108, 109, 110, 134, 135, 136, /* Block #6 */
+        112, 113, 114, 138, 139, 140, /* Block #7 */
+        116, 117, 118, 142, 143, 144, /* Block #8 */
+        208, 209, 210, 234, 235, 236, /* Block #9 */
+        212, 213, 214, 238, 239, 240, /* Block #10 */
+        216, 217, 218, 242, 243, 244, /* Block #11 */
+        220, 221, 222, 246, 247, 248, /* Block #12 */
+    };
+    int    i, j; /* Counters */
+    herr_t ret;  /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Hyperslabs with Strides Functionality\n"));
+
+    /* Allocate write & read buffers */
+    wbuf = (uint16_t *)HDmalloc(sizeof(uint16_t) * SPACE2_DIM1 * SPACE2_DIM2);
+    CHECK_PTR(wbuf, "HDmalloc");
+    rbuf = (uint16_t *)HDcalloc(sizeof(uint16_t), (size_t)(SPACE3_DIM1 * SPACE3_DIM2));
+    CHECK_PTR(rbuf, "HDcalloc");
+
+    /* Initialize write buffer with sequential values, row-major over the 2-D buffer */
+    for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
+        for (j = 0; j < SPACE2_DIM2; j++)
+            *tbuf++ = (uint16_t)((i * SPACE2_DIM2) + j);
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for dataset */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create dataspace for writing buffer */
+    sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select 2x3x3 count with a stride of 2x4x3 & 1x2x2 block hyperslab for disk dataset */
+    start[0]  = 0;
+    start[1]  = 0;
+    start[2]  = 0;
+    stride[0] = 2;
+    stride[1] = 4;
+    stride[2] = 3;
+    count[0]  = 2;
+    count[1]  = 3;
+    count[2]  = 3;
+    block[0]  = 1;
+    block[1]  = 2;
+    block[2]  = 2;
+    ret       = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Select 4x2 count with a stride of 5x5 & 3x3 block hyperslab for memory dataset
+     * (8 blocks x 9 elements = 72 elements, matching the disk selection's
+     * 18 blocks x 4 elements = 72)
+     */
+    start[0]  = 1;
+    start[1]  = 1;
+    stride[0] = 5;
+    stride[1] = 5;
+    count[0]  = 4;
+    count[1]  = 2;
+    block[0]  = 3;
+    block[1]  = 3;
+    ret       = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Create a dataset (stored little-endian; converted from native on I/O) */
+    dataset = H5Dcreate2(fid1, SPACE2_NAME, H5T_STD_U16LE, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dataset, H5T_NATIVE_USHORT, sid2, sid1, xfer_plist, wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Create dataspace for reading buffer */
+    sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select 3x4 count with a stride of 4x4 & 2x3 block hyperslab for memory dataset
+     * (12 blocks x 6 elements = 72 elements again)
+     */
+    start[0]  = 0;
+    start[1]  = 0;
+    stride[0] = 4;
+    stride[1] = 4;
+    count[0]  = 3;
+    count[1]  = 4;
+    block[0]  = 2;
+    block[1]  = 3;
+    ret       = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Read selection from disk */
+    ret = H5Dread(dataset, H5T_NATIVE_USHORT, sid2, sid1, xfer_plist, rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Sort the locations into the proper order: selections are traversed in
+     * increasing memory order, so after sorting, loc1[i] and loc2[i] address
+     * the same logical element in the write and read buffers respectively.
+     */
+    HDqsort(loc1, (size_t)72, sizeof(size_t), compare_size_t);
+    HDqsort(loc2, (size_t)72, sizeof(size_t), compare_size_t);
+    /* Compare data read with data written out */
+    for (i = 0; i < 72; i++) {
+        tbuf  = wbuf + loc1[i];
+        tbuf2 = rbuf + loc2[i];
+        if (*tbuf != *tbuf2) {
+            HDprintf("%d: hyperslab values don't match!, loc1[%d]=%d, loc2[%d]=%d\n", __LINE__, i,
+                     (int)loc1[i], i, (int)loc2[i]);
+            HDprintf("wbuf=%p, tbuf=%p, rbuf=%p, tbuf2=%p\n", (void *)wbuf, (void *)tbuf, (void *)rbuf,
+                     (void *)tbuf2);
+            TestErrPrintf("*tbuf=%u, *tbuf2=%u\n", (unsigned)*tbuf, (unsigned)*tbuf2);
+        } /* end if */
+    }     /* end for */
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free memory buffers */
+    HDfree(wbuf);
+    HDfree(rbuf);
+} /* test_select_hyper_stride() */
+
+/****************************************************************
+**
+** test_select_hyper_contig(): Test H5S (dataspace) selection code.
+** Tests contiguous hyperslabs of various sizes and dimensionalities.
+**
+****************************************************************/
+static void
+test_select_hyper_contig(hid_t dset_type, hid_t xfer_plist)
+{
+    hid_t   fid1;    /* HDF5 File IDs */
+    hid_t   dataset; /* Dataset ID */
+    hid_t   sid1, sid2; /* Dataspace ID */
+    /* Note: dimensions are transposed relative to the usual SPACE2 layout */
+    hsize_t dims2[] = {SPACE2_DIM2, SPACE2_DIM1};
+    hsize_t start[SPACE1_RANK];  /* Starting location of hyperslab */
+    hsize_t stride[SPACE1_RANK]; /* Stride of hyperslab */
+    hsize_t count[SPACE1_RANK];  /* Element count of hyperslab */
+    hsize_t block[SPACE1_RANK];  /* Block size of hyperslab */
+    uint16_t *wbuf, /* buffer to write to disk */
+        *rbuf,      /* buffer read from disk */
+        *tbuf;      /* temporary buffer pointer */
+    int    i, j;    /* Counters */
+    herr_t ret;     /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Contiguous Hyperslabs Functionality\n"));
+
+    /* Allocate write & read buffers */
+    wbuf = (uint16_t *)HDmalloc(sizeof(uint16_t) * SPACE2_DIM1 * SPACE2_DIM2);
+    CHECK_PTR(wbuf, "HDmalloc");
+    rbuf = (uint16_t *)HDcalloc(sizeof(uint16_t), (size_t)(SPACE2_DIM1 * SPACE2_DIM2));
+    CHECK_PTR(rbuf, "HDcalloc");
+
+    /* Initialize write buffer with sequential values, row-major over the 2-D buffer */
+    for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
+        for (j = 0; j < SPACE2_DIM2; j++)
+            *tbuf++ = (uint16_t)((i * SPACE2_DIM2) + j);
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for dataset */
+    sid1 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create dataspace for writing buffer */
+    sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select 12x10 count with a stride of 1x3 & 1x3 block hyperslab for disk dataset
+     * (stride == block in dim 1, so the selection is fully contiguous: 12x30 elements)
+     */
+    start[0]  = 0;
+    start[1]  = 0;
+    stride[0] = 1;
+    stride[1] = 3;
+    count[0]  = 12;
+    count[1]  = 10;
+    block[0]  = 1;
+    block[1]  = 3;
+    ret       = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Select 4x5 count with a stride of 3x6 & 3x6 block hyperslab for memory dataset
+     * (stride == block in both dims: also a contiguous 12x30 region)
+     */
+    start[0]  = 0;
+    start[1]  = 0;
+    stride[0] = 3;
+    stride[1] = 6;
+    count[0]  = 4;
+    count[1]  = 5;
+    block[0]  = 3;
+    block[1]  = 6;
+    ret       = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, SPACE2_NAME, dset_type, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dataset, H5T_NATIVE_USHORT, sid2, sid1, xfer_plist, wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Create dataspace for reading buffer */
+    sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select 6x5 count with a stride of 2x6 & 2x6 block hyperslab for disk dataset
+     * (again stride == block: the same contiguous 12x30 region as the write)
+     */
+    start[0]  = 0;
+    start[1]  = 0;
+    stride[0] = 2;
+    stride[1] = 6;
+    count[0]  = 6;
+    count[1]  = 5;
+    block[0]  = 2;
+    block[1]  = 6;
+    ret       = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Select 3x15 count with a stride of 4x2 & 4x2 block hyperslab for memory dataset */
+    start[0]  = 0;
+    start[1]  = 0;
+    stride[0] = 4;
+    stride[1] = 2;
+    count[0]  = 3;
+    count[1]  = 15;
+    block[0]  = 4;
+    block[1]  = 2;
+    ret       = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Read selection from disk */
+    ret = H5Dread(dataset, H5T_NATIVE_USHORT, sid2, sid1, xfer_plist, rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read with data written out (both selections cover the same
+     * contiguous 30*12-element prefix of the buffers, so memcmp suffices) */
+    if (HDmemcmp(rbuf, wbuf, sizeof(uint16_t) * 30 * 12) != 0)
+        TestErrPrintf("hyperslab values don't match! Line=%d\n", __LINE__);
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free memory buffers */
+    HDfree(wbuf);
+    HDfree(rbuf);
+} /* test_select_hyper_contig() */
+
+/****************************************************************
+**
+** test_select_hyper_contig2(): Test H5S (dataspace) selection code.
+** Tests more contiguous hyperslabs of various sizes and dimensionalities.
+**
+****************************************************************/
+static void
+test_select_hyper_contig2(hid_t dset_type, hid_t xfer_plist)
+{
+    hid_t   fid1;    /* HDF5 File IDs */
+    hid_t   dataset; /* Dataset ID */
+    hid_t   sid1, sid2; /* Dataspace ID */
+    hsize_t dims2[] = {SPACE8_DIM4, SPACE8_DIM3, SPACE8_DIM2, SPACE8_DIM1};
+    hsize_t start[SPACE8_RANK]; /* Starting location of hyperslab */
+    hsize_t count[SPACE8_RANK]; /* Element count of hyperslab */
+    uint16_t *wbuf,  /* buffer to write to disk */
+        *rbuf,       /* buffer read from disk */
+        *tbuf;       /* temporary buffer pointer */
+    int    i, j, k, l; /* Counters */
+    herr_t ret;        /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing More Contiguous Hyperslabs Functionality\n"));
+
+    /* Allocate write & read buffers */
+    wbuf = (uint16_t *)HDmalloc(sizeof(uint16_t) * SPACE8_DIM1 * SPACE8_DIM2 * SPACE8_DIM3 * SPACE8_DIM4);
+    CHECK_PTR(wbuf, "HDmalloc");
+    rbuf = (uint16_t *)HDcalloc(sizeof(uint16_t),
+                                (size_t)(SPACE8_DIM1 * SPACE8_DIM2 * SPACE8_DIM3 * SPACE8_DIM4));
+    CHECK_PTR(rbuf, "HDcalloc");
+
+    /* Initialize write buffer (values vary only with the two outermost loop
+     * indices; each inner SPACE8_DIM3 x SPACE8_DIM4 plane is constant) */
+    for (i = 0, tbuf = wbuf; i < SPACE8_DIM1; i++)
+        for (j = 0; j < SPACE8_DIM2; j++)
+            for (k = 0; k < SPACE8_DIM3; k++)
+                for (l = 0; l < SPACE8_DIM4; l++)
+                    *tbuf++ = (uint16_t)((i * SPACE8_DIM2) + j);
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for dataset */
+    sid1 = H5Screate_simple(SPACE8_RANK, dims2, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create dataspace for writing buffer */
+    sid2 = H5Screate_simple(SPACE8_RANK, dims2, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select contiguous hyperslab for disk dataset: the first 2 "slabs" of
+     * the slowest dimension, full extent in the other three */
+    start[0] = 0;
+    start[1] = 0;
+    start[2] = 0;
+    start[3] = 0;
+    count[0] = 2;
+    count[1] = SPACE8_DIM3;
+    count[2] = SPACE8_DIM2;
+    count[3] = SPACE8_DIM1;
+    ret      = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Select matching contiguous hyperslab in memory */
+    start[0] = 0;
+    start[1] = 0;
+    start[2] = 0;
+    start[3] = 0;
+    count[0] = 2;
+    count[1] = SPACE8_DIM3;
+    count[2] = SPACE8_DIM2;
+    count[3] = SPACE8_DIM1;
+    ret      = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, SPACE8_NAME, dset_type, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dataset, H5T_NATIVE_USHORT, sid2, sid1, xfer_plist, wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Create dataspace for reading buffer */
+    sid2 = H5Screate_simple(SPACE8_RANK, dims2, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select contiguous hyperslab in file dataspace (same region as the write) */
+    start[0] = 0;
+    start[1] = 0;
+    start[2] = 0;
+    start[3] = 0;
+    count[0] = 2;
+    count[1] = SPACE8_DIM3;
+    count[2] = SPACE8_DIM2;
+    count[3] = SPACE8_DIM1;
+    ret      = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Select contiguous hyperslab in memory */
+    start[0] = 0;
+    start[1] = 0;
+    start[2] = 0;
+    start[3] = 0;
+    count[0] = 2;
+    count[1] = SPACE8_DIM3;
+    count[2] = SPACE8_DIM2;
+    count[3] = SPACE8_DIM1;
+    ret      = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Read selection from disk */
+    ret = H5Dread(dataset, H5T_NATIVE_USHORT, sid2, sid1, xfer_plist, rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read with data written out (contiguous prefix, so memcmp) */
+    if (HDmemcmp(rbuf, wbuf, sizeof(uint16_t) * 2 * SPACE8_DIM3 * SPACE8_DIM2 * SPACE8_DIM1) != 0)
+        TestErrPrintf("Error: hyperslab values don't match!\n");
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free memory buffers */
+    HDfree(wbuf);
+    HDfree(rbuf);
+} /* test_select_hyper_contig2() */
+
+/****************************************************************
+**
+** test_select_hyper_contig3(): Test H5S (dataspace) selection code.
+** Tests contiguous hyperslabs of various sizes and dimensionalities.
+** This test uses a hyperslab that is contiguous in the lowest dimension,
+** not contiguous in a dimension, then has a selection across the entire next
+** dimension (which should be "flattened" out also).
+**
+****************************************************************/
+static void
+test_select_hyper_contig3(hid_t dset_type, hid_t xfer_plist)
+{
+    hid_t   fid1;    /* HDF5 File IDs */
+    hid_t   dataset; /* Dataset ID */
+    hid_t   sid1, sid2; /* Dataspace ID */
+    hsize_t dims2[] = {SPACE8_DIM4, SPACE8_DIM3, SPACE8_DIM2, SPACE8_DIM1};
+    hsize_t start[SPACE8_RANK]; /* Starting location of hyperslab */
+    hsize_t count[SPACE8_RANK]; /* Element count of hyperslab */
+    uint16_t *wbuf,    /* Buffer to write to disk */
+        *rbuf,         /* Buffer read from disk */
+        *tbuf, *tbuf2; /* Temporary buffer pointers */
+    unsigned i, j, k, l; /* Counters */
+    herr_t   ret;        /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Yet More Contiguous Hyperslabs Functionality\n"));
+
+    /* Allocate write & read buffers */
+    wbuf = (uint16_t *)HDmalloc(sizeof(uint16_t) * SPACE8_DIM1 * SPACE8_DIM2 * SPACE8_DIM3 * SPACE8_DIM4);
+    CHECK_PTR(wbuf, "HDmalloc");
+    rbuf = (uint16_t *)HDcalloc(sizeof(uint16_t),
+                                (size_t)(SPACE8_DIM1 * SPACE8_DIM2 * SPACE8_DIM3 * SPACE8_DIM4));
+    CHECK_PTR(rbuf, "HDcalloc");
+
+    /* Initialize write buffer (values vary with the two innermost loop
+     * indices, so every plane carries a distinguishable pattern) */
+    for (i = 0, tbuf = wbuf; i < SPACE8_DIM4; i++)
+        for (j = 0; j < SPACE8_DIM3; j++)
+            for (k = 0; k < SPACE8_DIM2; k++)
+                for (l = 0; l < SPACE8_DIM1; l++)
+                    *tbuf++ = (uint16_t)((k * SPACE8_DIM2) + l);
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for dataset */
+    sid1 = H5Screate_simple(SPACE8_RANK, dims2, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create dataspace for writing buffer */
+    sid2 = H5Screate_simple(SPACE8_RANK, dims2, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select semi-contiguous hyperslab for disk dataset: the second half of
+     * dim 2, full extent in dim 1 and dim 3, first two slabs of dim 0 --
+     * contiguous in the lowest dimension but not across dim 2 */
+    start[0] = 0;
+    start[1] = 0;
+    start[2] = SPACE8_DIM2 / 2;
+    start[3] = 0;
+    count[0] = 2;
+    count[1] = SPACE8_DIM3;
+    count[2] = SPACE8_DIM2 / 2;
+    count[3] = SPACE8_DIM1;
+    ret      = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Select matching semi-contiguous hyperslab in memory */
+    start[0] = 0;
+    start[1] = 0;
+    start[2] = SPACE8_DIM2 / 2;
+    start[3] = 0;
+    count[0] = 2;
+    count[1] = SPACE8_DIM3;
+    count[2] = SPACE8_DIM2 / 2;
+    count[3] = SPACE8_DIM1;
+    ret      = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, SPACE8_NAME, dset_type, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dataset, H5T_NATIVE_USHORT, sid2, sid1, xfer_plist, wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Create dataspace for reading buffer */
+    sid2 = H5Screate_simple(SPACE8_RANK, dims2, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select semi-contiguous hyperslab in file dataspace (same region as the write) */
+    start[0] = 0;
+    start[1] = 0;
+    start[2] = SPACE8_DIM2 / 2;
+    start[3] = 0;
+    count[0] = 2;
+    count[1] = SPACE8_DIM3;
+    count[2] = SPACE8_DIM2 / 2;
+    count[3] = SPACE8_DIM1;
+    ret      = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Select semi-contiguous hyperslab in memory */
+    start[0] = 0;
+    start[1] = 0;
+    start[2] = SPACE8_DIM2 / 2;
+    start[3] = 0;
+    count[0] = 2;
+    count[1] = SPACE8_DIM3;
+    count[2] = SPACE8_DIM2 / 2;
+    count[3] = SPACE8_DIM1;
+    ret      = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Read selection from disk */
+    ret = H5Dread(dataset, H5T_NATIVE_USHORT, sid2, sid1, xfer_plist, rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read with data written out.  start[]/count[] still hold
+     * the hyperslab parameters (identical for all four selections above):
+     * inside the selected region values must match wbuf; outside it rbuf
+     * must still be zero from HDcalloc(). */
+    for (i = 0, tbuf = wbuf, tbuf2 = rbuf; i < SPACE8_DIM4; i++)
+        for (j = 0; j < SPACE8_DIM3; j++)
+            for (k = 0; k < SPACE8_DIM2; k++)
+                for (l = 0; l < SPACE8_DIM1; l++, tbuf++, tbuf2++)
+                    if ((i >= start[0] && i < (start[0] + count[0])) &&
+                        (j >= start[1] && j < (start[1] + count[1])) &&
+                        (k >= start[2] && k < (start[2] + count[2])) &&
+                        (l >= start[3] && l < (start[3] + count[3]))) {
+                        if (*tbuf != *tbuf2) {
+                            HDprintf("Error: hyperslab values don't match!\n");
+                            TestErrPrintf("Line: %d, i=%u, j=%u, k=%u, l=%u, *tbuf=%u,*tbuf2=%u\n", __LINE__,
+                                          i, j, k, l, (unsigned)*tbuf, (unsigned)*tbuf2);
+                        } /* end if */
+                    }     /* end if */
+                    else {
+                        if (*tbuf2 != 0) {
+                            HDprintf("Error: invalid data in read buffer!\n");
+                            TestErrPrintf("Line: %d, i=%u, j=%u, k=%u, l=%u, *tbuf=%u,*tbuf2=%u\n", __LINE__,
+                                          i, j, k, l, (unsigned)*tbuf, (unsigned)*tbuf2);
+                        } /* end if */
+                    }     /* end else */
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free memory buffers */
+    HDfree(wbuf);
+    HDfree(rbuf);
+} /* test_select_hyper_contig3() */
+
+#if 0
+/****************************************************************
+**
+** verify_select_hyper_contig_dr__run_test(): Verify data from
+** test_select_hyper_contig_dr__run_test()
+**
+****************************************************************/
+/* Currently compiled out via the surrounding #if 0 block.
+ * Walks cube_buf as an up-to-5-D cube of edge_size per dimension, verifying
+ * the sequential value pattern.  The nested do/while loops each execute at
+ * least once and are guarded by cube_rank, so a cube of rank < 5 simply
+ * collapses the unused outer dimensions to a single iteration.
+ */
+static void
+verify_select_hyper_contig_dr__run_test(const uint16_t *cube_buf, size_t cube_size,
+                                        unsigned edge_size, unsigned cube_rank)
+{
+    const uint16_t *cube_ptr;       /* Pointer into the cube buffer */
+    uint16_t        expected_value; /* Expected value in dataset */
+    unsigned        i, j, k, l, m;  /* Local index variables */
+    size_t          s;              /* Local index variable */
+    hbool_t         mis_match;      /* Flag to indicate mismatch in expected value */
+
+    HDassert(cube_buf);
+    HDassert(cube_size > 0);
+
+    expected_value = 0;
+    mis_match      = FALSE;
+    cube_ptr       = cube_buf;
+    s              = 0;
+    i              = 0;
+    do {
+        j = 0;
+        do {
+            k = 0;
+            do {
+                l = 0;
+                do {
+                    m = 0;
+                    do {
+                        /* Sanity check: never walk past the end of the buffer */
+                        HDassert(s < cube_size);
+
+                        /* Check for correct value */
+                        if (*cube_ptr != expected_value)
+                            mis_match = TRUE;
+
+                        /* Advance to next element */
+                        cube_ptr++;
+                        expected_value++;
+                        s++;
+                        m++;
+                    } while ((cube_rank > 0) && (m < edge_size));
+                    l++;
+                } while ((cube_rank > 1) && (l < edge_size));
+                k++;
+            } while ((cube_rank > 2) && (k < edge_size));
+            j++;
+        } while ((cube_rank > 3) && (j < edge_size));
+        i++;
+    } while ((cube_rank > 4) && (i < edge_size));
+    /* Report a single error after the full scan, rather than one per element */
+    if (mis_match)
+        TestErrPrintf("Initial cube data don't match! Line = %d\n", __LINE__);
+} /* verify_select_hyper_contig_dr__run_test() */
+#endif
+#if 0
+
+/****************************************************************
+**
+** test_select_hyper_contig_dr__run_test(): Test H5S (dataspace)
+** selection code with contiguous source and target having
+** different ranks but the same shape. We have already
+** tested H5Sselect_shape_same in isolation, so now we try to do
+** I/O.
+**
+****************************************************************/
+static void
+test_select_hyper_contig_dr__run_test(int test_num, const uint16_t *cube_buf, const uint16_t *zero_buf,
+ unsigned edge_size, unsigned chunk_edge_size, unsigned small_rank,
+ unsigned large_rank, hid_t dset_type, hid_t xfer_plist)
+{
+ hbool_t mis_match; /* Flag indicating a value read in wasn't what was expected */
+ hid_t fapl; /* File access property list */
+ hid_t fid1; /* File ID */
+ hid_t small_cube_sid; /* Dataspace ID for small cube in memory & file */
+ hid_t mem_large_cube_sid; /* Dataspace ID for large cube in memory */
+ hid_t file_large_cube_sid; /* Dataspace ID for large cube in file */
+ hid_t small_cube_dcpl_id = H5P_DEFAULT; /* DCPL for small cube dataset */
+ hid_t large_cube_dcpl_id = H5P_DEFAULT; /* DCPL for large cube dataset */
+ hid_t small_cube_dataset; /* Dataset ID */
+ hid_t large_cube_dataset; /* Dataset ID */
+ size_t start_index; /* Offset within buffer to begin inspecting */
+ size_t stop_index; /* Offset within buffer to end inspecting */
+ uint16_t expected_value; /* Expected value in dataset */
+ uint16_t *small_cube_buf_1; /* Buffer for small cube data */
+ uint16_t *large_cube_buf_1; /* Buffer for large cube data */
+ uint16_t *ptr_1; /* Temporary pointer into cube data */
+ hsize_t dims[SS_DR_MAX_RANK]; /* Dataspace dimensions */
+ hsize_t start[SS_DR_MAX_RANK]; /* Shared hyperslab start offset */
+ hsize_t stride[SS_DR_MAX_RANK]; /* Shared hyperslab stride */
+ hsize_t count[SS_DR_MAX_RANK]; /* Shared hyperslab count */
+ hsize_t block[SS_DR_MAX_RANK]; /* Shared hyperslab block size */
+ hsize_t *start_ptr; /* Actual hyperslab start offset */
+ hsize_t *stride_ptr; /* Actual hyperslab stride */
+ hsize_t *count_ptr; /* Actual hyperslab count */
+ hsize_t *block_ptr; /* Actual hyperslab block size */
+ size_t small_cube_size; /* Number of elements in small cube */
+ size_t large_cube_size; /* Number of elements in large cube */
+ unsigned u, v, w, x; /* Local index variables */
+ size_t s; /* Local index variable */
+ htri_t check; /* Shape comparison return value */
+ herr_t ret; /* Generic return value */
+
+ MESSAGE(7, ("\tn-cube slice through m-cube I/O test %d.\n", test_num));
+ MESSAGE(7, ("\tranks = %u/%u, edge_size = %u, chunk_edge_size = %u.\n", small_rank, large_rank, edge_size,
+ chunk_edge_size));
+
+ HDassert(edge_size >= 6);
+ HDassert(edge_size >= chunk_edge_size);
+ HDassert((chunk_edge_size == 0) || (chunk_edge_size >= 3));
+ HDassert(small_rank > 0);
+ HDassert(small_rank < large_rank);
+ HDassert(large_rank <= SS_DR_MAX_RANK);
+
+ /* Compute cube sizes */
+ small_cube_size = large_cube_size = (size_t)1;
+ for (u = 0; u < large_rank; u++) {
+ if (u < small_rank)
+ small_cube_size *= (size_t)edge_size;
+
+ large_cube_size *= (size_t)edge_size;
+ } /* end for */
+
+ HDassert(large_cube_size < (size_t)UINT_MAX);
+
+ /* set up the start, stride, count, and block pointers */
+ start_ptr = &(start[SS_DR_MAX_RANK - large_rank]);
+ stride_ptr = &(stride[SS_DR_MAX_RANK - large_rank]);
+ count_ptr = &(count[SS_DR_MAX_RANK - large_rank]);
+ block_ptr = &(block[SS_DR_MAX_RANK - large_rank]);
+
+ /* Allocate buffers */
+ small_cube_buf_1 = (uint16_t *)HDcalloc(sizeof(uint16_t), small_cube_size);
+ CHECK_PTR(small_cube_buf_1, "HDcalloc");
+ large_cube_buf_1 = (uint16_t *)HDcalloc(sizeof(uint16_t), large_cube_size);
+ CHECK_PTR(large_cube_buf_1, "HDcalloc");
+
+ /* Create a dataset transfer property list */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+
+ /* Use the 'core' VFD for this test */
+ ret = H5Pset_fapl_core(fapl, (size_t)(1024 * 1024), FALSE);
+ CHECK(ret, FAIL, "H5Pset_fapl_core");
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Close file access property list */
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* setup dims: */
+ dims[0] = dims[1] = dims[2] = dims[3] = dims[4] = (hsize_t)edge_size;
+
+ /* Create small cube dataspaces */
+ small_cube_sid = H5Screate_simple((int)small_rank, dims, NULL);
+ CHECK(small_cube_sid, FAIL, "H5Screate_simple");
+
+ /* Create large cube dataspace */
+ mem_large_cube_sid = H5Screate_simple((int)large_rank, dims, NULL);
+ CHECK(mem_large_cube_sid, FAIL, "H5Screate_simple");
+ file_large_cube_sid = H5Screate_simple((int)large_rank, dims, NULL);
+ CHECK(file_large_cube_sid, FAIL, "H5Screate_simple");
+
+ /* if chunk edge size is greater than zero, set up the small and
+ * large data set creation property lists to specify chunked
+ * datasets.
+ */
+ if (chunk_edge_size > 0) {
+ hsize_t chunk_dims[SS_DR_MAX_RANK]; /* Chunk dimensions */
+
+ chunk_dims[0] = chunk_dims[1] = chunk_dims[2] = chunk_dims[3] = chunk_dims[4] =
+ (hsize_t)chunk_edge_size;
+
+ small_cube_dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(small_cube_dcpl_id, FAIL, "H5Pcreate");
+
+ ret = H5Pset_layout(small_cube_dcpl_id, H5D_CHUNKED);
+ CHECK(ret, FAIL, "H5Pset_layout");
+
+ ret = H5Pset_chunk(small_cube_dcpl_id, (int)small_rank, chunk_dims);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+
+ large_cube_dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(large_cube_dcpl_id, FAIL, "H5Pcreate");
+
+ ret = H5Pset_layout(large_cube_dcpl_id, H5D_CHUNKED);
+ CHECK(ret, FAIL, "H5Pset_layout");
+
+ ret = H5Pset_chunk(large_cube_dcpl_id, (int)large_rank, chunk_dims);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+ } /* end if */
+
+ /* create the small cube dataset */
+ small_cube_dataset = H5Dcreate2(fid1, "small_cube_dataset", dset_type, small_cube_sid, H5P_DEFAULT,
+ small_cube_dcpl_id, H5P_DEFAULT);
+ CHECK(small_cube_dataset, FAIL, "H5Dcreate2");
+
+ /* Close non-default small dataset DCPL */
+ if (small_cube_dcpl_id != H5P_DEFAULT) {
+ ret = H5Pclose(small_cube_dcpl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+ } /* end if */
+
+ /* create the large cube dataset */
+ large_cube_dataset = H5Dcreate2(fid1, "large_cube_dataset", dset_type, file_large_cube_sid, H5P_DEFAULT,
+ large_cube_dcpl_id, H5P_DEFAULT);
+ CHECK(large_cube_dataset, FAIL, "H5Dcreate2");
+
+ /* Close non-default large dataset DCPL */
+ if (large_cube_dcpl_id != H5P_DEFAULT) {
+ ret = H5Pclose(large_cube_dcpl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+ } /* end if */
+
+ /* write initial data to the on disk datasets */
+ ret =
+ H5Dwrite(small_cube_dataset, H5T_NATIVE_UINT16, small_cube_sid, small_cube_sid, xfer_plist, cube_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ ret = H5Dwrite(large_cube_dataset, H5T_NATIVE_UINT16, mem_large_cube_sid, file_large_cube_sid, xfer_plist,
+ cube_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* read initial data from disk and verify that it is as expected. */
+ ret = H5Dread(small_cube_dataset, H5T_NATIVE_UINT16, small_cube_sid, small_cube_sid, xfer_plist,
+ small_cube_buf_1);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check that the data is valid */
+ verify_select_hyper_contig_dr__run_test(small_cube_buf_1, small_cube_size, edge_size, small_rank);
+
+ ret = H5Dread(large_cube_dataset, H5T_NATIVE_UINT16, mem_large_cube_sid, file_large_cube_sid, xfer_plist,
+ large_cube_buf_1);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check that the data is valid */
+ verify_select_hyper_contig_dr__run_test(large_cube_buf_1, large_cube_size, edge_size, large_rank);
+
+ /* first, verify that we can read from disk correctly using selections
+ * of different rank that H5Sselect_shape_same() views as being of the
+ * same shape.
+ *
+ * Start by reading small_rank-D slice from the on disk large cube, and
+ * verifying that the data read is correct. Verify that H5Sselect_shape_same()
+ * returns true on the memory and file selections.
+ */
+
+ /* set up start, stride, count, and block -- note that we will
+ * change start[] so as to read slices of the large cube.
+ */
+ for (u = 0; u < SS_DR_MAX_RANK; u++) {
+ start[u] = 0;
+ stride[u] = 1;
+ count[u] = 1;
+ if ((SS_DR_MAX_RANK - u) > small_rank)
+ block[u] = 1;
+ else
+ block[u] = (hsize_t)edge_size;
+ } /* end for */
+
+ u = 0;
+ do {
+ v = 0;
+ do {
+ w = 0;
+ do {
+ x = 0;
+ do {
+ /* we know that small_rank >= 1 and that large_rank > small_rank
+ * by the assertions at the head of this function. Thus no
+ * need for another inner loop.
+ */
+ start[0] = (hsize_t)u;
+ start[1] = (hsize_t)v;
+ start[2] = (hsize_t)w;
+ start[3] = (hsize_t)x;
+ start[4] = (hsize_t)0;
+
+ ret = H5Sselect_hyperslab(file_large_cube_sid, H5S_SELECT_SET, start_ptr, stride_ptr,
+ count_ptr, block_ptr);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* verify that H5Sselect_shape_same() reports the two
+ * selections as having the same shape.
+ */
+ check = H5Sselect_shape_same(small_cube_sid, file_large_cube_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Read selection from disk */
+ ret = H5Dread(large_cube_dataset, H5T_NATIVE_UINT16, small_cube_sid, file_large_cube_sid,
+ xfer_plist, small_cube_buf_1);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* verify that expected data is retrieved */
+ mis_match = FALSE;
+ ptr_1 = small_cube_buf_1;
+ expected_value = (uint16_t)((u * edge_size * edge_size * edge_size * edge_size) +
+ (v * edge_size * edge_size * edge_size) +
+ (w * edge_size * edge_size) + (x * edge_size));
+ for (s = 0; s < small_cube_size; s++) {
+ if (*ptr_1 != expected_value)
+ mis_match = TRUE;
+ ptr_1++;
+ expected_value++;
+ } /* end for */
+ if (mis_match)
+ TestErrPrintf("small cube read from largecube has bad data! Line=%d\n", __LINE__);
+
+ x++;
+ } while ((large_rank >= 2) && (small_rank <= 1) && (x < edge_size));
+ w++;
+ } while ((large_rank >= 3) && (small_rank <= 2) && (w < edge_size));
+ v++;
+ } while ((large_rank >= 4) && (small_rank <= 3) && (v < edge_size));
+ u++;
+ } while ((large_rank >= 5) && (small_rank <= 4) && (u < edge_size));
+
+ /* similarly, read the on disk small cube into slices through the in memory
+ * large cube, and verify that the correct data (and only the correct data)
+ * is read.
+ */
+
+ /* zero out the in-memory large cube */
+ HDmemset(large_cube_buf_1, 0, large_cube_size * sizeof(uint16_t));
+
+ u = 0;
+ do {
+ v = 0;
+ do {
+ w = 0;
+ do {
+ x = 0;
+ do {
+ /* we know that small_rank >= 1 and that large_rank > small_rank
+ * by the assertions at the head of this function. Thus no
+ * need for another inner loop.
+ */
+ start[0] = (hsize_t)u;
+ start[1] = (hsize_t)v;
+ start[2] = (hsize_t)w;
+ start[3] = (hsize_t)x;
+ start[4] = (hsize_t)0;
+
+ ret = H5Sselect_hyperslab(mem_large_cube_sid, H5S_SELECT_SET, start_ptr, stride_ptr,
+ count_ptr, block_ptr);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* verify that H5Sselect_shape_same() reports the two
+ * selections as having the same shape.
+ */
+ check = H5Sselect_shape_same(small_cube_sid, mem_large_cube_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Read selection from disk */
+ ret = H5Dread(small_cube_dataset, H5T_NATIVE_UINT16, mem_large_cube_sid, small_cube_sid,
+ xfer_plist, large_cube_buf_1);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* verify that the expected data and only the
+ * expected data was read.
+ */
+ start_index = (u * edge_size * edge_size * edge_size * edge_size) +
+ (v * edge_size * edge_size * edge_size) + (w * edge_size * edge_size) +
+ (x * edge_size);
+ stop_index = start_index + small_cube_size - 1;
+
+ HDassert(start_index < stop_index);
+ HDassert(stop_index <= large_cube_size);
+
+ mis_match = FALSE;
+ ptr_1 = large_cube_buf_1;
+ expected_value = 0;
+ for (s = 0; s < start_index; s++) {
+ if (*ptr_1 != 0)
+ mis_match = TRUE;
+ ptr_1++;
+ } /* end for */
+ for (; s <= stop_index; s++) {
+ if (*ptr_1 != expected_value)
+ mis_match = TRUE;
+ expected_value++;
+ ptr_1++;
+ } /* end for */
+ for (; s < large_cube_size; s++) {
+ if (*ptr_1 != 0)
+ mis_match = TRUE;
+ ptr_1++;
+ } /* end for */
+ if (mis_match)
+ TestErrPrintf("large cube read from small cube has bad data! Line=%u\n", __LINE__);
+
+ /* Zero out the buffer for the next pass */
+ HDmemset(large_cube_buf_1 + start_index, 0, small_cube_size * sizeof(uint16_t));
+
+ x++;
+ } while ((large_rank >= 2) && (small_rank <= 1) && (x < edge_size));
+ w++;
+ } while ((large_rank >= 3) && (small_rank <= 2) && (w < edge_size));
+ v++;
+ } while ((large_rank >= 4) && (small_rank <= 3) && (v < edge_size));
+ u++;
+ } while ((large_rank >= 5) && (small_rank <= 4) && (u < edge_size));
+
+ /* now we go in the opposite direction, verifying that we can write
+ * from memory to file using selections of different rank that
+ * H5Sselect_shape_same() views as being of the same shape.
+ *
+ * Start by writing small_rank D slices from the in memory large cube, to
+ * the on disk small cube dataset. After each write, read the small
+ * cube dataset back from disk, and verify that it contains the expected
+ * data. Verify that H5Sselect_shape_same() returns true on the
+ * memory and file selections.
+ */
+
+ u = 0;
+ do {
+ v = 0;
+ do {
+ w = 0;
+ do {
+ x = 0;
+ do {
+ /* we know that small_rank >= 1 and that large_rank > small_rank
+ * by the assertions at the head of this function. Thus no
+ * need for another inner loop.
+ */
+
+ /* zero out the on disk small cube */
+ ret = H5Dwrite(small_cube_dataset, H5T_NATIVE_UINT16, small_cube_sid, small_cube_sid,
+ xfer_plist, zero_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* select the portion of the in memory large cube from which we
+ * are going to write data.
+ */
+ start[0] = (hsize_t)u;
+ start[1] = (hsize_t)v;
+ start[2] = (hsize_t)w;
+ start[3] = (hsize_t)x;
+ start[4] = (hsize_t)0;
+
+ ret = H5Sselect_hyperslab(mem_large_cube_sid, H5S_SELECT_SET, start_ptr, stride_ptr,
+ count_ptr, block_ptr);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* verify that H5Sselect_shape_same() reports the in
+ * memory slice through the cube selection and the
+ * on disk full small cube selections as having the same shape.
+ */
+ check = H5Sselect_shape_same(small_cube_sid, mem_large_cube_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* write the slice from the in memory large cube to the on disk small cube */
+ ret = H5Dwrite(small_cube_dataset, H5T_NATIVE_UINT16, mem_large_cube_sid, small_cube_sid,
+ xfer_plist, cube_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* read the on disk small cube into memory */
+ ret = H5Dread(small_cube_dataset, H5T_NATIVE_UINT16, small_cube_sid, small_cube_sid,
+ xfer_plist, small_cube_buf_1);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* verify that expected data is retrieved */
+ mis_match = FALSE;
+ ptr_1 = small_cube_buf_1;
+ expected_value = (uint16_t)((u * edge_size * edge_size * edge_size * edge_size) +
+ (v * edge_size * edge_size * edge_size) +
+ (w * edge_size * edge_size) + (x * edge_size));
+ for (s = 0; s < small_cube_size; s++) {
+ if (*ptr_1 != expected_value)
+ mis_match = TRUE;
+ expected_value++;
+ ptr_1++;
+ } /* end for */
+ if (mis_match)
+ TestErrPrintf("small cube data don't match! Line=%d\n", __LINE__);
+
+ x++;
+ } while ((large_rank >= 2) && (small_rank <= 1) && (x < edge_size));
+ w++;
+ } while ((large_rank >= 3) && (small_rank <= 2) && (w < edge_size));
+ v++;
+ } while ((large_rank >= 4) && (small_rank <= 3) && (v < edge_size));
+ u++;
+ } while ((large_rank >= 5) && (small_rank <= 4) && (u < edge_size));
+
+ /* Now write the contents of the in memory small cube to slices of
+ * the on disk cube. After each write, read the on disk cube
+ * into memory, and verify that it contains the expected
+ * data. Verify that H5Sselect_shape_same() returns true on
+ * the memory and file selections.
+ */
+
+ /* select the entire memory and file cube dataspaces */
+ ret = H5Sselect_all(mem_large_cube_sid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ ret = H5Sselect_all(file_large_cube_sid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ u = 0;
+ do {
+ v = 0;
+ do {
+ w = 0;
+ do {
+ x = 0;
+ do {
+ /* we know that small_rank >= 1 and that large_rank > small_rank
+ * by the assertions at the head of this function. Thus no
+ * need for another inner loop.
+ */
+
+ /* zero out the on disk cube */
+ ret = H5Dwrite(large_cube_dataset, H5T_NATIVE_USHORT, mem_large_cube_sid,
+ file_large_cube_sid, xfer_plist, zero_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* select the portion of the in memory large cube to which we
+ * are going to write data.
+ */
+ start[0] = (hsize_t)u;
+ start[1] = (hsize_t)v;
+ start[2] = (hsize_t)w;
+ start[3] = (hsize_t)x;
+ start[4] = (hsize_t)0;
+
+ ret = H5Sselect_hyperslab(file_large_cube_sid, H5S_SELECT_SET, start_ptr, stride_ptr,
+ count_ptr, block_ptr);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* verify that H5Sselect_shape_same() reports the in
+ * memory full selection of the small cube and the
+ * on disk slice through the large cube selection
+ * as having the same shape.
+ */
+ check = H5Sselect_shape_same(small_cube_sid, file_large_cube_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* write the cube from memory to the target slice of the disk cube */
+ ret = H5Dwrite(large_cube_dataset, H5T_NATIVE_UINT16, small_cube_sid, file_large_cube_sid,
+ xfer_plist, cube_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* read the on disk cube into memory */
+ ret = H5Sselect_all(file_large_cube_sid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ ret = H5Dread(large_cube_dataset, H5T_NATIVE_UINT16, mem_large_cube_sid,
+ file_large_cube_sid, xfer_plist, large_cube_buf_1);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* verify that the expected data and only the
+ * expected data was read.
+ */
+ start_index = (u * edge_size * edge_size * edge_size * edge_size) +
+ (v * edge_size * edge_size * edge_size) + (w * edge_size * edge_size) +
+ (x * edge_size);
+ stop_index = start_index + small_cube_size - 1;
+
+ HDassert(start_index < stop_index);
+ HDassert(stop_index <= large_cube_size);
+
+ mis_match = FALSE;
+ ptr_1 = large_cube_buf_1;
+ expected_value = 0;
+ for (s = 0; s < start_index; s++) {
+ if (*ptr_1 != 0)
+ mis_match = TRUE;
+ ptr_1++;
+ } /* end for */
+ for (; s <= stop_index; s++) {
+ if (*ptr_1 != expected_value)
+ mis_match = TRUE;
+ expected_value++;
+ ptr_1++;
+ } /* end for */
+ for (; s < large_cube_size; s++) {
+ if (*ptr_1 != 0)
+ mis_match = TRUE;
+ ptr_1++;
+ } /* end for */
+ if (mis_match)
+ TestErrPrintf("large cube written from small cube has bad data! Line=%d\n", __LINE__);
+
+ x++;
+ } while ((large_rank >= 2) && (small_rank <= 1) && (x < edge_size));
+ w++;
+ } while ((large_rank >= 3) && (small_rank <= 2) && (w < edge_size));
+ v++;
+ } while ((large_rank >= 4) && (small_rank <= 3) && (v < edge_size));
+ u++;
+ } while ((large_rank >= 5) && (small_rank <= 4) && (u < edge_size));
+
+ /* Close memory dataspaces */
+ ret = H5Sclose(small_cube_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(mem_large_cube_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(file_large_cube_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Datasets */
+ ret = H5Dclose(small_cube_dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Dclose(large_cube_dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(small_cube_buf_1);
+ HDfree(large_cube_buf_1);
+
+} /* test_select_hyper_contig_dr__run_test() */
+#endif
+#if 0
+/****************************************************************
+**
+** test_select_hyper_contig_dr(): Driver for the contiguous
+** different-rank hyperslab I/O tests. Iterates over every
+** small-rank / large-rank pair (ranks 1-5), running each
+** combination with both contiguous and chunked storage.
+** H5Sselect_shape_same has already been tested in isolation;
+** here we verify that actual I/O works through such selections.
+**
+****************************************************************/
+static void
+test_select_hyper_contig_dr(hid_t dset_type, hid_t xfer_plist)
+{
+    unsigned edge_size = 6; /* Size of dataset's dataspace dimensions */
+    unsigned max_rank = 5;  /* Max. rank to use */
+    int test_num = 0;       /* Sequence number handed to each sub-test */
+    unsigned small_rank;    /* Current rank of small dataset */
+    unsigned large_rank;    /* Current rank of large dataset */
+    uint16_t *cube_buf;     /* Buffer for writing cube data */
+    uint16_t *zero_buf;     /* Buffer for writing zeroed cube data */
+    size_t max_cube_size;   /* Max. number of elements in largest cube */
+    size_t s;               /* Local index variable */
+    unsigned u;             /* Local index variable */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Contiguous Hyperslabs With Different Rank I/O Functionality\n"));
+
+    /* The largest cube has max_rank dimensions of edge_size elements each */
+    max_cube_size = (size_t)1;
+    for (u = 0; u < max_rank; u++)
+        max_cube_size *= (size_t)edge_size;
+
+    /* Source buffer: the natural numbers, in order from the origin */
+    cube_buf = (uint16_t *)HDmalloc(sizeof(uint16_t) * max_cube_size);
+    CHECK_PTR(cube_buf, "HDmalloc");
+    for (s = 0; s < max_cube_size; s++)
+        cube_buf[s] = (uint16_t)s;
+
+    /* All-zero buffer used by the sub-tests to clear datasets on disk */
+    zero_buf = (uint16_t *)HDcalloc(sizeof(uint16_t), max_cube_size);
+    CHECK_PTR(zero_buf, "HDcalloc");
+
+    /* Exercise every small-rank < large-rank pair, first with contiguous
+     * storage (chunk edge 0) and then with chunked storage (chunk edge 3).
+     */
+    for (large_rank = 1; large_rank <= max_rank; large_rank++)
+        for (small_rank = 1; small_rank < large_rank; small_rank++) {
+            /* Contiguous storage */
+            test_select_hyper_contig_dr__run_test(test_num++, cube_buf, zero_buf, edge_size, 0, small_rank,
+                                                  large_rank, dset_type, xfer_plist);
+
+            /* Chunked storage */
+            test_select_hyper_contig_dr__run_test(test_num++, cube_buf, zero_buf, edge_size, 3, small_rank,
+                                                  large_rank, dset_type, xfer_plist);
+        } /* end for */
+
+    /* Release the shared data buffers */
+    HDfree(cube_buf);
+    HDfree(zero_buf);
+
+} /* test_select_hyper_contig_dr() */
+#endif
+/****************************************************************
+**
+** test_select_hyper_checker_board_dr__select_checker_board():
+** Given an n-cube dataspace with each edge of length
+** edge_size, and a checker_edge_size either select a checker
+** board selection of the entire cube(if sel_rank == n),
+** or select a checker board selection of a
+** sel_rank dimensional slice through n-cube parallel to the
+** sel_rank fastest changing indices, with origin (in the
+** higher indices) as indicated by the start array.
+**
+** Note that this function, like all its relatives, is
+** hard coded to presume a maximum n-cube rank of 5.
+** While this maximum is declared as a constant, increasing
+** it will require extensive coding in addition to changing
+** the value of the constant.
+**
+** JRM -- 9/9/09
+**
+****************************************************************/
+#if 0
+static void
+test_select_hyper_checker_board_dr__select_checker_board(hid_t tgt_n_cube_sid, unsigned tgt_n_cube_rank,
+                                                         unsigned edge_size, unsigned checker_edge_size,
+                                                         unsigned sel_rank, const hsize_t sel_start[])
+{
+    /* Build a checker board selection in tgt_n_cube_sid.  All of the
+     * coordinate arrays below are dimensioned for the maximum rank
+     * (SS_DR_MAX_RANK == 5) and are "right justified": a lower-rank
+     * dataspace / selection uses only the trailing (fastest changing)
+     * entries of each array.
+     */
+    hbool_t first_selection = TRUE; /* TRUE until the first (H5S_SELECT_SET) hyperslab call is made */
+    unsigned n_cube_offset; /* Index of the n-cube's slowest dim within the max-rank arrays */
+    unsigned sel_offset;    /* Index of the selection's slowest dim within the max-rank arrays */
+    hsize_t base_count;     /* # of checkers per dim when the pass starts at offset 0 */
+    hsize_t offset_count;   /* # of checkers per dim when the pass starts at offset checker_edge_size */
+    hsize_t start[SS_DR_MAX_RANK]; /* Offset of hyperslab selection */
+    hsize_t stride[SS_DR_MAX_RANK]; /* Stride of hyperslab selection */
+    hsize_t count[SS_DR_MAX_RANK]; /* Count of hyperslab selection */
+    hsize_t block[SS_DR_MAX_RANK]; /* Block size of hyperslab selection */
+    unsigned i, j, k, l, m; /* Local index variable */
+    unsigned u; /* Local index variables */
+    herr_t ret; /* Generic return value */
+
+    /* Sanity-check the geometry -- see the function header comment */
+    HDassert(edge_size >= 6);
+    HDassert(0 < checker_edge_size);
+    HDassert(checker_edge_size <= edge_size);
+    HDassert(0 < sel_rank);
+    HDassert(sel_rank <= tgt_n_cube_rank);
+    HDassert(tgt_n_cube_rank <= SS_DR_MAX_RANK);
+
+    /* Compute where the selection and the n-cube begin within the
+     * right-justified max-rank arrays.  Since sel_rank <= tgt_n_cube_rank,
+     * the selection starts at or after the n-cube's first dimension.
+     */
+    sel_offset = SS_DR_MAX_RANK - sel_rank;
+    n_cube_offset = SS_DR_MAX_RANK - tgt_n_cube_rank;
+    HDassert(n_cube_offset <= sel_offset);
+
+    /* First, compute the base count (which assumes start == 0
+     * for the associated offset) and offset_count (which
+     * assumes start == checker_edge_size for the associated
+     * offset).  The divisions round up so that a partial checker
+     * at the high edge is still selected.
+     */
+    base_count = edge_size / (checker_edge_size * 2);
+    if ((edge_size % (checker_edge_size * 2)) > 0)
+        base_count++;
+
+    offset_count = (edge_size - checker_edge_size) / (checker_edge_size * 2);
+    if (((edge_size - checker_edge_size) % (checker_edge_size * 2)) > 0)
+        offset_count++;
+
+    /* Now set up the stride and block arrays, and portions of the start
+     * and count arrays that will not be altered during the selection of
+     * the checker board.
+     */
+    u = 0;
+    while (u < n_cube_offset) {
+        /* these values should never be used -- dims below the n-cube's rank */
+        start[u] = 0;
+        stride[u] = 0;
+        count[u] = 0;
+        block[u] = 0;
+
+        u++;
+    } /* end while */
+
+    while (u < sel_offset) {
+        /* dims of the n-cube above the slice: pin to a single element at
+         * the caller-supplied origin (stride is large enough that only
+         * one block is taken)
+         */
+        start[u] = sel_start[u];
+        stride[u] = 2 * edge_size;
+        count[u] = 1;
+        block[u] = 1;
+
+        u++;
+    } /* end while */
+
+    while (u < SS_DR_MAX_RANK) {
+        /* dims within the selection: checker-sized blocks every other
+         * checker; start[] and count[] are filled in per-pass below
+         */
+        stride[u] = 2 * checker_edge_size;
+        block[u] = checker_edge_size;
+
+        u++;
+    } /* end while */
+
+    /* Each selected dimension has two possible phases: one starting at
+     * offset 0 (base_count checkers) and one starting at checker_edge_size
+     * (offset_count checkers).  Iterate over all phase combinations of the
+     * (up to five) selected dims, and OR together the hyperslabs whose
+     * phase indices have even parity -- that union is the checker board.
+     * Loops for dims above sel_offset execute exactly once.
+     */
+    i = 0;
+    do {
+        if (0 >= sel_offset) {
+            if (i == 0) {
+                start[0] = 0;
+                count[0] = base_count;
+            } /* end if */
+            else {
+                start[0] = checker_edge_size;
+                count[0] = offset_count;
+            } /* end else */
+        } /* end if */
+
+        j = 0;
+        do {
+            if (1 >= sel_offset) {
+                if (j == 0) {
+                    start[1] = 0;
+                    count[1] = base_count;
+                } /* end if */
+                else {
+                    start[1] = checker_edge_size;
+                    count[1] = offset_count;
+                } /* end else */
+            } /* end if */
+
+            k = 0;
+            do {
+                if (2 >= sel_offset) {
+                    if (k == 0) {
+                        start[2] = 0;
+                        count[2] = base_count;
+                    } /* end if */
+                    else {
+                        start[2] = checker_edge_size;
+                        count[2] = offset_count;
+                    } /* end else */
+                } /* end if */
+
+                l = 0;
+                do {
+                    if (3 >= sel_offset) {
+                        if (l == 0) {
+                            start[3] = 0;
+                            count[3] = base_count;
+                        } /* end if */
+                        else {
+                            start[3] = checker_edge_size;
+                            count[3] = offset_count;
+                        } /* end else */
+                    } /* end if */
+
+                    m = 0;
+                    do {
+                        if (4 >= sel_offset) {
+                            if (m == 0) {
+                                start[4] = 0;
+                                count[4] = base_count;
+                            } /* end if */
+                            else {
+                                start[4] = checker_edge_size;
+                                count[4] = offset_count;
+                            } /* end else */
+                        } /* end if */
+
+                        /* only even-parity phase combinations belong to the
+                         * checker board; the first one sets the selection,
+                         * the rest are OR'd in
+                         */
+                        if (((i + j + k + l + m) % 2) == 0) {
+                            if (first_selection) {
+                                first_selection = FALSE;
+
+                                ret = H5Sselect_hyperslab(tgt_n_cube_sid, H5S_SELECT_SET,
+                                                          &(start[n_cube_offset]), &(stride[n_cube_offset]),
+                                                          &(count[n_cube_offset]), &(block[n_cube_offset]));
+                                CHECK(ret, FAIL, "H5Sselect_hyperslab");
+                            } /* end if */
+                            else {
+                                ret = H5Sselect_hyperslab(tgt_n_cube_sid, H5S_SELECT_OR,
+                                                          &(start[n_cube_offset]), &(stride[n_cube_offset]),
+                                                          &(count[n_cube_offset]), &(block[n_cube_offset]));
+                                CHECK(ret, FAIL, "H5Sselect_hyperslab");
+                            } /* end else */
+                        } /* end if */
+
+                        m++;
+                    } while ((m <= 1) && (4 >= sel_offset));
+                    l++;
+                } while ((l <= 1) && (3 >= sel_offset));
+                k++;
+            } while ((k <= 1) && (2 >= sel_offset));
+            j++;
+        } while ((j <= 1) && (1 >= sel_offset));
+        i++;
+    } while ((i <= 1) && (0 >= sel_offset));
+
+    /* Weirdness alert:
+     *
+     * Somehow, it seems that selections can extend beyond the
+     * boundaries of the target dataspace -- hence the following
+     * code to manually clip the selection back to the dataspace
+     * proper (AND with a single all-covering block).
+     */
+    for (u = 0; u < SS_DR_MAX_RANK; u++) {
+        start[u] = 0;
+        stride[u] = edge_size;
+        count[u] = 1;
+        block[u] = edge_size;
+    } /* end for */
+
+    ret = H5Sselect_hyperslab(tgt_n_cube_sid, H5S_SELECT_AND, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+} /* test_select_hyper_checker_board_dr__select_checker_board() */
+#endif
+
+/****************************************************************
+**
+** test_select_hyper_checker_board_dr__verify_data():
+**
+** Examine the supplied buffer to see if it contains the
+** expected data. Return TRUE if it does, and FALSE
+** otherwise.
+**
+** The supplied buffer is presumed to contain the results
+** of read or writing a checkerboard selection of an
+** n-cube, or a checkerboard selection of an m (1 <= m < n)
+** dimensional slice through an n-cube parallel to the
+** fastest changing indices.
+**
+** It is further presumed that the buffer was zeroed before
+** the read, and that the n-cube was initialized with the
+** natural numbers listed in order from the origin along
+** the fastest changing axis.
+**
+** Thus for a 10x10x10 3-cube, the value stored in location
+** (x, y, z) (assuming that z is the fastest changing index
+** and x the slowest) is assumed to be:
+**
+** (10 * 10 * x) + (10 * y) + z
+**
+** Thus, if the buffer contains the result of reading a
+** checker board selection of a 10x10x10 3-cube, location
+** (x, y, z) will contain zero if it is not in a checker,
+** and 100x + 10y + z if (x, y, z) is in a checker.
+**
+** If the buffer contains the result of reading a 3
+** dimensional slice (parallel to the three fastest changing
+** indices) through an n cube (n > 3), then the expected
+** values in the buffer will be the same, save that we will
+** add a constant determined by the origin of the 3-cube
+** in the n-cube.
+**
+** Finally, the function presumes that the first element
+** of the buffer resides either at the origin of either
+** a selected or an unselected checker.
+**
+****************************************************************/
+#if 0
+H5_ATTR_PURE static hbool_t
+test_select_hyper_checker_board_dr__verify_data(uint16_t *buf_ptr, unsigned rank, unsigned edge_size,
+                                                unsigned checker_edge_size, uint16_t first_expected_val,
+                                                hbool_t buf_starts_in_checker)
+{
+    /* Walk the buffer in row-major order and check each element: elements
+     * inside a selected checker must hold the running expected value,
+     * elements outside must be zero.  Returns TRUE iff every element
+     * matches.  See the function header comment for the data layout.
+     */
+    hbool_t good_data = TRUE; /* Set to FALSE on the first mismatch (walk continues regardless) */
+    hbool_t in_checker;       /* Whether the current element lies in a selected checker */
+    hbool_t start_in_checker[5]; /* Per-dimension checker phase at the start of each inner loop */
+    uint16_t expected_value;  /* Value expected at the current element, if selected */
+    uint16_t *val_ptr;        /* Cursor into the supplied buffer */
+    unsigned i, j, k, l, m; /* to track position in n-cube */
+    unsigned v, w, x, y, z; /* to track position in checker */
+    const unsigned test_max_rank = 5; /* code changes needed if this is increased */
+
+    HDassert(buf_ptr != NULL);
+    HDassert(0 < rank);
+    HDassert(rank <= test_max_rank);
+    HDassert(edge_size >= 6);
+    HDassert(0 < checker_edge_size);
+    HDassert(checker_edge_size <= edge_size);
+    HDassert(test_max_rank <= SS_DR_MAX_RANK);
+
+    val_ptr = buf_ptr;
+    expected_value = first_expected_val;
+
+    /* Five nested loops, one per possible dimension; loops for dims above
+     * 'rank' run exactly once.  i..m track the absolute index along each
+     * dimension, while v..z count the position within the current checker
+     * and flip the in/out-of-checker state every checker_edge_size elements.
+     */
+    i = 0;
+    v = 0;
+    start_in_checker[0] = buf_starts_in_checker;
+    do {
+        if (v >= checker_edge_size) {
+            /* crossed a checker boundary along dim 0 -- flip phase */
+            start_in_checker[0] = !start_in_checker[0];
+            v = 0;
+        } /* end if */
+
+        j = 0;
+        w = 0;
+        start_in_checker[1] = start_in_checker[0];
+        do {
+            if (w >= checker_edge_size) {
+                start_in_checker[1] = !start_in_checker[1];
+                w = 0;
+            } /* end if */
+
+            k = 0;
+            x = 0;
+            start_in_checker[2] = start_in_checker[1];
+            do {
+                if (x >= checker_edge_size) {
+                    start_in_checker[2] = !start_in_checker[2];
+                    x = 0;
+                } /* end if */
+
+                l = 0;
+                y = 0;
+                start_in_checker[3] = start_in_checker[2];
+                do {
+                    if (y >= checker_edge_size) {
+                        start_in_checker[3] = !start_in_checker[3];
+                        y = 0;
+                    } /* end if */
+
+                    m = 0;
+                    z = 0;
+                    in_checker = start_in_checker[3];
+                    do {
+                        if (z >= checker_edge_size) {
+                            in_checker = !in_checker;
+                            z = 0;
+                        } /* end if */
+
+                        /* selected elements carry the running value; the
+                         * rest of the (pre-zeroed) buffer must still be 0
+                         */
+                        if (in_checker) {
+                            if (*val_ptr != expected_value)
+                                good_data = FALSE;
+                        } /* end if */
+                        else {
+                            if (*val_ptr != 0)
+                                good_data = FALSE;
+                        } /* end else */
+
+                        /* expected_value advances for every element, selected
+                         * or not, mirroring the source cube's initialization
+                         */
+                        val_ptr++;
+                        expected_value++;
+
+                        m++;
+                        z++;
+                    } while ((rank >= (test_max_rank - 4)) && (m < edge_size));
+                    l++;
+                    y++;
+                } while ((rank >= (test_max_rank - 3)) && (l < edge_size));
+                k++;
+                x++;
+            } while ((rank >= (test_max_rank - 2)) && (k < edge_size));
+            j++;
+            w++;
+        } while ((rank >= (test_max_rank - 1)) && (j < edge_size));
+        i++;
+        v++;
+    } while ((rank >= test_max_rank) && (i < edge_size));
+
+    return (good_data);
+} /* test_select_hyper_checker_board_dr__verify_data() */
+#endif
+
+/****************************************************************
+**
+** test_select_hyper_checker_board_dr__run_test(): Test H5S
+** (dataspace) selection code with checker board source and
+** target selections having different ranks but the same
+** shape. We have already tested H5Sselect_shape_same in
+** isolation, so now we try to do I/O.
+**
+****************************************************************/
+#if 0
+static void
+test_select_hyper_checker_board_dr__run_test(int test_num, const uint16_t *cube_buf, const uint16_t *zero_buf,
+ unsigned edge_size, unsigned checker_edge_size,
+ unsigned chunk_edge_size, unsigned small_rank,
+ unsigned large_rank, hid_t dset_type, hid_t xfer_plist)
+{
+ hbool_t data_ok;
+ hid_t fapl; /* File access property list */
+ hid_t fid; /* HDF5 File IDs */
+ hid_t full_small_cube_sid; /* Dataspace for small cube w/all selection */
+ hid_t mem_small_cube_sid;
+ hid_t file_small_cube_sid;
+ hid_t full_large_cube_sid; /* Dataspace for large cube w/all selection */
+ hid_t mem_large_cube_sid;
+ hid_t file_large_cube_sid;
+ hid_t small_cube_dcpl_id = H5P_DEFAULT; /* DCPL for small cube dataset */
+ hid_t large_cube_dcpl_id = H5P_DEFAULT; /* DCPL for large cube dataset */
+ hid_t small_cube_dataset; /* Dataset ID */
+ hid_t large_cube_dataset; /* Dataset ID */
+ unsigned small_rank_offset; /* Rank offset of slice */
+ const unsigned test_max_rank = 5; /* must update code if this changes */
+ size_t start_index; /* Offset within buffer to begin inspecting */
+ size_t stop_index; /* Offset within buffer to end inspecting */
+ uint16_t expected_value;
+ uint16_t *small_cube_buf_1;
+ uint16_t *large_cube_buf_1;
+ uint16_t *ptr_1;
+ size_t small_cube_size; /* Number of elements in small cube */
+ size_t large_cube_size; /* Number of elements in large cube */
+ hsize_t dims[SS_DR_MAX_RANK];
+ hsize_t chunk_dims[SS_DR_MAX_RANK];
+ hsize_t sel_start[SS_DR_MAX_RANK];
+ unsigned u, v, w, x; /* Local index variables */
+ size_t s; /* Local index variable */
+ htri_t check; /* Shape comparison return value */
+ herr_t ret; /* Generic return value */
+
+ MESSAGE(7, ("\tn-cube slice through m-cube I/O test %d.\n", test_num));
+ MESSAGE(7, ("\tranks = %d/%d, edge_size = %d, checker_edge_size = %d, chunk_edge_size = %d.\n",
+ small_rank, large_rank, edge_size, checker_edge_size, chunk_edge_size));
+
+ HDassert(edge_size >= 6);
+ HDassert(checker_edge_size > 0);
+ HDassert(checker_edge_size <= edge_size);
+ HDassert(edge_size >= chunk_edge_size);
+ HDassert((chunk_edge_size == 0) || (chunk_edge_size >= 3));
+ HDassert(small_rank > 0);
+ HDassert(small_rank < large_rank);
+ HDassert(large_rank <= test_max_rank);
+ HDassert(test_max_rank <= SS_DR_MAX_RANK);
+
+ /* Compute cube sizes */
+ small_cube_size = large_cube_size = (size_t)1;
+ for (u = 0; u < large_rank; u++) {
+ if (u < small_rank)
+ small_cube_size *= (size_t)edge_size;
+
+ large_cube_size *= (size_t)edge_size;
+ } /* end for */
+ HDassert(large_cube_size < (size_t)(UINT_MAX));
+
+ small_rank_offset = test_max_rank - small_rank;
+ HDassert(small_rank_offset >= 1);
+
+ /* also, at present, we use 16 bit values in this test --
+ * hence the following assertion. Delete it if we convert
+ * to 32 bit values.
+ */
+ HDassert(large_cube_size < (size_t)(64 * 1024));
+
+ /* Allocate & initialize buffers */
+ small_cube_buf_1 = (uint16_t *)HDcalloc(sizeof(uint16_t), small_cube_size);
+ CHECK_PTR(small_cube_buf_1, "HDcalloc");
+ large_cube_buf_1 = (uint16_t *)HDcalloc(sizeof(uint16_t), large_cube_size);
+ CHECK_PTR(large_cube_buf_1, "HDcalloc");
+
+ /* Create a dataset transfer property list */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ CHECK(fapl, FAIL, "H5Pcreate");
+
+ /* Use the 'core' VFD for this test */
+ ret = H5Pset_fapl_core(fapl, (size_t)(1024 * 1024), FALSE);
+ CHECK(ret, FAIL, "H5Pset_fapl_core");
+
+ /* Create file */
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Close file access property list */
+ ret = H5Pclose(fapl);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* setup dims: */
+ dims[0] = dims[1] = dims[2] = dims[3] = dims[4] = edge_size;
+
+ /* Create small cube dataspaces */
+ full_small_cube_sid = H5Screate_simple((int)small_rank, dims, NULL);
+ CHECK(full_small_cube_sid, FAIL, "H5Screate_simple");
+
+ mem_small_cube_sid = H5Screate_simple((int)small_rank, dims, NULL);
+ CHECK(mem_small_cube_sid, FAIL, "H5Screate_simple");
+
+ file_small_cube_sid = H5Screate_simple((int)small_rank, dims, NULL);
+ CHECK(file_small_cube_sid, FAIL, "H5Screate_simple");
+
+ /* Create large cube dataspace */
+ full_large_cube_sid = H5Screate_simple((int)large_rank, dims, NULL);
+ CHECK(full_large_cube_sid, FAIL, "H5Screate_simple");
+
+ mem_large_cube_sid = H5Screate_simple((int)large_rank, dims, NULL);
+ CHECK(mem_large_cube_sid, FAIL, "H5Screate_simple");
+
+ file_large_cube_sid = H5Screate_simple((int)large_rank, dims, NULL);
+ CHECK(file_large_cube_sid, FAIL, "H5Screate_simple");
+
+ /* if chunk edge size is greater than zero, set up the small and
+ * large data set creation property lists to specify chunked
+ * datasets.
+ */
+ if (chunk_edge_size > 0) {
+ chunk_dims[0] = chunk_dims[1] = chunk_dims[2] = chunk_dims[3] = chunk_dims[4] = chunk_edge_size;
+
+ small_cube_dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(small_cube_dcpl_id, FAIL, "H5Pcreate");
+
+ ret = H5Pset_layout(small_cube_dcpl_id, H5D_CHUNKED);
+ CHECK(ret, FAIL, "H5Pset_layout");
+
+ ret = H5Pset_chunk(small_cube_dcpl_id, (int)small_rank, chunk_dims);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+
+ large_cube_dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(large_cube_dcpl_id, FAIL, "H5Pcreate");
+
+ ret = H5Pset_layout(large_cube_dcpl_id, H5D_CHUNKED);
+ CHECK(ret, FAIL, "H5Pset_layout");
+
+ ret = H5Pset_chunk(large_cube_dcpl_id, (int)large_rank, chunk_dims);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+ } /* end if */
+
+ /* create the small cube dataset */
+ small_cube_dataset = H5Dcreate2(fid, "small_cube_dataset", dset_type, file_small_cube_sid, H5P_DEFAULT,
+ small_cube_dcpl_id, H5P_DEFAULT);
+ CHECK(small_cube_dataset, FAIL, "H5Dcreate2");
+
+ /* Close non-default small dataset DCPL */
+ if (small_cube_dcpl_id != H5P_DEFAULT) {
+ ret = H5Pclose(small_cube_dcpl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+ } /* end if */
+
+ /* create the large cube dataset */
+ large_cube_dataset = H5Dcreate2(fid, "large_cube_dataset", dset_type, file_large_cube_sid, H5P_DEFAULT,
+ large_cube_dcpl_id, H5P_DEFAULT);
+ CHECK(large_cube_dataset, FAIL, "H5Dcreate2");
+
+ /* Close non-default large dataset DCPL */
+ if (large_cube_dcpl_id != H5P_DEFAULT) {
+ ret = H5Pclose(large_cube_dcpl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+ } /* end if */
+
+ /* write initial data to the on disk datasets */
+ ret = H5Dwrite(small_cube_dataset, H5T_NATIVE_UINT16, full_small_cube_sid, full_small_cube_sid,
+ xfer_plist, cube_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ ret = H5Dwrite(large_cube_dataset, H5T_NATIVE_UINT16, full_large_cube_sid, full_large_cube_sid,
+ xfer_plist, cube_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* read initial small cube data from disk and verify that it is as expected. */
+ ret = H5Dread(small_cube_dataset, H5T_NATIVE_UINT16, full_small_cube_sid, full_small_cube_sid, xfer_plist,
+ small_cube_buf_1);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check that the data is valid */
+ verify_select_hyper_contig_dr__run_test(small_cube_buf_1, small_cube_size, edge_size, small_rank);
+
+ /* read initial large cube data from disk and verify that it is as expected. */
+ ret = H5Dread(large_cube_dataset, H5T_NATIVE_UINT16, full_large_cube_sid, full_large_cube_sid, xfer_plist,
+ large_cube_buf_1);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check that the data is valid */
+ verify_select_hyper_contig_dr__run_test(large_cube_buf_1, large_cube_size, edge_size, large_rank);
+
+ /* first, verify that we can read from disk correctly using selections
+ * of different rank that H5Sselect_shape_same() views as being of the
+ * same shape.
+ *
+ * Start by reading small_rank-D slice from the on disk large cube, and
+ * verifying that the data read is correct. Verify that H5Sselect_shape_same()
+ * returns true on the memory and file selections.
+ *
+ * The first step is to set up the needed checker board selection in the
+     * in memory small cube
+ */
+
+ sel_start[0] = sel_start[1] = sel_start[2] = sel_start[3] = sel_start[4] = 0;
+
+ test_select_hyper_checker_board_dr__select_checker_board(mem_small_cube_sid, small_rank, edge_size,
+ checker_edge_size, small_rank, sel_start);
+
+ /* now read slices from the large, on-disk cube into the small cube.
+ * Note how we adjust sel_start only in the dimensions peculiar to the
+ * large cube.
+ */
+
+ u = 0;
+ do {
+ if (small_rank_offset > 0)
+ sel_start[0] = u;
+
+ v = 0;
+ do {
+ if (small_rank_offset > 1)
+ sel_start[1] = v;
+
+ w = 0;
+ do {
+ if (small_rank_offset > 2)
+ sel_start[2] = w;
+
+ x = 0;
+ do {
+ if (small_rank_offset > 3)
+ sel_start[3] = x;
+
+ /* we know that small_rank >= 1 and that large_rank > small_rank
+ * by the assertions at the head of this function. Thus no
+ * need for another inner loop.
+ */
+
+ HDassert((sel_start[0] == 0) || (0 < small_rank_offset));
+ HDassert((sel_start[1] == 0) || (1 < small_rank_offset));
+ HDassert((sel_start[2] == 0) || (2 < small_rank_offset));
+ HDassert((sel_start[3] == 0) || (3 < small_rank_offset));
+ HDassert((sel_start[4] == 0) || (4 < small_rank_offset));
+
+ test_select_hyper_checker_board_dr__select_checker_board(
+ file_large_cube_sid, large_rank, edge_size, checker_edge_size, small_rank, sel_start);
+
+ /* verify that H5Sselect_shape_same() reports the two
+ * selections as having the same shape.
+ */
+ check = H5Sselect_shape_same(mem_small_cube_sid, file_large_cube_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* zero the buffer that we will be using for reading */
+ HDmemset(small_cube_buf_1, 0, sizeof(*small_cube_buf_1) * small_cube_size);
+
+ /* Read selection from disk */
+ ret = H5Dread(large_cube_dataset, H5T_NATIVE_UINT16, mem_small_cube_sid,
+ file_large_cube_sid, xfer_plist, small_cube_buf_1);
+ CHECK(ret, FAIL, "H5Dread");
+
+ expected_value = (uint16_t)((u * edge_size * edge_size * edge_size * edge_size) +
+ (v * edge_size * edge_size * edge_size) +
+ (w * edge_size * edge_size) + (x * edge_size));
+
+ data_ok = test_select_hyper_checker_board_dr__verify_data(small_cube_buf_1, small_rank,
+ edge_size, checker_edge_size,
+ expected_value, (hbool_t)TRUE);
+ if (!data_ok)
+ TestErrPrintf("small cube read from largecube has bad data! Line=%d\n", __LINE__);
+
+ x++;
+ } while ((large_rank >= (test_max_rank - 3)) && (small_rank <= (test_max_rank - 4)) &&
+ (x < edge_size));
+ w++;
+ } while ((large_rank >= (test_max_rank - 2)) && (small_rank <= (test_max_rank - 3)) &&
+ (w < edge_size));
+ v++;
+ } while ((large_rank >= (test_max_rank - 1)) && (small_rank <= (test_max_rank - 2)) &&
+ (v < edge_size));
+ u++;
+ } while ((large_rank >= test_max_rank) && (small_rank <= (test_max_rank - 1)) && (u < edge_size));
+
+ /* similarly, read the on disk small cube into slices through the in memory
+ * large cube, and verify that the correct data (and only the correct data)
+ * is read.
+ */
+
+ /* select a checker board in the file small cube dataspace */
+ sel_start[0] = sel_start[1] = sel_start[2] = sel_start[3] = sel_start[4] = 0;
+ test_select_hyper_checker_board_dr__select_checker_board(file_small_cube_sid, small_rank, edge_size,
+ checker_edge_size, small_rank, sel_start);
+
+ u = 0;
+ do {
+ if (0 < small_rank_offset)
+ sel_start[0] = u;
+
+ v = 0;
+ do {
+ if (1 < small_rank_offset)
+ sel_start[1] = v;
+
+ w = 0;
+ do {
+ if (2 < small_rank_offset)
+ sel_start[2] = w;
+
+ x = 0;
+ do {
+ if (3 < small_rank_offset)
+ sel_start[3] = x;
+
+ /* we know that small_rank >= 1 and that large_rank > small_rank
+ * by the assertions at the head of this function. Thus no
+ * need for another inner loop.
+ */
+
+ HDassert((sel_start[0] == 0) || (0 < small_rank_offset));
+ HDassert((sel_start[1] == 0) || (1 < small_rank_offset));
+ HDassert((sel_start[2] == 0) || (2 < small_rank_offset));
+ HDassert((sel_start[3] == 0) || (3 < small_rank_offset));
+ HDassert((sel_start[4] == 0) || (4 < small_rank_offset));
+
+ test_select_hyper_checker_board_dr__select_checker_board(
+ mem_large_cube_sid, large_rank, edge_size, checker_edge_size, small_rank, sel_start);
+
+ /* verify that H5Sselect_shape_same() reports the two
+ * selections as having the same shape.
+ */
+ check = H5Sselect_shape_same(file_small_cube_sid, mem_large_cube_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* zero out the in memory large cube */
+ HDmemset(large_cube_buf_1, 0, sizeof(*large_cube_buf_1) * large_cube_size);
+
+ /* Read selection from disk */
+ ret = H5Dread(small_cube_dataset, H5T_NATIVE_UINT16, mem_large_cube_sid,
+ file_small_cube_sid, xfer_plist, large_cube_buf_1);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* verify that the expected data and only the
+ * expected data was read.
+ */
+ data_ok = TRUE;
+ ptr_1 = large_cube_buf_1;
+ expected_value = 0;
+ start_index = (u * edge_size * edge_size * edge_size * edge_size) +
+ (v * edge_size * edge_size * edge_size) + (w * edge_size * edge_size) +
+ (x * edge_size);
+ stop_index = start_index + small_cube_size - 1;
+
+ HDassert(start_index < stop_index);
+ HDassert(stop_index <= large_cube_size);
+
+ /* verify that the large cube contains only zeros before the slice */
+ for (s = 0; s < start_index; s++) {
+ if (*ptr_1 != 0)
+ data_ok = FALSE;
+ ptr_1++;
+ } /* end for */
+ HDassert(s == start_index);
+
+ data_ok &= test_select_hyper_checker_board_dr__verify_data(
+ ptr_1, small_rank, edge_size, checker_edge_size, (uint16_t)0, (hbool_t)TRUE);
+
+ ptr_1 += small_cube_size;
+ s += small_cube_size;
+
+ HDassert(s == stop_index + 1);
+
+ /* verify that the large cube contains only zeros after the slice */
+ for (s = stop_index + 1; s < large_cube_size; s++) {
+ if (*ptr_1 != 0)
+ data_ok = FALSE;
+ ptr_1++;
+ } /* end for */
+ if (!data_ok)
+ TestErrPrintf("large cube read from small cube has bad data! Line=%d\n", __LINE__);
+
+ x++;
+ } while ((large_rank >= (test_max_rank - 3)) && (small_rank <= (test_max_rank - 4)) &&
+ (x < edge_size));
+ w++;
+ } while ((large_rank >= (test_max_rank - 2)) && (small_rank <= (test_max_rank - 3)) &&
+ (w < edge_size));
+ v++;
+ } while ((large_rank >= (test_max_rank - 1)) && (small_rank <= (test_max_rank - 2)) &&
+ (v < edge_size));
+ u++;
+ } while ((large_rank >= test_max_rank) && (small_rank <= (test_max_rank - 1)) && (u < edge_size));
+
+ /* now we go in the opposite direction, verifying that we can write
+ * from memory to file using selections of different rank that
+ * H5Sselect_shape_same() views as being of the same shape.
+ *
+ * Start by writing small_rank D slices from the in memory large cube, to
+     * the on disk small cube dataset. After each write, read the small
+ * cube dataset back from disk, and verify that it contains the expected
+ * data. Verify that H5Sselect_shape_same() returns true on the
+ * memory and file selections.
+ */
+
+ /* select a checker board in the file small cube dataspace */
+ sel_start[0] = sel_start[1] = sel_start[2] = sel_start[3] = sel_start[4] = 0;
+ test_select_hyper_checker_board_dr__select_checker_board(file_small_cube_sid, small_rank, edge_size,
+ checker_edge_size, small_rank, sel_start);
+
+ u = 0;
+ do {
+ if (small_rank_offset > 0)
+ sel_start[0] = u;
+
+ v = 0;
+ do {
+ if (small_rank_offset > 1)
+ sel_start[1] = v;
+
+ w = 0;
+ do {
+ if (small_rank_offset > 2)
+ sel_start[2] = w;
+
+ x = 0;
+ do {
+ if (small_rank_offset > 3)
+ sel_start[3] = x;
+
+ /* zero out the on disk small cube */
+ ret = H5Dwrite(small_cube_dataset, H5T_NATIVE_UINT16, full_small_cube_sid,
+ full_small_cube_sid, xfer_plist, zero_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* we know that small_rank >= 1 and that large_rank > small_rank
+ * by the assertions at the head of this function. Thus no
+ * need for another inner loop.
+ */
+
+ HDassert((sel_start[0] == 0) || (0 < small_rank_offset));
+ HDassert((sel_start[1] == 0) || (1 < small_rank_offset));
+ HDassert((sel_start[2] == 0) || (2 < small_rank_offset));
+ HDassert((sel_start[3] == 0) || (3 < small_rank_offset));
+ HDassert((sel_start[4] == 0) || (4 < small_rank_offset));
+
+ test_select_hyper_checker_board_dr__select_checker_board(
+ mem_large_cube_sid, large_rank, edge_size, checker_edge_size, small_rank, sel_start);
+
+ /* verify that H5Sselect_shape_same() reports the two
+ * selections as having the same shape.
+ */
+ check = H5Sselect_shape_same(file_small_cube_sid, mem_large_cube_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* write the slice from the in memory large cube to the
+ * on disk small cube
+ */
+ ret = H5Dwrite(small_cube_dataset, H5T_NATIVE_UINT16, mem_large_cube_sid,
+ file_small_cube_sid, xfer_plist, cube_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* zero the buffer that we will be using for reading */
+ HDmemset(small_cube_buf_1, 0, sizeof(*small_cube_buf_1) * small_cube_size);
+
+ /* read the on disk small cube into memory */
+ ret = H5Dread(small_cube_dataset, H5T_NATIVE_UINT16, full_small_cube_sid,
+ full_small_cube_sid, xfer_plist, small_cube_buf_1);
+ CHECK(ret, FAIL, "H5Dread");
+
+ expected_value = (uint16_t)((u * edge_size * edge_size * edge_size * edge_size) +
+ (v * edge_size * edge_size * edge_size) +
+ (w * edge_size * edge_size) + (x * edge_size));
+
+ data_ok = test_select_hyper_checker_board_dr__verify_data(small_cube_buf_1, small_rank,
+ edge_size, checker_edge_size,
+ expected_value, (hbool_t)TRUE);
+ if (!data_ok)
+ TestErrPrintf("small cube read from largecube has bad data! Line=%d\n", __LINE__);
+
+ x++;
+ } while ((large_rank >= (test_max_rank - 3)) && (small_rank <= (test_max_rank - 4)) &&
+ (x < edge_size));
+ w++;
+ } while ((large_rank >= (test_max_rank - 2)) && (small_rank <= (test_max_rank - 3)) &&
+ (w < edge_size));
+ v++;
+ } while ((large_rank >= (test_max_rank - 1)) && (small_rank <= (test_max_rank - 2)) &&
+ (v < edge_size));
+ u++;
+ } while ((large_rank >= test_max_rank) && (small_rank <= (test_max_rank - 1)) && (u < edge_size));
+
+ /* Now write checker board selections of the entries in memory
+ * small cube to slices of the on disk cube. After each write,
+     * read the on disk large cube into memory, and verify that
+     * it contains the expected data. Verify that
+ * H5Sselect_shape_same() returns true on the memory and file
+ * selections.
+ */
+
+ /* select a checker board in the in memory small cube dataspace */
+ sel_start[0] = sel_start[1] = sel_start[2] = sel_start[3] = sel_start[4] = 0;
+ test_select_hyper_checker_board_dr__select_checker_board(mem_small_cube_sid, small_rank, edge_size,
+ checker_edge_size, small_rank, sel_start);
+
+ u = 0;
+ do {
+ if (small_rank_offset > 0)
+ sel_start[0] = u;
+
+ v = 0;
+ do {
+ if (small_rank_offset > 1)
+ sel_start[1] = v;
+
+ w = 0;
+ do {
+ if (small_rank_offset > 2)
+ sel_start[2] = w;
+
+ x = 0;
+ do {
+ if (small_rank_offset > 3)
+ sel_start[3] = x;
+
+ /* zero out the on disk cube */
+ ret = H5Dwrite(large_cube_dataset, H5T_NATIVE_USHORT, full_large_cube_sid,
+ full_large_cube_sid, xfer_plist, zero_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* we know that small_rank >= 1 and that large_rank > small_rank
+ * by the assertions at the head of this function. Thus no
+ * need for another inner loop.
+ */
+
+ HDassert((sel_start[0] == 0) || (0 < small_rank_offset));
+ HDassert((sel_start[1] == 0) || (1 < small_rank_offset));
+ HDassert((sel_start[2] == 0) || (2 < small_rank_offset));
+ HDassert((sel_start[3] == 0) || (3 < small_rank_offset));
+ HDassert((sel_start[4] == 0) || (4 < small_rank_offset));
+
+ test_select_hyper_checker_board_dr__select_checker_board(
+ file_large_cube_sid, large_rank, edge_size, checker_edge_size, small_rank, sel_start);
+
+ /* verify that H5Sselect_shape_same() reports the two
+ * selections as having the same shape.
+ */
+ check = H5Sselect_shape_same(file_large_cube_sid, mem_small_cube_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* write the checker board selection of the in memory
+ * small cube to a slice through the on disk large
+ * cube.
+ */
+ ret = H5Dwrite(large_cube_dataset, H5T_NATIVE_UINT16, mem_small_cube_sid,
+ file_large_cube_sid, xfer_plist, cube_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* zero out the in memory large cube */
+ HDmemset(large_cube_buf_1, 0, sizeof(*large_cube_buf_1) * large_cube_size);
+
+ /* read the on disk large cube into memory */
+ ret = H5Dread(large_cube_dataset, H5T_NATIVE_UINT16, full_large_cube_sid,
+ full_large_cube_sid, xfer_plist, large_cube_buf_1);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* verify that the expected data and only the
+ * expected data was written to the on disk large
+ * cube.
+ */
+ data_ok = TRUE;
+ ptr_1 = large_cube_buf_1;
+ expected_value = 0;
+ start_index = (u * edge_size * edge_size * edge_size * edge_size) +
+ (v * edge_size * edge_size * edge_size) + (w * edge_size * edge_size) +
+ (x * edge_size);
+ stop_index = start_index + small_cube_size - 1;
+
+ HDassert(start_index < stop_index);
+ HDassert(stop_index <= large_cube_size);
+
+ /* verify that the large cube contains only zeros before the slice */
+ for (s = 0; s < start_index; s++) {
+ if (*ptr_1 != 0)
+ data_ok = FALSE;
+ ptr_1++;
+ } /* end for */
+ HDassert(s == start_index);
+
+ /* verify that the slice contains the expected data */
+ data_ok &= test_select_hyper_checker_board_dr__verify_data(
+ ptr_1, small_rank, edge_size, checker_edge_size, (uint16_t)0, (hbool_t)TRUE);
+
+ ptr_1 += small_cube_size;
+ s += small_cube_size;
+
+ HDassert(s == stop_index + 1);
+
+ /* verify that the large cube contains only zeros after the slice */
+ for (s = stop_index + 1; s < large_cube_size; s++) {
+ if (*ptr_1 != 0)
+ data_ok = FALSE;
+ ptr_1++;
+ } /* end for */
+ if (!data_ok)
+ TestErrPrintf("large cube written from small cube has bad data! Line=%d\n", __LINE__);
+
+ x++;
+ } while ((large_rank >= (test_max_rank - 3)) && (small_rank <= (test_max_rank - 4)) &&
+ (x < edge_size));
+ w++;
+ } while ((large_rank >= (test_max_rank - 2)) && (small_rank <= (test_max_rank - 3)) &&
+ (w < edge_size));
+ v++;
+ } while ((large_rank >= (test_max_rank - 1)) && (small_rank <= (test_max_rank - 2)) &&
+ (v < edge_size));
+ u++;
+ } while ((large_rank >= test_max_rank) && (small_rank <= (test_max_rank - 1)) && (u < edge_size));
+
+ /* Close memory dataspaces */
+ ret = H5Sclose(full_small_cube_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(full_large_cube_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(mem_small_cube_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(mem_large_cube_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(file_small_cube_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(file_large_cube_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Datasets */
+ ret = H5Dclose(small_cube_dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ ret = H5Dclose(large_cube_dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(small_cube_buf_1);
+ HDfree(large_cube_buf_1);
+
+} /* test_select_hyper_checker_board_dr__run_test() */
+#endif
+/****************************************************************
+**
+** test_select_hyper_checker_board_dr(): Test H5S (dataspace)
+** selection code with checkerboard source and target having
+** different ranks but the same shape. We have already
+** tested H5Sselect_shape_same in isolation, so now we try to do
+** I/O.
+**
+** This is just an initial smoke check, so we will work
+** with a slice through a cube only.
+**
+****************************************************************/
+#if 0
+/* NOTE(review): this driver (and its __run_test helper above) is compiled
+ * out via #if 0 -- presumably pending the API-test port; confirm intent
+ * before re-enabling.
+ */
+static void
+test_select_hyper_checker_board_dr(hid_t dset_type, hid_t xfer_plist)
+{
+    uint16_t *cube_buf;              /* Buffer for writing cube data */
+    uint16_t *cube_ptr;              /* Temporary pointer into cube data */
+    uint16_t *zero_buf;              /* Buffer for writing zeroed cube data */
+    int       test_num = 0;          /* Sequence number passed to each sub-test */
+    unsigned checker_edge_size = 2;  /* Size of checkerboard dimension */
+    unsigned chunk_edge_size;        /* Size of chunk's dataspace dimensions */
+    unsigned edge_size = 6;          /* Size of dataset's dataspace dimensions */
+    unsigned small_rank;             /* Current rank of small dataset */
+    unsigned large_rank;             /* Current rank of large dataset */
+    unsigned max_rank = 5;           /* Max. rank to use */
+    size_t   max_cube_size;          /* Max. number of elements in largest cube */
+    size_t   s;                      /* Local index variable */
+    unsigned u;                      /* Local index variable */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Checker Board Hyperslabs With Different Rank I/O Functionality\n"));
+
+    /* Compute max. cube size -- sized with (edge_size + 1) because the
+     * sub-tests below are also run with an edge size one larger than the
+     * default, and both share these buffers.
+     */
+    max_cube_size = (size_t)1;
+    for (u = 0; u < max_rank; u++)
+        max_cube_size *= (size_t)(edge_size + 1);
+
+    /* Allocate cube buffer for writing values */
+    cube_buf = (uint16_t *)HDmalloc(sizeof(uint16_t) * max_cube_size);
+    CHECK_PTR(cube_buf, "HDmalloc");
+
+    /* Initialize the cube buffer with sequential 16-bit values */
+    cube_ptr = cube_buf;
+    for (s = 0; s < max_cube_size; s++)
+        *cube_ptr++ = (uint16_t)s;
+
+    /* Allocate cube buffer for zeroing values on disk (HDcalloc zero-fills) */
+    zero_buf = (uint16_t *)HDcalloc(sizeof(uint16_t), max_cube_size);
+    CHECK_PTR(zero_buf, "HDcalloc");
+
+    /* Exercise every rank pair with small_rank < large_rank, in four
+     * configurations each: contiguous and chunked layout, at both the
+     * default and the enlarged edge size.
+     */
+    for (large_rank = 1; large_rank <= max_rank; large_rank++) {
+        for (small_rank = 1; small_rank < large_rank; small_rank++) {
+            /* Contiguous storage (chunk_edge_size == 0 disables chunking) */
+            chunk_edge_size = 0;
+            test_select_hyper_checker_board_dr__run_test(test_num, cube_buf, zero_buf, edge_size,
+                                                         checker_edge_size, chunk_edge_size, small_rank,
+                                                         large_rank, dset_type, xfer_plist);
+            test_num++;
+
+            test_select_hyper_checker_board_dr__run_test(test_num, cube_buf, zero_buf, edge_size + 1,
+                                                         checker_edge_size, chunk_edge_size, small_rank,
+                                                         large_rank, dset_type, xfer_plist);
+            test_num++;
+
+            /* Chunked storage */
+            chunk_edge_size = 3;
+            test_select_hyper_checker_board_dr__run_test(test_num, cube_buf, zero_buf, edge_size,
+                                                         checker_edge_size, chunk_edge_size, small_rank,
+                                                         large_rank, dset_type, xfer_plist);
+            test_num++;
+
+            test_select_hyper_checker_board_dr__run_test(test_num, cube_buf, zero_buf, edge_size + 1,
+                                                         checker_edge_size, chunk_edge_size, small_rank,
+                                                         large_rank, dset_type, xfer_plist);
+            test_num++;
+        } /* for loop on small rank */
+    } /* for loop on large rank */
+
+    /* Release the shared write/zero buffers */
+    HDfree(cube_buf);
+    HDfree(zero_buf);
+
+} /* test_select_hyper_checker_board_dr() */
+#endif
+/****************************************************************
+**
+** test_select_hyper_copy(): Test H5S (dataspace) selection code.
+** Tests copying hyperslab selections
+**
+****************************************************************/
+static void
+test_select_hyper_copy(void)
+{
+    hid_t fid1;             /* HDF5 File IDs */
+    hid_t data1, data2;     /* Dataset IDs */
+    hid_t sid1, sid2, sid3; /* Dataspace IDs */
+    hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+    hsize_t dims2[] = {SPACE2_DIM1, SPACE2_DIM2};
+    hsize_t dims3[] = {SPACE3_DIM1, SPACE3_DIM2};
+    hsize_t start[SPACE1_RANK];  /* Starting location of hyperslab */
+    hsize_t stride[SPACE1_RANK]; /* Stride of hyperslab */
+    hsize_t count[SPACE1_RANK];  /* Element count of hyperslab */
+    hsize_t block[SPACE1_RANK];  /* Block size of hyperslab */
+    uint16_t *wbuf, /* buffer to write to disk */
+        *rbuf,      /* 1st buffer read from disk */
+        *rbuf2,     /* 2nd buffer read from disk */
+        *tbuf;      /* temporary buffer pointer */
+    int i, j;       /* Counters */
+    herr_t ret;     /* Generic return value */
+
+    /* Output message about test being performed */
+    /* NOTE(review): banner text says "Strides" but this test exercises
+     * H5Scopy() of hyperslab selections -- looks like a copy/paste of
+     * another test's message; the string is left as-is here.
+     */
+    MESSAGE(5, ("Testing Hyperslabs with Strides Functionality\n"));
+
+    /* Allocate write & read buffers */
+    wbuf = (uint16_t *)HDmalloc(sizeof(uint16_t) * SPACE2_DIM1 * SPACE2_DIM2);
+    CHECK_PTR(wbuf, "HDmalloc");
+    rbuf = (uint16_t *)HDcalloc(sizeof(uint16_t), (size_t)(SPACE3_DIM1 * SPACE3_DIM2));
+    CHECK_PTR(rbuf, "HDcalloc");
+    rbuf2 = (uint16_t *)HDcalloc(sizeof(uint16_t), (size_t)(SPACE3_DIM1 * SPACE3_DIM2));
+    CHECK_PTR(rbuf2, "HDcalloc");
+
+    /* Initialize write buffer with sequential 16-bit values */
+    for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
+        for (j = 0; j < SPACE2_DIM2; j++)
+            *tbuf++ = (uint16_t)((i * SPACE2_DIM2) + j);
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for dataset */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create dataspace for writing buffer */
+    sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select 2x3x3 count with a stride of 2x4x3 & 1x2x2 block hyperslab for disk dataset */
+    start[0] = 0;
+    start[1] = 0;
+    start[2] = 0;
+    stride[0] = 2;
+    stride[1] = 4;
+    stride[2] = 3;
+    count[0] = 2;
+    count[1] = 3;
+    count[2] = 3;
+    block[0] = 1;
+    block[1] = 2;
+    block[2] = 2;
+    ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Select 4x2 count with a stride of 5x5 & 3x3 block hyperslab for memory dataset */
+    start[0] = 1;
+    start[1] = 1;
+    stride[0] = 5;
+    stride[1] = 5;
+    count[0] = 4;
+    count[1] = 2;
+    block[0] = 3;
+    block[1] = 3;
+    ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Make a copy of the dataspace to write -- the copy must carry the
+     * hyperslab selection along with the extent; the rest of the test
+     * verifies this by comparing I/O done through original vs. copy.
+     */
+    sid3 = H5Scopy(sid2);
+    CHECK(sid3, FAIL, "H5Scopy");
+
+    /* Create a dataset */
+    data1 = H5Dcreate2(fid1, SPACE1_NAME, H5T_STD_U16LE, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(data1, FAIL, "H5Dcreate2");
+
+    /* Write selection to disk using the original memory selection */
+    ret = H5Dwrite(data1, H5T_STD_U16LE, sid2, sid1, H5P_DEFAULT, wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Create another dataset */
+    data2 = H5Dcreate2(fid1, SPACE2_NAME, H5T_STD_U16LE, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(data2, FAIL, "H5Dcreate2");
+
+    /* Write the same data to disk using the copied memory selection */
+    ret = H5Dwrite(data2, H5T_STD_U16LE, sid3, sid1, H5P_DEFAULT, wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid3);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Create dataspace for reading buffer */
+    sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select 3x4 count with a stride of 4x4 & 2x3 block hyperslab for memory dataset */
+    start[0] = 0;
+    start[1] = 0;
+    stride[0] = 4;
+    stride[1] = 4;
+    count[0] = 3;
+    count[1] = 4;
+    block[0] = 2;
+    block[1] = 3;
+    ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Make a copy of the dataspace to read -- exercises H5Scopy() on the
+     * read side as well.
+     */
+    sid3 = H5Scopy(sid2);
+    CHECK(sid3, FAIL, "H5Scopy");
+
+    /* Read selection from disk through the original read selection */
+    ret = H5Dread(data1, H5T_STD_U16LE, sid2, sid1, H5P_DEFAULT, rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Read selection from disk through the copied read selection */
+    ret = H5Dread(data2, H5T_STD_U16LE, sid3, sid1, H5P_DEFAULT, rbuf2);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read with data written out -- both paths must yield
+     * identical bytes if the selection copies were faithful.
+     */
+    if (HDmemcmp(rbuf, rbuf2, sizeof(uint16_t) * SPACE3_DIM1 * SPACE3_DIM2) != 0)
+        TestErrPrintf("hyperslab values don't match! Line=%d\n", __LINE__);
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close 2nd memory dataspace */
+    ret = H5Sclose(sid3);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(data1);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(data2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free memory buffers */
+    HDfree(wbuf);
+    HDfree(rbuf);
+    HDfree(rbuf2);
+} /* test_select_hyper_copy() */
+
+/****************************************************************
+**
+** test_select_point_copy(): Test H5S (dataspace) selection code.
+** Tests copying point selections
+**
+****************************************************************/
+static void
+test_select_point_copy(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t data1, data2; /* Dataset IDs */
+ hid_t sid1, sid2, sid3; /* Dataspace IDs */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {SPACE2_DIM1, SPACE2_DIM2};
+ hsize_t dims3[] = {SPACE3_DIM1, SPACE3_DIM2};
+ hsize_t coord1[POINT1_NPOINTS][SPACE1_RANK]; /* Coordinates for point selection */
+ hsize_t coord2[POINT1_NPOINTS][SPACE2_RANK]; /* Coordinates for point selection */
+ hsize_t coord3[POINT1_NPOINTS][SPACE3_RANK]; /* Coordinates for point selection */
+ uint16_t *wbuf, /* buffer to write to disk */
+ *rbuf, /* 1st buffer read from disk */
+ *rbuf2, /* 2nd buffer read from disk */
+ *tbuf; /* temporary buffer pointer */
+ int i, j; /* Counters */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Hyperslabs with Strides Functionality\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = (uint16_t *)HDmalloc(sizeof(uint16_t) * SPACE2_DIM1 * SPACE2_DIM2);
+ CHECK_PTR(wbuf, "HDmalloc");
+ rbuf = (uint16_t *)HDcalloc(sizeof(uint16_t), (size_t)(SPACE3_DIM1 * SPACE3_DIM2));
+ CHECK_PTR(rbuf, "HDcalloc");
+ rbuf2 = (uint16_t *)HDcalloc(sizeof(uint16_t), (size_t)(SPACE3_DIM1 * SPACE3_DIM2));
+ CHECK_PTR(rbuf2, "HDcalloc");
+
+ /* Initialize write buffer */
+ for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
+ for (j = 0; j < SPACE2_DIM2; j++)
+ *tbuf++ = (uint16_t)((i * SPACE2_DIM2) + j);
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create dataspace for writing buffer */
+ sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select sequence of ten points for disk dataset */
+ coord1[0][0] = 0;
+ coord1[0][1] = 10;
+ coord1[0][2] = 5;
+ coord1[1][0] = 1;
+ coord1[1][1] = 2;
+ coord1[1][2] = 7;
+ coord1[2][0] = 2;
+ coord1[2][1] = 4;
+ coord1[2][2] = 9;
+ coord1[3][0] = 0;
+ coord1[3][1] = 6;
+ coord1[3][2] = 11;
+ coord1[4][0] = 1;
+ coord1[4][1] = 8;
+ coord1[4][2] = 13;
+ coord1[5][0] = 2;
+ coord1[5][1] = 12;
+ coord1[5][2] = 0;
+ coord1[6][0] = 0;
+ coord1[6][1] = 14;
+ coord1[6][2] = 2;
+ coord1[7][0] = 1;
+ coord1[7][1] = 0;
+ coord1[7][2] = 4;
+ coord1[8][0] = 2;
+ coord1[8][1] = 1;
+ coord1[8][2] = 6;
+ coord1[9][0] = 0;
+ coord1[9][1] = 3;
+ coord1[9][2] = 8;
+ ret = H5Sselect_elements(sid1, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Select sequence of ten points for write dataset */
+ coord2[0][0] = 12;
+ coord2[0][1] = 3;
+ coord2[1][0] = 15;
+ coord2[1][1] = 13;
+ coord2[2][0] = 7;
+ coord2[2][1] = 25;
+ coord2[3][0] = 0;
+ coord2[3][1] = 6;
+ coord2[4][0] = 13;
+ coord2[4][1] = 0;
+ coord2[5][0] = 24;
+ coord2[5][1] = 11;
+ coord2[6][0] = 12;
+ coord2[6][1] = 21;
+ coord2[7][0] = 29;
+ coord2[7][1] = 4;
+ coord2[8][0] = 8;
+ coord2[8][1] = 8;
+ coord2[9][0] = 19;
+ coord2[9][1] = 17;
+ ret = H5Sselect_elements(sid2, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord2);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Make a copy of the dataspace to write */
+ sid3 = H5Scopy(sid2);
+ CHECK(sid3, FAIL, "H5Scopy");
+
+ /* Create a dataset */
+ data1 = H5Dcreate2(fid1, SPACE1_NAME, H5T_STD_U16LE, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(data1, FAIL, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(data1, H5T_STD_U16LE, sid2, sid1, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Create another dataset */
+ data2 = H5Dcreate2(fid1, SPACE2_NAME, H5T_STD_U16LE, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(data2, FAIL, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(data2, H5T_STD_U16LE, sid3, sid1, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid3);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Create dataspace for reading buffer */
+ sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select sequence of points for read dataset */
+ coord3[0][0] = 0;
+ coord3[0][1] = 2;
+ coord3[1][0] = 4;
+ coord3[1][1] = 8;
+ coord3[2][0] = 13;
+ coord3[2][1] = 13;
+ coord3[3][0] = 14;
+ coord3[3][1] = 25;
+ coord3[4][0] = 7;
+ coord3[4][1] = 9;
+ coord3[5][0] = 2;
+ coord3[5][1] = 0;
+ coord3[6][0] = 9;
+ coord3[6][1] = 19;
+ coord3[7][0] = 1;
+ coord3[7][1] = 22;
+ coord3[8][0] = 12;
+ coord3[8][1] = 21;
+ coord3[9][0] = 11;
+ coord3[9][1] = 6;
+ ret = H5Sselect_elements(sid2, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord3);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Make a copy of the dataspace to read */
+ sid3 = H5Scopy(sid2);
+ CHECK(sid3, FAIL, "H5Scopy");
+
+ /* Read selection from disk */
+ ret = H5Dread(data1, H5T_STD_U16LE, sid2, sid1, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Read selection from disk */
+ ret = H5Dread(data2, H5T_STD_U16LE, sid3, sid1, H5P_DEFAULT, rbuf2);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read with data written out */
+ if (HDmemcmp(rbuf, rbuf2, sizeof(uint16_t) * SPACE3_DIM1 * SPACE3_DIM2) != 0)
+ TestErrPrintf("point values don't match!\n");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close 2nd memory dataspace */
+ ret = H5Sclose(sid3);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(data1);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(data2);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+ HDfree(rbuf2);
+} /* test_select_point_copy() */
+
+/****************************************************************
+**
+** test_select_hyper_offset(): Test basic H5S (dataspace) selection code.
+** Tests hyperslabs of various sizes and dimensionalities with selection
+** offsets.
+**
+****************************************************************/
+static void
+test_select_hyper_offset(void)
+{
+    hid_t    fid1;       /* HDF5 File IDs */
+    hid_t    dataset;    /* Dataset ID */
+    hid_t    sid1, sid2; /* Dataspace ID */
+    hsize_t  dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+    hsize_t  dims2[] = {SPACE2_DIM1, SPACE2_DIM2};
+    hsize_t  dims3[] = {SPACE3_DIM1, SPACE3_DIM2};
+    hsize_t  start[SPACE1_RANK];  /* Starting location of hyperslab */
+    hsize_t  stride[SPACE1_RANK]; /* Stride of hyperslab */
+    hsize_t  count[SPACE1_RANK];  /* Element count of hyperslab */
+    hsize_t  block[SPACE1_RANK];  /* Block size of hyperslab */
+    hssize_t offset[SPACE1_RANK]; /* Offset of selection */
+    uint8_t *wbuf,                /* buffer to write to disk */
+        *rbuf,                    /* buffer read from disk */
+        *tbuf,                    /* temporary buffer pointer */
+        *tbuf2;                   /* temporary buffer pointer */
+    int         i, j;             /* Counters */
+    herr_t      ret;              /* Generic return value */
+    htri_t      valid;            /* Generic boolean return value */
+    H5S_class_t ext_type;         /* Extent type */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Hyperslab Selection Functions with Offsets\n"));
+
+    /* Allocate write & read buffers */
+    wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE2_DIM1 * SPACE2_DIM2);
+    CHECK_PTR(wbuf, "HDmalloc");
+    rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE3_DIM1 * SPACE3_DIM2));
+    CHECK_PTR(rbuf, "HDcalloc");
+
+    /* Initialize write buffer with a distinct (row-major index) value per element */
+    for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
+        for (j = 0; j < SPACE2_DIM2; j++)
+            *tbuf++ = (uint8_t)((i * SPACE2_DIM2) + j);
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for dataset */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create dataspace for writing buffer */
+    sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Verify extent type */
+    ext_type = H5Sget_simple_extent_type(sid1);
+    VERIFY(ext_type, H5S_SIMPLE, "H5Sget_simple_extent_type");
+
+    /* Select 2x15x13 hyperslab for disk dataset (planes 1-2 of the extent) */
+    start[0]  = 1;
+    start[1]  = 0;
+    start[2]  = 0;
+    stride[0] = 1;
+    stride[1] = 1;
+    stride[2] = 1;
+    count[0]  = 2;
+    count[1]  = 15;
+    count[2]  = 13;
+    block[0]  = 1;
+    block[1]  = 1;
+    block[2]  = 1;
+    ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Check a valid offset: shifting the selection (planes 1-2) up by one
+     * keeps it inside the dataspace extent, so the selection stays valid */
+    offset[0] = -1;
+    offset[1] = 0;
+    offset[2] = 0;
+    ret       = H5Soffset_simple(sid1, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+    valid = H5Sselect_valid(sid1);
+    VERIFY(valid, TRUE, "H5Sselect_valid");
+
+    /* Check an invalid offset: shifting planes 1-2 by +10 pushes the
+     * selection past the extent, so H5Sselect_valid must report FALSE */
+    offset[0] = 10;
+    offset[1] = 0;
+    offset[2] = 0;
+    ret       = H5Soffset_simple(sid1, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+    valid = H5Sselect_valid(sid1);
+    VERIFY(valid, FALSE, "H5Sselect_valid");
+
+    /* Reset offset to zero before doing actual I/O */
+    offset[0] = 0;
+    offset[1] = 0;
+    offset[2] = 0;
+    ret       = H5Soffset_simple(sid1, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+    valid = H5Sselect_valid(sid1);
+    VERIFY(valid, TRUE, "H5Sselect_valid");
+
+    /* Select 15x26 hyperslab for memory dataset, starting at row 15 */
+    start[0]  = 15;
+    start[1]  = 0;
+    stride[0] = 1;
+    stride[1] = 1;
+    count[0]  = 15;
+    count[1]  = 26;
+    block[0]  = 1;
+    block[1]  = 1;
+    ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Choose a valid offset for the memory dataspace: -10 moves the
+     * selection from rows 15-29 to rows 5-19 (the compare loop below
+     * depends on this effective starting row of 5) */
+    offset[0] = -10;
+    offset[1] = 0;
+    ret       = H5Soffset_simple(sid2, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+    valid = H5Sselect_valid(sid2);
+    VERIFY(valid, TRUE, "H5Sselect_valid");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, SPACE1_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Create dataspace for reading buffer */
+    sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select 15x26 hyperslab for reading memory dataset (no offset applied) */
+    start[0]  = 0;
+    start[1]  = 0;
+    stride[0] = 1;
+    stride[1] = 1;
+    count[0]  = 15;
+    count[1]  = 26;
+    block[0]  = 1;
+    block[1]  = 1;
+    ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Read selection from disk */
+    ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read with data written out.  The memory selection used
+     * for the write started at row 15 with an offset of -10, so the data
+     * actually written (and now read back) came from rows 5-19 of wbuf,
+     * hence the "i + 5" row index below. */
+    for (i = 0; i < SPACE3_DIM1; i++) {
+        tbuf  = wbuf + ((i + 5) * SPACE2_DIM2);
+        tbuf2 = rbuf + (i * SPACE3_DIM2);
+        for (j = 0; j < SPACE3_DIM2; j++, tbuf++, tbuf2++) {
+            if (*tbuf != *tbuf2)
+                TestErrPrintf("%d: hyperslab values don't match!, i=%d, j=%d, *tbuf=%u, *tbuf2=%u\n",
+                              __LINE__, i, j, (unsigned)*tbuf, (unsigned)*tbuf2);
+        } /* end for */
+    }     /* end for */
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free memory buffers */
+    HDfree(wbuf);
+    HDfree(rbuf);
+} /* test_select_hyper_offset() */
+
+/****************************************************************
+**
+** test_select_hyper_offset2(): Test basic H5S (dataspace) selection code.
+** Tests optimized hyperslab I/O with selection offsets.
+**
+****************************************************************/
+static void
+test_select_hyper_offset2(void)
+{
+    hid_t    fid1;       /* HDF5 File IDs */
+    hid_t    dataset;    /* Dataset ID */
+    hid_t    sid1, sid2; /* Dataspace ID */
+    hsize_t  dims1[] = {SPACE7_DIM1, SPACE7_DIM2};
+    hsize_t  dims2[] = {SPACE7_DIM1, SPACE7_DIM2};
+    hsize_t  start[SPACE7_RANK]; /* Starting location of hyperslab */
+    hsize_t  count[SPACE7_RANK]; /* Element count of hyperslab */
+    hssize_t offset[SPACE7_RANK]; /* Offset of selection */
+    uint8_t *wbuf,                /* buffer to write to disk */
+        *rbuf,                    /* buffer read from disk */
+        *tbuf,                    /* temporary buffer pointer */
+        *tbuf2;                   /* temporary buffer pointer */
+    int    i, j;  /* Counters */
+    herr_t ret;   /* Generic return value */
+    htri_t valid; /* Generic boolean return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing More Hyperslab Selection Functions with Offsets\n"));
+
+    /* Allocate write & read buffers */
+    wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE7_DIM1 * SPACE7_DIM2);
+    CHECK_PTR(wbuf, "HDmalloc");
+    rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE7_DIM1 * SPACE7_DIM2));
+    CHECK_PTR(rbuf, "HDcalloc");
+
+    /* Initialize write buffer with a distinct (row-major index) value per element */
+    for (i = 0, tbuf = wbuf; i < SPACE7_DIM1; i++)
+        for (j = 0; j < SPACE7_DIM2; j++)
+            *tbuf++ = (uint8_t)((i * SPACE7_DIM2) + j);
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for dataset */
+    sid1 = H5Screate_simple(SPACE7_RANK, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create dataspace for writing buffer */
+    sid2 = H5Screate_simple(SPACE7_RANK, dims2, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select 4x10 hyperslab for disk dataset (NULL stride/block default to 1) */
+    start[0] = 1;
+    start[1] = 0;
+    count[0] = 4;
+    count[1] = 10;
+    ret      = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Set offset: moves the disk selection from rows 1-4 to rows 2-5 */
+    offset[0] = 1;
+    offset[1] = 0;
+    ret       = H5Soffset_simple(sid1, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+    valid = H5Sselect_valid(sid1);
+    VERIFY(valid, TRUE, "H5Sselect_valid");
+
+    /* Select 4x10 hyperslab for memory dataset */
+    start[0] = 1;
+    start[1] = 0;
+    count[0] = 4;
+    count[1] = 10;
+    ret      = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Choose a valid offset for the memory dataspace: moves the selection
+     * from rows 1-4 to rows 3-6 (the compare loop below uses "i + 3") */
+    offset[0] = 2;
+    offset[1] = 0;
+    ret       = H5Soffset_simple(sid2, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+    valid = H5Sselect_valid(sid2);
+    VERIFY(valid, TRUE, "H5Sselect_valid");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, SPACE7_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Read selection from disk (same memory selection, so the data round-trips
+     * back into the same rows of rbuf) */
+    ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read with data written out.  Both the write source and
+     * read destination were rows 3-6 (selection rows 1-4 plus offset 2),
+     * so compare those four rows element-by-element. */
+    for (i = 0; i < 4; i++) {
+        tbuf  = wbuf + ((i + 3) * SPACE7_DIM2);
+        tbuf2 = rbuf + ((i + 3) * SPACE7_DIM2);
+        for (j = 0; j < SPACE7_DIM2; j++, tbuf++, tbuf2++) {
+            if (*tbuf != *tbuf2)
+                TestErrPrintf("%d: hyperslab values don't match!, i=%d, j=%d, *tbuf=%u, *tbuf2=%u\n",
+                              __LINE__, i, j, (unsigned)*tbuf, (unsigned)*tbuf2);
+        } /* end for */
+    }     /* end for */
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free memory buffers */
+    HDfree(wbuf);
+    HDfree(rbuf);
+} /* test_select_hyper_offset2() */
+
+/****************************************************************
+**
+** test_select_point_offset(): Test basic H5S (dataspace) selection code.
+** Tests element selections between dataspaces of various sizes
+** and dimensionalities with selection offsets.
+**
+****************************************************************/
+static void
+test_select_point_offset(void)
+{
+    hid_t    fid1;       /* HDF5 File IDs */
+    hid_t    dataset;    /* Dataset ID */
+    hid_t    sid1, sid2; /* Dataspace ID */
+    hsize_t  dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+    hsize_t  dims2[] = {SPACE2_DIM1, SPACE2_DIM2};
+    hsize_t  dims3[] = {SPACE3_DIM1, SPACE3_DIM2};
+    hsize_t  coord1[POINT1_NPOINTS][SPACE1_RANK]; /* Coordinates for point selection */
+    hsize_t  coord2[POINT1_NPOINTS][SPACE2_RANK]; /* Coordinates for point selection */
+    hsize_t  coord3[POINT1_NPOINTS][SPACE3_RANK]; /* Coordinates for point selection */
+    hssize_t offset[SPACE1_RANK];                 /* Offset of selection */
+    uint8_t *wbuf,                                /* buffer to write to disk */
+        *rbuf,                                    /* buffer read from disk */
+        *tbuf,                                    /* temporary buffer pointer */
+        *tbuf2;                                   /* temporary buffer pointer */
+    int    i, j;  /* Counters */
+    herr_t ret;   /* Generic return value */
+    htri_t valid; /* Generic boolean return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Element Selection Functions\n"));
+
+    /* Allocate write & read buffers */
+    wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE2_DIM1 * SPACE2_DIM2);
+    CHECK_PTR(wbuf, "HDmalloc");
+    rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE3_DIM1 * SPACE3_DIM2));
+    CHECK_PTR(rbuf, "HDcalloc");
+
+    /* Initialize write buffer with a distinct (row-major index) value per element */
+    for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
+        for (j = 0; j < SPACE2_DIM2; j++)
+            *tbuf++ = (uint8_t)((i * SPACE2_DIM2) + j);
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for dataset */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create dataspace for write buffer */
+    sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select sequence of ten points for disk dataset */
+    coord1[0][0] = 0;
+    coord1[0][1] = 10;
+    coord1[0][2] = 5;
+    coord1[1][0] = 1;
+    coord1[1][1] = 2;
+    coord1[1][2] = 7;
+    coord1[2][0] = 2;
+    coord1[2][1] = 4;
+    coord1[2][2] = 9;
+    coord1[3][0] = 0;
+    coord1[3][1] = 6;
+    coord1[3][2] = 11;
+    coord1[4][0] = 1;
+    coord1[4][1] = 8;
+    coord1[4][2] = 12;
+    coord1[5][0] = 2;
+    coord1[5][1] = 12;
+    coord1[5][2] = 0;
+    coord1[6][0] = 0;
+    coord1[6][1] = 14;
+    coord1[6][2] = 2;
+    coord1[7][0] = 1;
+    coord1[7][1] = 0;
+    coord1[7][2] = 4;
+    coord1[8][0] = 2;
+    coord1[8][1] = 1;
+    coord1[8][2] = 6;
+    coord1[9][0] = 0;
+    coord1[9][1] = 3;
+    coord1[9][2] = 8;
+    ret = H5Sselect_elements(sid1, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord1);
+    CHECK(ret, FAIL, "H5Sselect_elements");
+
+    /* Check a valid offset: shifting every point by +1 in the last
+     * dimension keeps all of them inside the extent */
+    offset[0] = 0;
+    offset[1] = 0;
+    offset[2] = 1;
+    ret       = H5Soffset_simple(sid1, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+    valid = H5Sselect_valid(sid1);
+    VERIFY(valid, TRUE, "H5Sselect_valid");
+
+    /* Check an invalid offset: +10 in the first dimension pushes points
+     * past the extent, so H5Sselect_valid must report FALSE */
+    offset[0] = 10;
+    offset[1] = 0;
+    offset[2] = 0;
+    ret       = H5Soffset_simple(sid1, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+    valid = H5Sselect_valid(sid1);
+    VERIFY(valid, FALSE, "H5Sselect_valid");
+
+    /* Reset offset to zero before doing actual I/O */
+    offset[0] = 0;
+    offset[1] = 0;
+    offset[2] = 0;
+    ret       = H5Soffset_simple(sid1, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+    valid = H5Sselect_valid(sid1);
+    VERIFY(valid, TRUE, "H5Sselect_valid");
+
+    /* Select sequence of ten points for write dataset */
+    coord2[0][0] = 12;
+    coord2[0][1] = 3;
+    coord2[1][0] = 15;
+    coord2[1][1] = 13;
+    coord2[2][0] = 7;
+    coord2[2][1] = 24;
+    coord2[3][0] = 0;
+    coord2[3][1] = 6;
+    coord2[4][0] = 13;
+    coord2[4][1] = 0;
+    coord2[5][0] = 24;
+    coord2[5][1] = 11;
+    coord2[6][0] = 12;
+    coord2[6][1] = 21;
+    coord2[7][0] = 23;
+    coord2[7][1] = 4;
+    coord2[8][0] = 8;
+    coord2[8][1] = 8;
+    coord2[9][0] = 19;
+    coord2[9][1] = 17;
+    ret = H5Sselect_elements(sid2, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord2);
+    CHECK(ret, FAIL, "H5Sselect_elements");
+
+    /* Choose a valid offset for the memory dataspace.  NOTE: offset[]
+     * keeps these {5, 1} values through the rest of the test; the
+     * comparison loop below uses them to locate the shifted source
+     * elements in wbuf. */
+    offset[0] = 5;
+    offset[1] = 1;
+    ret       = H5Soffset_simple(sid2, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+    valid = H5Sselect_valid(sid2);
+    VERIFY(valid, TRUE, "H5Sselect_valid");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, SPACE1_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write selection to disk */
+    ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Create dataspace for reading buffer */
+    sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Select sequence of points for read dataset (no offset applied) */
+    coord3[0][0] = 0;
+    coord3[0][1] = 2;
+    coord3[1][0] = 4;
+    coord3[1][1] = 8;
+    coord3[2][0] = 13;
+    coord3[2][1] = 13;
+    coord3[3][0] = 14;
+    coord3[3][1] = 25;
+    coord3[4][0] = 7;
+    coord3[4][1] = 9;
+    coord3[5][0] = 2;
+    coord3[5][1] = 0;
+    coord3[6][0] = 9;
+    coord3[6][1] = 19;
+    coord3[7][0] = 1;
+    coord3[7][1] = 22;
+    coord3[8][0] = 12;
+    coord3[8][1] = 21;
+    coord3[9][0] = 11;
+    coord3[9][1] = 6;
+    ret = H5Sselect_elements(sid2, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord3);
+    CHECK(ret, FAIL, "H5Sselect_elements");
+
+    /* Read selection from disk */
+    ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read with data written out.  The i'th point written came
+     * from coord2[i] shifted by the {5, 1} memory-space offset (still held
+     * in offset[]); the i'th point read landed at coord3[i] in rbuf. */
+    for (i = 0; i < POINT1_NPOINTS; i++) {
+        tbuf  = wbuf + ((coord2[i][0] + (hsize_t)offset[0]) * SPACE2_DIM2) + coord2[i][1] + (hsize_t)offset[1];
+        tbuf2 = rbuf + (coord3[i][0] * SPACE3_DIM2) + coord3[i][1];
+        if (*tbuf != *tbuf2)
+            TestErrPrintf("element values don't match!, i=%d\n", i);
+    } /* end for */
+
+    /* Close memory dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free memory buffers */
+    HDfree(wbuf);
+    HDfree(rbuf);
+} /* test_select_point_offset() */
+
+/****************************************************************
+**
+** test_select_hyper_union(): Test basic H5S (dataspace) selection code.
+** Tests unions of hyperslabs of various sizes and dimensionalities.
+**
+****************************************************************/
+static void
+test_select_hyper_union(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hid_t xfer; /* Dataset Transfer Property List ID */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {SPACE2_DIM1, SPACE2_DIM2};
+ hsize_t dims3[] = {SPACE3_DIM1, SPACE3_DIM2};
+ hsize_t start[SPACE1_RANK]; /* Starting location of hyperslab */
+ hsize_t stride[SPACE1_RANK]; /* Stride of hyperslab */
+ hsize_t count[SPACE1_RANK]; /* Element count of hyperslab */
+ hsize_t block[SPACE1_RANK]; /* Block size of hyperslab */
+ size_t begin[SPACE2_DIM1] = /* Offset within irregular block */
+ {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* First ten rows start at offset 0 */
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5}; /* Next eighteen rows start at offset 5 */
+ size_t len[SPACE2_DIM1] = /* Len of each row within irregular block */
+ {10, 10, 10, 10, 10, 10, 10, 10, /* First eight rows are 10 long */
+ 20, 20, /* Next two rows are 20 long */
+ 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15}; /* Next eighteen rows are 15 long */
+ uint8_t *wbuf, /* buffer to write to disk */
+ *rbuf, /* buffer read from disk */
+ *tbuf, /* temporary buffer pointer */
+ *tbuf2; /* temporary buffer pointer */
+ int i, j; /* Counters */
+ herr_t ret; /* Generic return value */
+ hssize_t npoints; /* Number of elements in selection */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Hyperslab Selection Functions with unions of hyperslabs\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE2_DIM1 * SPACE2_DIM2);
+ CHECK_PTR(wbuf, "HDmalloc");
+ rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE3_DIM1 * SPACE3_DIM2));
+ CHECK_PTR(rbuf, "HDcalloc");
+
+ /* Initialize write buffer */
+ for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
+ for (j = 0; j < SPACE2_DIM2; j++)
+ *tbuf++ = (uint8_t)((i * SPACE2_DIM2) + j);
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Test simple case of one block overlapping another */
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create dataspace for writing buffer */
+ sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 2x15x13 hyperslab for disk dataset */
+ start[0] = 1;
+ start[1] = 0;
+ start[2] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ stride[2] = 1;
+ count[0] = 2;
+ count[1] = 15;
+ count[2] = 13;
+ block[0] = 1;
+ block[1] = 1;
+ block[2] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ npoints = H5Sget_select_npoints(sid1);
+ VERIFY(npoints, 2 * 15 * 13, "H5Sget_select_npoints");
+
+ /* Select 8x26 hyperslab for memory dataset */
+ start[0] = 15;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 8;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Union overlapping 8x26 hyperslab for memory dataset (to form a 15x26 selection) */
+ start[0] = 22;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 8;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ npoints = H5Sget_select_npoints(sid2);
+ VERIFY(npoints, 15 * 26, "H5Sget_select_npoints");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, SPACE1_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Create dataspace for reading buffer */
+ sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 15x26 hyperslab for reading memory dataset */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 15;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read with data written out */
+ for (i = 0; i < SPACE3_DIM1; i++) {
+ tbuf = wbuf + ((i + 15) * SPACE2_DIM2);
+ tbuf2 = rbuf + (i * SPACE3_DIM2);
+ for (j = 0; j < SPACE3_DIM2; j++, tbuf++, tbuf2++) {
+ if (*tbuf != *tbuf2)
+ TestErrPrintf("%d: hyperslab values don't match!, i=%d, j=%d, *tbuf=%d, *tbuf2=%d\n",
+ __LINE__, i, j, (int)*tbuf, (int)*tbuf2);
+ } /* end for */
+ } /* end for */
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Test simple case of several block overlapping another */
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create dataspace for writing buffer */
+ sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 2x15x13 hyperslab for disk dataset */
+ start[0] = 1;
+ start[1] = 0;
+ start[2] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ stride[2] = 1;
+ count[0] = 2;
+ count[1] = 15;
+ count[2] = 13;
+ block[0] = 1;
+ block[1] = 1;
+ block[2] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Select 8x15 hyperslab for memory dataset */
+ start[0] = 15;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 8;
+ count[1] = 15;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Union overlapping 8x15 hyperslab for memory dataset (to form a 15x15 selection) */
+ start[0] = 22;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 8;
+ count[1] = 15;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Union overlapping 15x15 hyperslab for memory dataset (to form a 15x26 selection) */
+ start[0] = 15;
+ start[1] = 11;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 15;
+ count[1] = 15;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ npoints = H5Sget_select_npoints(sid2);
+ VERIFY(npoints, 15 * 26, "H5Sget_select_npoints");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, SPACE2_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Create dataspace for reading buffer */
+ sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 15x26 hyperslab for reading memory dataset */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 15;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read with data written out */
+ for (i = 0; i < SPACE3_DIM1; i++) {
+ tbuf = wbuf + ((i + 15) * SPACE2_DIM2);
+ tbuf2 = rbuf + (i * SPACE3_DIM2);
+ for (j = 0; j < SPACE3_DIM2; j++, tbuf++, tbuf2++) {
+ if (*tbuf != *tbuf2)
+ TestErrPrintf("%d: hyperslab values don't match!, i=%d, j=%d, *tbuf=%d, *tbuf2=%d\n",
+ __LINE__, i, j, (int)*tbuf, (int)*tbuf2);
+ } /* end for */
+ } /* end for */
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Test disjoint case of two non-overlapping blocks */
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create dataspace for writing buffer */
+ sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 2x15x13 hyperslab for disk dataset */
+ start[0] = 1;
+ start[1] = 0;
+ start[2] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ stride[2] = 1;
+ count[0] = 2;
+ count[1] = 15;
+ count[2] = 13;
+ block[0] = 1;
+ block[1] = 1;
+ block[2] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Select 7x26 hyperslab for memory dataset */
+ start[0] = 1;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 7;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Union non-overlapping 8x26 hyperslab for memory dataset (to form a 15x26 disjoint selection) */
+ start[0] = 22;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 8;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ npoints = H5Sget_select_npoints(sid2);
+ VERIFY(npoints, 15 * 26, "H5Sget_select_npoints");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, SPACE3_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Create dataspace for reading buffer */
+ sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 15x26 hyperslab for reading memory dataset */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 15;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read with data written out */
+ for (i = 0; i < SPACE3_DIM1; i++) {
+ /* Jump over gap in middle */
+ if (i < 7)
+ tbuf = wbuf + ((i + 1) * SPACE2_DIM2);
+ else
+ tbuf = wbuf + ((i + 15) * SPACE2_DIM2);
+ tbuf2 = rbuf + (i * SPACE3_DIM2);
+ for (j = 0; j < SPACE3_DIM2; j++, tbuf++, tbuf2++) {
+ if (*tbuf != *tbuf2)
+ TestErrPrintf("%d: hyperslab values don't match!, i=%d, j=%d, *tbuf=%d, *tbuf2=%d\n",
+ __LINE__, i, j, (int)*tbuf, (int)*tbuf2);
+ } /* end for */
+ } /* end for */
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Test disjoint case of two non-overlapping blocks with hyperslab caching turned off */
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create dataspace for writing buffer */
+ sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 2x15x13 hyperslab for disk dataset */
+ start[0] = 1;
+ start[1] = 0;
+ start[2] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ stride[2] = 1;
+ count[0] = 2;
+ count[1] = 15;
+ count[2] = 13;
+ block[0] = 1;
+ block[1] = 1;
+ block[2] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Select 7x26 hyperslab for memory dataset */
+ start[0] = 1;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 7;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Union non-overlapping 8x26 hyperslab for memory dataset (to form a 15x26 disjoint selection) */
+ start[0] = 22;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 8;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ npoints = H5Sget_select_npoints(sid2);
+ VERIFY(npoints, 15 * 26, "H5Sget_select_npoints");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, SPACE4_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ xfer = H5Pcreate(H5P_DATASET_XFER);
+ CHECK(xfer, FAIL, "H5Pcreate");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, xfer, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Create dataspace for reading buffer */
+ sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 15x26 hyperslab for reading memory dataset */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 15;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, xfer, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Close transfer property list */
+ ret = H5Pclose(xfer);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Compare data read with data written out */
+ for (i = 0; i < SPACE3_DIM1; i++) {
+ /* Jump over gap in middle */
+ if (i < 7)
+ tbuf = wbuf + ((i + 1) * SPACE2_DIM2);
+ else
+ tbuf = wbuf + ((i + 15) * SPACE2_DIM2);
+ tbuf2 = rbuf + (i * SPACE3_DIM2);
+ for (j = 0; j < SPACE3_DIM2; j++, tbuf++, tbuf2++) {
+ if (*tbuf != *tbuf2)
+ TestErrPrintf("%d: hyperslab values don't match!, i=%d, j=%d, *tbuf=%d, *tbuf2=%d\n",
+ __LINE__, i, j, (int)*tbuf, (int)*tbuf2);
+ } /* end for */
+ } /* end for */
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Test case of two blocks which overlap corners and must be split */
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create dataspace for writing buffer */
+ sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 2x15x13 hyperslab for disk dataset */
+ start[0] = 1;
+ start[1] = 0;
+ start[2] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ stride[2] = 1;
+ count[0] = 2;
+ count[1] = 15;
+ count[2] = 13;
+ block[0] = 1;
+ block[1] = 1;
+ block[2] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Select 10x10 hyperslab for memory dataset */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 10;
+ count[1] = 10;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Union overlapping 15x20 hyperslab for memory dataset (forming a irregularly shaped region) */
+ start[0] = 8;
+ start[1] = 5;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 20;
+ count[1] = 15;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ npoints = H5Sget_select_npoints(sid2);
+ VERIFY(npoints, 15 * 26, "H5Sget_select_npoints");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, SPACE5_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Create dataspace for reading buffer */
+ sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 15x26 hyperslab for reading memory dataset */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 15;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read with data written out */
+ for (i = 0, tbuf2 = rbuf; i < SPACE2_DIM1; i++) {
+ tbuf = wbuf + (i * SPACE2_DIM2) + begin[i];
+ for (j = 0; j < (int)len[i]; j++, tbuf++, tbuf2++) {
+ if (*tbuf != *tbuf2)
+ TestErrPrintf("%d: hyperslab values don't match!, i=%d, j=%d, *tbuf=%d, *tbuf2=%d\n",
+ __LINE__, i, j, (int)*tbuf, (int)*tbuf2);
+ } /* end for */
+ } /* end for */
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+} /* test_select_hyper_union() */
+
+/****************************************************************
+**
+** test_select_hyper_union_stagger(): Test basic H5S (dataspace) selection code.
+** Tests unions of staggered hyperslabs. (Uses H5Scombine_hyperslab
+** and H5Smodify_select instead of H5Sselect_hyperslab)
+**
+****************************************************************/
+static void
+test_select_hyper_union_stagger(void)
+{
+ hid_t file_id; /* File ID */
+ hid_t dset_id; /* Dataset ID */
+ hid_t dataspace; /* File dataspace ID */
+ hid_t memspace; /* Memory dataspace ID */
+ hid_t tmp_space; /* Temporary dataspace ID */
+ hid_t tmp2_space; /* Another temporary dataspace ID */
+ hsize_t dimsm[2] = {7, 7}; /* Memory array dimensions */
+ hsize_t dimsf[2] = {6, 5}; /* File array dimensions */
+ hsize_t count[2] = {3, 1}; /* 1st Hyperslab size */
+ hsize_t count2[2] = {3, 1}; /* 2nd Hyperslab size */
+ hsize_t count3[2] = {2, 1}; /* 3rd Hyperslab size */
+ hsize_t start[2] = {0, 0}; /* 1st Hyperslab offset */
+ hsize_t start2[2] = {2, 1}; /* 2nd Hyperslab offset */
+ hsize_t start3[2] = {4, 2}; /* 3rd Hyperslab offset */
+ hsize_t count_out[2] = {4, 2}; /* Hyperslab size in memory */
+ hsize_t start_out[2] = {0, 3}; /* Hyperslab offset in memory */
+ int data[6][5]; /* Data to write */
+ int data_out[7][7]; /* Data read in */
+ /* (file row, file col) of each selected element, in read order */
+ int input_loc[8][2] = {{0, 0}, {1, 0}, {2, 0}, {2, 1}, {3, 1}, {4, 1}, {4, 2}, {5, 2}};
+ /* (memory row, memory col) where each selected element should land */
+ int output_loc[8][2] = {{0, 3}, {0, 4}, {1, 3}, {1, 4}, {2, 3}, {2, 4}, {3, 3}, {3, 4}};
+ int dsetrank = 2; /* File Dataset rank */
+ int memrank = 2; /* Memory Dataset rank */
+ int i, j; /* Local counting variables */
+ herr_t error;
+ hsize_t stride[2] = {1, 1};
+ hsize_t block[2] = {1, 1};
+
+ /* Initialize data to write */
+ for (i = 0; i < 6; i++)
+ for (j = 0; j < 5; j++)
+ data[i][j] = j * 10 + i;
+
+ /* Create file */
+ file_id = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(file_id, FAIL, "H5Fcreate");
+
+ /* Create File Dataspace */
+ dataspace = H5Screate_simple(dsetrank, dimsf, NULL);
+ CHECK(dataspace, FAIL, "H5Screate_simple");
+
+ /* Create File Dataset */
+ dset_id =
+ H5Dcreate2(file_id, "IntArray", H5T_NATIVE_INT, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dcreate2");
+
+ /* Write File Dataset */
+ error = H5Dwrite(dset_id, H5T_NATIVE_INT, dataspace, dataspace, H5P_DEFAULT, data);
+ CHECK(error, FAIL, "H5Dwrite");
+
+ /* Close things */
+ error = H5Sclose(dataspace);
+ CHECK(error, FAIL, "H5Sclose");
+ error = H5Dclose(dset_id);
+ CHECK(error, FAIL, "H5Dclose");
+ error = H5Fclose(file_id);
+ CHECK(error, FAIL, "H5Fclose");
+
+ /* Initialize input buffer */
+ HDmemset(data_out, 0, 7 * 7 * sizeof(int));
+
+ /* Open file */
+ file_id = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(file_id, FAIL, "H5Fopen");
+
+ /* Open dataset */
+ dset_id = H5Dopen2(file_id, "IntArray", H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dopen2");
+
+ /* Get the dataspace */
+ dataspace = H5Dget_space(dset_id);
+ CHECK(dataspace, FAIL, "H5Dget_space");
+
+ /* Select the hyperslabs: 1st slab via H5Sselect_hyperslab, 2nd OR'd in
+ * via H5Scombine_hyperslab (which returns a new dataspace) */
+ error = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+ tmp_space = H5Scombine_hyperslab(dataspace, H5S_SELECT_OR, start2, stride, count2, block);
+ CHECK(tmp_space, FAIL, "H5Scombine_hyperslab");
+
+ /* Copy the file dataspace and select hyperslab */
+ tmp2_space = H5Scopy(dataspace);
+ CHECK(tmp2_space, FAIL, "H5Scopy");
+ error = H5Sselect_hyperslab(tmp2_space, H5S_SELECT_SET, start3, stride, count3, block);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* Combine the copied dataspace with the temporary dataspace */
+ error = H5Smodify_select(tmp_space, H5S_SELECT_OR, tmp2_space);
+ CHECK(error, FAIL, "H5Smodify_select");
+
+ /* Create Memory Dataspace */
+ memspace = H5Screate_simple(memrank, dimsm, NULL);
+ CHECK(memspace, FAIL, "H5Screate_simple");
+
+ /* Select hyperslab in memory */
+ error = H5Sselect_hyperslab(memspace, H5S_SELECT_SET, start_out, stride, count_out, block);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* Read File Dataset */
+ error = H5Dread(dset_id, H5T_NATIVE_INT, memspace, tmp_space, H5P_DEFAULT, data_out);
+ CHECK(error, FAIL, "H5Dread");
+
+ /* Verify input data: each selected file element must appear at its
+ * expected location in the memory buffer */
+ for (i = 0; i < 8; i++) {
+ if (data[input_loc[i][0]][input_loc[i][1]] != data_out[output_loc[i][0]][output_loc[i][1]]) {
+ HDprintf("input data #%d is wrong!\n", i);
+ HDprintf("input_loc=[%d][%d]\n", input_loc[i][0], input_loc[i][1]);
+ HDprintf("output_loc=[%d][%d]\n", output_loc[i][0], output_loc[i][1]);
+ HDprintf("data=%d\n", data[input_loc[i][0]][input_loc[i][1]]);
+ TestErrPrintf("data_out=%d\n", data_out[output_loc[i][0]][output_loc[i][1]]);
+ } /* end if */
+ } /* end for */
+
+ /* Close things */
+ error = H5Sclose(tmp2_space);
+ CHECK(error, FAIL, "H5Sclose");
+ error = H5Sclose(tmp_space);
+ CHECK(error, FAIL, "H5Sclose");
+ error = H5Sclose(dataspace);
+ CHECK(error, FAIL, "H5Sclose");
+ error = H5Sclose(memspace);
+ CHECK(error, FAIL, "H5Sclose");
+ error = H5Dclose(dset_id);
+ CHECK(error, FAIL, "H5Dclose");
+ error = H5Fclose(file_id);
+ CHECK(error, FAIL, "H5Fclose");
+}
+
+/****************************************************************
+**
+** test_select_hyper_union_3d(): Test basic H5S (dataspace) selection code.
+** Tests unions of hyperslabs in 3-D (Uses H5Scombine_hyperslab
+** and H5Scombine_select instead of H5Sselect_hyperslab)
+**
+****************************************************************/
+static void
+test_select_hyper_union_3d(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hid_t tmp_space; /* Temporary Dataspace ID */
+ hid_t tmp2_space; /* Another temporary Dataspace ID */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {SPACE4_DIM1, SPACE4_DIM2, SPACE4_DIM3};
+ hsize_t dims3[] = {SPACE3_DIM1, SPACE3_DIM2};
+ hsize_t start[SPACE1_RANK]; /* Starting location of hyperslab */
+ hsize_t stride[SPACE1_RANK]; /* Stride of hyperslab */
+ hsize_t count[SPACE1_RANK]; /* Element count of hyperslab */
+ hsize_t block[SPACE1_RANK]; /* Block size of hyperslab */
+ struct row_list {
+ size_t z;
+ size_t y;
+ size_t x;
+ size_t l;
+ } rows[] = {
+ /* Array of x,y,z coordinates & length for each row written from memory */
+ {0, 0, 0, 6}, /* 1st face of 3-D object */
+ {0, 1, 0, 6}, {0, 2, 0, 6}, {0, 3, 0, 6}, {0, 4, 0, 6}, {1, 0, 0, 6}, /* 2nd face of 3-D object */
+ {1, 1, 0, 6}, {1, 2, 0, 6}, {1, 3, 0, 6}, {1, 4, 0, 6}, {2, 0, 0, 6}, /* 3rd face of 3-D object */
+ {2, 1, 0, 10}, {2, 2, 0, 10}, {2, 3, 0, 10}, {2, 4, 0, 10}, {2, 5, 2, 8},
+ {2, 6, 2, 8}, {3, 0, 0, 6}, /* 4th face of 3-D object */
+ {3, 1, 0, 10}, {3, 2, 0, 10}, {3, 3, 0, 10}, {3, 4, 0, 10}, {3, 5, 2, 8},
+ {3, 6, 2, 8}, {4, 0, 0, 6}, /* 5th face of 3-D object */
+ {4, 1, 0, 10}, {4, 2, 0, 10}, {4, 3, 0, 10}, {4, 4, 0, 10}, {4, 5, 2, 8},
+ {4, 6, 2, 8}, {5, 1, 2, 8}, /* 6th face of 3-D object */
+ {5, 2, 2, 8}, {5, 3, 2, 8}, {5, 4, 2, 8}, {5, 5, 2, 8}, {5, 6, 2, 8},
+ {6, 1, 2, 8}, /* 7th face of 3-D object */
+ {6, 2, 2, 8}, {6, 3, 2, 8}, {6, 4, 2, 8}, {6, 5, 2, 8}, {6, 6, 2, 8},
+ {7, 1, 2, 8}, /* 8th face of 3-D object */
+ {7, 2, 2, 8}, {7, 3, 2, 8}, {7, 4, 2, 8}, {7, 5, 2, 8}, {7, 6, 2, 8}};
+ uint8_t *wbuf, /* buffer to write to disk */
+ *rbuf, /* buffer read from disk */
+ *tbuf, /* temporary buffer pointer */
+ *tbuf2; /* temporary buffer pointer */
+ int i, j, k; /* Counters */
+ herr_t ret; /* Generic return value */
+ hsize_t npoints; /* Number of elements in selection */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Hyperslab Selection Functions with unions of 3-D hyperslabs\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE4_DIM1 * SPACE4_DIM2 * SPACE4_DIM3);
+ CHECK_PTR(wbuf, "HDmalloc");
+ rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), SPACE3_DIM1 * SPACE3_DIM2);
+ CHECK_PTR(rbuf, "HDcalloc");
+
+ /* Initialize write buffer */
+ for (i = 0, tbuf = wbuf; i < SPACE4_DIM1; i++)
+ for (j = 0; j < SPACE4_DIM2; j++)
+ for (k = 0; k < SPACE4_DIM3; k++)
+ *tbuf++ = (uint8_t)((((i * SPACE4_DIM2) + j) * SPACE4_DIM3) + k);
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Test case of two blocks which overlap corners and must be split */
+ /* Create dataspace for dataset on disk */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create dataspace for writing buffer */
+ sid2 = H5Screate_simple(SPACE4_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 2x15x13 hyperslab for disk dataset */
+ start[0] = 1;
+ start[1] = 0;
+ start[2] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ stride[2] = 1;
+ count[0] = 2;
+ count[1] = 15;
+ count[2] = 13;
+ block[0] = 1;
+ block[1] = 1;
+ block[2] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Select 5x5x6 hyperslab for memory dataset */
+ start[0] = 0;
+ start[1] = 0;
+ start[2] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ stride[2] = 1;
+ count[0] = 5;
+ count[1] = 5;
+ count[2] = 6;
+ block[0] = 1;
+ block[1] = 1;
+ block[2] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Union overlapping 6x6x8 hyperslab for memory dataset (forming an irregularly shaped region) */
+ start[0] = 2;
+ start[1] = 1;
+ start[2] = 2;
+ stride[0] = 1;
+ stride[1] = 1;
+ stride[2] = 1;
+ count[0] = 6;
+ count[1] = 6;
+ count[2] = 8;
+ block[0] = 1;
+ block[1] = 1;
+ block[2] = 1;
+ tmp_space = H5Scombine_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(tmp_space, FAIL, "H5Scombine_hyperslab");
+
+ /* Combine dataspaces and create new dataspace */
+ tmp2_space = H5Scombine_select(sid2, H5S_SELECT_OR, tmp_space);
+ CHECK(tmp2_space, FAIL, "H5Scombine_select");
+
+ npoints = (hsize_t)H5Sget_select_npoints(tmp2_space);
+ VERIFY(npoints, 15 * 26, "H5Sget_select_npoints");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, SPACE1_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, tmp2_space, sid1, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close temporary dataspaces */
+ ret = H5Sclose(tmp_space);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(tmp2_space);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Create dataspace for reading buffer */
+ sid2 = H5Screate_simple(SPACE3_RANK, dims3, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 15x26 hyperslab for reading memory dataset */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 15;
+ count[1] = 26;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read with data written out, row by row, using the
+ * rows[] table of (z,y,x,length) runs written from memory */
+ for (i = 0, tbuf2 = rbuf; i < (int)(sizeof(rows) / sizeof(struct row_list)); i++) {
+ tbuf = wbuf + (rows[i].z * SPACE4_DIM3 * SPACE4_DIM2) + (rows[i].y * SPACE4_DIM3) + rows[i].x;
+ for (j = 0; j < (int)rows[i].l; j++, tbuf++, tbuf2++) {
+ if (*tbuf != *tbuf2)
+ TestErrPrintf("%d: hyperslab values don't match!, i=%d, j=%d, *tbuf=%d, *tbuf2=%d\n",
+ __LINE__, i, j, (int)*tbuf, (int)*tbuf2);
+ } /* end for */
+ } /* end for */
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+} /* test_select_hyper_union_3d() */
+
+/****************************************************************
+**
+** test_select_hyper_valid_combination(): Tests invalid and valid
+** combinations of selections on dataspace for H5Scombine_select
+** and H5Smodify_select.
+**
+****************************************************************/
+static void
+test_select_hyper_valid_combination(void)
+{
+ hid_t single_pt_sid; /* Dataspace ID with single point selection */
+ hid_t single_hyper_sid; /* Dataspace ID with single block hyperslab selection */
+ hid_t regular_hyper_sid; /* Dataspace ID with regular hyperslab selection */
+ hid_t non_existent_sid = -1; /* A non-existent space id */
+ hid_t tmp_sid; /* Temporary dataspace ID */
+ hsize_t dims2D[] = {SPACE9_DIM1, SPACE9_DIM2};
+ hsize_t dims3D[] = {SPACE4_DIM1, SPACE4_DIM2, SPACE4_DIM3};
+
+ hsize_t coord1[1][SPACE2_RANK]; /* Coordinates for single point selection */
+ hsize_t start[SPACE4_RANK]; /* Hyperslab start */
+ hsize_t stride[SPACE4_RANK]; /* Hyperslab stride */
+ hsize_t count[SPACE4_RANK]; /* Hyperslab block count */
+ hsize_t block[SPACE4_RANK]; /* Hyperslab block size */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(6, ("Testing Selection Combination Validity\n"));
+ HDassert(SPACE9_DIM2 >= POINT1_NPOINTS);
+
+ /* Create dataspace for single point selection */
+ single_pt_sid = H5Screate_simple(SPACE9_RANK, dims2D, NULL);
+ CHECK(single_pt_sid, FAIL, "H5Screate_simple");
+
+ /* Select a single point for the point selection */
+ coord1[0][0] = 2;
+ coord1[0][1] = 2;
+ ret = H5Sselect_elements(single_pt_sid, H5S_SELECT_SET, (size_t)1, (const hsize_t *)coord1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Create dataspace for single hyperslab selection */
+ single_hyper_sid = H5Screate_simple(SPACE9_RANK, dims2D, NULL);
+ CHECK(single_hyper_sid, FAIL, "H5Screate_simple");
+
+ /* Select single (SPACE9_DIM1-2)x(SPACE9_DIM2-2) block hyperslab selection */
+ start[0] = 1;
+ start[1] = 1;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = (SPACE9_DIM1 - 2);
+ block[1] = (SPACE9_DIM2 - 2);
+ ret = H5Sselect_hyperslab(single_hyper_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create dataspace for regular hyperslab selection */
+ regular_hyper_sid = H5Screate_simple(SPACE4_RANK, dims3D, NULL);
+ CHECK(regular_hyper_sid, FAIL, "H5Screate_simple");
+
+ /* Select regular, strided hyperslab selection */
+ start[0] = 2;
+ start[1] = 2;
+ start[2] = 2;
+ stride[0] = 2;
+ stride[1] = 2;
+ stride[2] = 2;
+ count[0] = 5;
+ count[1] = 2;
+ count[2] = 5;
+ block[0] = 1;
+ block[1] = 1;
+ block[2] = 1;
+ ret = H5Sselect_hyperslab(regular_hyper_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Test all the selections created */
+
+ /* Test the invalid combinations between point and hyperslab */
+ H5E_BEGIN_TRY
+ {
+ tmp_sid = H5Scombine_select(single_pt_sid, H5S_SELECT_AND, single_hyper_sid);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_sid, FAIL, "H5Scombine_select");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_sid = H5Smodify_select(single_pt_sid, H5S_SELECT_AND, single_hyper_sid);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_sid, FAIL, "H5Smodify_select");
+
+ /* Test the invalid combination between two hyperslab but of different dimension size */
+ H5E_BEGIN_TRY
+ {
+ tmp_sid = H5Scombine_select(single_hyper_sid, H5S_SELECT_AND, regular_hyper_sid);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_sid, FAIL, "H5Scombine_select");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_sid = H5Smodify_select(single_hyper_sid, H5S_SELECT_AND, regular_hyper_sid);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_sid, FAIL, "H5Smodify_select");
+
+ /* Test invalid operation inputs to the two functions (H5S_SELECT_SET
+ * is not a valid combine operation) */
+ H5E_BEGIN_TRY
+ {
+ tmp_sid = H5Scombine_select(single_hyper_sid, H5S_SELECT_SET, single_hyper_sid);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_sid, FAIL, "H5Scombine_select");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_sid = H5Smodify_select(single_hyper_sid, H5S_SELECT_SET, single_hyper_sid);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_sid, FAIL, "H5Smodify_select");
+
+ /* Test inputs in case of non-existent space ids */
+ H5E_BEGIN_TRY
+ {
+ tmp_sid = H5Scombine_select(single_hyper_sid, H5S_SELECT_AND, non_existent_sid);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_sid, FAIL, "H5Scombine_select");
+
+ H5E_BEGIN_TRY
+ {
+ tmp_sid = H5Smodify_select(single_hyper_sid, H5S_SELECT_AND, non_existent_sid);
+ }
+ H5E_END_TRY;
+ VERIFY(tmp_sid, FAIL, "H5Smodify_select");
+
+ /* Close dataspaces */
+ ret = H5Sclose(single_pt_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(single_hyper_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(regular_hyper_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* test_select_hyper_valid_combination() */
+
+/****************************************************************
+**
+** test_select_hyper_and_2d(): Test basic H5S (dataspace) selection code.
+** Tests 'and' of hyperslabs in 2-D
+**
+****************************************************************/
+static void
+test_select_hyper_and_2d(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hsize_t dims1[] = {SPACE2_DIM1, SPACE2_DIM2};
+ hsize_t dims2[] = {SPACE2A_DIM1};
+ hsize_t start[SPACE2_RANK]; /* Starting location of hyperslab */
+ hsize_t stride[SPACE2_RANK]; /* Stride of hyperslab */
+ hsize_t count[SPACE2_RANK]; /* Element count of hyperslab */
+ hsize_t block[SPACE2_RANK]; /* Block size of hyperslab */
+ uint8_t *wbuf, /* buffer to write to disk */
+ *rbuf, /* buffer read from disk */
+ *tbuf, /* temporary buffer pointer */
+ *tbuf2; /* temporary buffer pointer */
+ int i, j; /* Counters */
+ herr_t ret; /* Generic return value */
+ hssize_t npoints; /* Number of elements in selection */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Hyperslab Selection Functions with intersection of 2-D hyperslabs\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE2_DIM1 * SPACE2_DIM2);
+ CHECK_PTR(wbuf, "HDmalloc");
+ rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE2_DIM1 * SPACE2_DIM2));
+ CHECK_PTR(rbuf, "HDcalloc");
+
+ /* Initialize write buffer */
+ for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
+ for (j = 0; j < SPACE2_DIM2; j++)
+ *tbuf++ = (uint8_t)((i * SPACE2_DIM2) + j);
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset on disk */
+ sid1 = H5Screate_simple(SPACE2_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create dataspace for writing buffer */
+ sid2 = H5Screate_simple(SPACE2A_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 10x10 hyperslab for disk dataset */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 10;
+ count[1] = 10;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Intersect overlapping 10x10 hyperslab (leaves the 5x5 overlap region) */
+ start[0] = 5;
+ start[1] = 5;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 10;
+ count[1] = 10;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_AND, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ npoints = H5Sget_select_npoints(sid1);
+ VERIFY(npoints, 5 * 5, "H5Sget_select_npoints");
+
+ /* Select 25 element hyperslab for 1-D memory dataset */
+ start[0] = 0;
+ stride[0] = 1;
+ count[0] = 25;
+ block[0] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ npoints = H5Sget_select_npoints(sid2);
+ VERIFY(npoints, 5 * 5, "H5Sget_select_npoints");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, SPACE2_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read entire dataset from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read with data written out: elements inside the 5x5
+ * intersection must match the write buffer, everything else must
+ * still be zero (rbuf was calloc'd) */
+ for (i = 0, tbuf = rbuf, tbuf2 = wbuf; i < SPACE2_DIM1; i++)
+ for (j = 0; j < SPACE2_DIM2; j++, tbuf++) {
+ if ((i >= 5 && i <= 9) && (j >= 5 && j <= 9)) {
+ if (*tbuf != *tbuf2)
+ HDprintf("%d: hyperslab values don't match!, i=%d, j=%d, *tbuf=%d, *tbuf2=%d\n", __LINE__,
+ i, j, (int)*tbuf, (int)*tbuf2);
+ tbuf2++;
+ } /* end if */
+ else {
+ if (*tbuf != 0)
+ HDprintf("%d: hyperslab element has wrong value!, i=%d, j=%d, *tbuf=%d\n", __LINE__, i, j,
+ (int)*tbuf);
+ } /* end else */
+ } /* end for */
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+} /* test_select_hyper_and_2d() */
+
+/****************************************************************
+**
+** test_select_hyper_xor_2d(): Test basic H5S (dataspace) selection code.
+** Tests 'xor' of hyperslabs in 2-D
+**
+****************************************************************/
+static void
+test_select_hyper_xor_2d(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hsize_t dims1[] = {SPACE2_DIM1, SPACE2_DIM2};
+ hsize_t dims2[] = {SPACE2A_DIM1};
+ hsize_t start[SPACE2_RANK]; /* Starting location of hyperslab */
+ hsize_t stride[SPACE2_RANK]; /* Stride of hyperslab */
+ hsize_t count[SPACE2_RANK]; /* Element count of hyperslab */
+ hsize_t block[SPACE2_RANK]; /* Block size of hyperslab */
+ uint8_t *wbuf, /* buffer to write to disk */
+ *rbuf, /* buffer read from disk */
+ *tbuf, /* temporary buffer pointer */
+ *tbuf2; /* temporary buffer pointer */
+ int i, j; /* Counters */
+ herr_t ret; /* Generic return value */
+ hssize_t npoints; /* Number of elements in selection */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Hyperslab Selection Functions with XOR of 2-D hyperslabs\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE2_DIM1 * SPACE2_DIM2);
+ CHECK_PTR(wbuf, "HDmalloc");
+ rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE2_DIM1 * SPACE2_DIM2));
+ CHECK_PTR(rbuf, "HDcalloc");
+
+ /* Initialize write buffer */
+ for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
+ for (j = 0; j < SPACE2_DIM2; j++)
+ *tbuf++ = (uint8_t)((i * SPACE2_DIM2) + j);
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset on disk */
+ sid1 = H5Screate_simple(SPACE2_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create dataspace for writing buffer */
+ sid2 = H5Screate_simple(SPACE2A_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 10x10 hyperslab for disk dataset */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 10;
+ count[1] = 10;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* XOR overlapping 10x10 hyperslab (keeps both blocks minus the 5x5 overlap) */
+ start[0] = 5;
+ start[1] = 5;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 10;
+ count[1] = 10;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_XOR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ npoints = H5Sget_select_npoints(sid1);
+ VERIFY(npoints, 150, "H5Sget_select_npoints");
+
+ /* Select 150 element hyperslab for 1-D memory dataset */
+ start[0] = 0;
+ stride[0] = 1;
+ count[0] = 150;
+ block[0] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ npoints = H5Sget_select_npoints(sid2);
+ VERIFY(npoints, 150, "H5Sget_select_npoints");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, SPACE2_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read entire dataset from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read with data written out: elements inside the XOR
+ * region must match the write buffer, everything else must still be
+ * zero (rbuf was calloc'd) */
+ for (i = 0, tbuf = rbuf, tbuf2 = wbuf; i < SPACE2_DIM1; i++)
+ for (j = 0; j < SPACE2_DIM2; j++, tbuf++) {
+ if (((i >= 0 && i <= 4) && (j >= 0 && j <= 9)) ||
+ ((i >= 5 && i <= 9) && ((j >= 0 && j <= 4) || (j >= 10 && j <= 14))) ||
+ ((i >= 10 && i <= 14) && (j >= 5 && j <= 14))) {
+ if (*tbuf != *tbuf2)
+ HDprintf("%d: hyperslab values don't match!, i=%d, j=%d, *tbuf=%d, *tbuf2=%d\n", __LINE__,
+ i, j, (int)*tbuf, (int)*tbuf2);
+ tbuf2++;
+ } /* end if */
+ else {
+ if (*tbuf != 0)
+ HDprintf("%d: hyperslab element has wrong value!, i=%d, j=%d, *tbuf=%d\n", __LINE__, i, j,
+ (int)*tbuf);
+ } /* end else */
+ } /* end for */
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+} /* test_select_hyper_xor_2d() */
+
+/****************************************************************
+**
+** test_select_hyper_notb_2d(): Test basic H5S (dataspace) selection code.
+** Tests 'notb' of hyperslabs in 2-D
+**
+****************************************************************/
+static void
+test_select_hyper_notb_2d(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hsize_t dims1[] = {SPACE2_DIM1, SPACE2_DIM2};
+ hsize_t dims2[] = {SPACE2A_DIM1};
+ hsize_t start[SPACE2_RANK]; /* Starting location of hyperslab */
+ hsize_t stride[SPACE2_RANK]; /* Stride of hyperslab */
+ hsize_t count[SPACE2_RANK]; /* Element count of hyperslab */
+ hsize_t block[SPACE2_RANK]; /* Block size of hyperslab */
+ uint8_t *wbuf, /* buffer to write to disk */
+ *rbuf, /* buffer read from disk */
+ *tbuf, /* temporary buffer pointer */
+ *tbuf2; /* temporary buffer pointer */
+ int i, j; /* Counters */
+ herr_t ret; /* Generic return value */
+ hssize_t npoints; /* Number of elements in selection */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Hyperslab Selection Functions with NOTB of 2-D hyperslabs\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE2_DIM1 * SPACE2_DIM2);
+ CHECK_PTR(wbuf, "HDmalloc");
+ rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE2_DIM1 * SPACE2_DIM2));
+ CHECK_PTR(rbuf, "HDcalloc");
+
+ /* Initialize write buffer */
+ for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
+ for (j = 0; j < SPACE2_DIM2; j++)
+ *tbuf++ = (uint8_t)((i * SPACE2_DIM2) + j);
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset on disk */
+ sid1 = H5Screate_simple(SPACE2_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create dataspace for writing buffer */
+ sid2 = H5Screate_simple(SPACE2A_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Select 10x10 hyperslab for disk dataset */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 10;
+ count[1] = 10;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Intersect overlapping 10x10 hyperslab */
+ start[0] = 5;
+ start[1] = 5;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 10;
+ count[1] = 10;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_NOTB, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ npoints = H5Sget_select_npoints(sid1);
+ VERIFY(npoints, 75, "H5Sget_select_npoints");
+
+ /* Select 75 hyperslab for memory dataset */
+ start[0] = 0;
+ stride[0] = 1;
+ count[0] = 75;
+ block[0] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ npoints = H5Sget_select_npoints(sid2);
+ VERIFY(npoints, 75, "H5Sget_select_npoints");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, SPACE2_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write selection to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read entire dataset from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Initialize write buffer */
+ for (i = 0, tbuf = rbuf, tbuf2 = wbuf; i < SPACE2_DIM1; i++)
+ for (j = 0; j < SPACE2_DIM2; j++, tbuf++) {
+ if (((i >= 0 && i <= 4) && (j >= 0 && j <= 9)) || ((i >= 5 && i <= 9) && (j >= 0 && j <= 4))) {
+ if (*tbuf != *tbuf2)
+ HDprintf("%d: hyperslab values don't match!, i=%d, j=%d, *tbuf=%d, *tbuf2=%d\n", __LINE__,
+ i, j, (int)*tbuf, (int)*tbuf2);
+ tbuf2++;
+ } /* end if */
+ else {
+ if (*tbuf != 0)
+ HDprintf("%d: hyperslab element has wrong value!, i=%d, j=%d, *tbuf=%d\n", __LINE__, i, j,
+ (int)*tbuf);
+ } /* end else */
+ } /* end for */
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+} /* test_select_hyper_notb_2d() */
+
+/****************************************************************
+**
+** test_select_hyper_nota_2d(): Test basic H5S (dataspace) selection code.
+** Tests 'nota' of hyperslabs in 2-D
+**
+****************************************************************/
/* Verify H5S_SELECT_NOTA on 2-D hyperslabs: the result keeps the elements of
 * the second (10x10 at (5,5)) hyperslab that are NOT in the first
 * (10x10 at (0,0)) -- 75 elements in rows 5-14. */
static void
test_select_hyper_nota_2d(void)
{
    hid_t   fid1;       /* HDF5 File IDs */
    hid_t   dataset;    /* Dataset ID */
    hid_t   sid1, sid2; /* Dataspace ID */
    hsize_t dims1[] = {SPACE2_DIM1, SPACE2_DIM2};
    hsize_t dims2[] = {SPACE2A_DIM1};
    hsize_t start[SPACE2_RANK];  /* Starting location of hyperslab */
    hsize_t stride[SPACE2_RANK]; /* Stride of hyperslab */
    hsize_t count[SPACE2_RANK];  /* Element count of hyperslab */
    hsize_t block[SPACE2_RANK];  /* Block size of hyperslab */
    uint8_t *wbuf,               /* buffer to write to disk */
        *rbuf,                   /* buffer read from disk */
        *tbuf,                   /* temporary buffer pointer */
        *tbuf2;                  /* temporary buffer pointer */
    int      i, j;               /* Counters */
    herr_t   ret;                /* Generic return value */
    hssize_t npoints;            /* Number of elements in selection */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Hyperslab Selection Functions with NOTA of 2-D hyperslabs\n"));

    /* Allocate write & read buffers */
    wbuf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE2_DIM1 * SPACE2_DIM2);
    CHECK_PTR(wbuf, "HDmalloc");
    rbuf = (uint8_t *)HDcalloc(sizeof(uint8_t), (size_t)(SPACE2_DIM1 * SPACE2_DIM2));
    CHECK_PTR(rbuf, "HDcalloc");

    /* Initialize write buffer */
    for (i = 0, tbuf = wbuf; i < SPACE2_DIM1; i++)
        for (j = 0; j < SPACE2_DIM2; j++)
            *tbuf++ = (uint8_t)((i * SPACE2_DIM2) + j);

    /* Create file */
    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(fid1, FAIL, "H5Fcreate");

    /* Create dataspace for dataset on disk */
    sid1 = H5Screate_simple(SPACE2_RANK, dims1, NULL);
    CHECK(sid1, FAIL, "H5Screate_simple");

    /* Create dataspace for writing buffer */
    sid2 = H5Screate_simple(SPACE2A_RANK, dims2, NULL);
    CHECK(sid2, FAIL, "H5Screate_simple");

    /* Select 10x10 hyperslab for disk dataset */
    start[0]  = 0;
    start[1]  = 0;
    stride[0] = 1;
    stride[1] = 1;
    count[0]  = 10;
    count[1]  = 10;
    block[0]  = 1;
    block[1]  = 1;
    ret       = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
    CHECK(ret, FAIL, "H5Sselect_hyperslab");

    /* 'NOTA' with an overlapping 10x10 hyperslab: keep only the elements of
     * the new hyperslab that are NOT in the current selection */
    start[0]  = 5;
    start[1]  = 5;
    stride[0] = 1;
    stride[1] = 1;
    count[0]  = 10;
    count[1]  = 10;
    block[0]  = 1;
    block[1]  = 1;
    ret       = H5Sselect_hyperslab(sid1, H5S_SELECT_NOTA, start, stride, count, block);
    CHECK(ret, FAIL, "H5Sselect_hyperslab");

    /* 100 elements minus the 5x5 overlap's 25 leaves 75 selected */
    npoints = H5Sget_select_npoints(sid1);
    VERIFY(npoints, 75, "H5Sget_select_npoints");

    /* Select 75-element hyperslab for memory dataset */
    start[0]  = 0;
    stride[0] = 1;
    count[0]  = 75;
    block[0]  = 1;
    ret       = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, stride, count, block);
    CHECK(ret, FAIL, "H5Sselect_hyperslab");

    npoints = H5Sget_select_npoints(sid2);
    VERIFY(npoints, 75, "H5Sget_select_npoints");

    /* Create a dataset */
    dataset = H5Dcreate2(fid1, SPACE2_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dcreate2");

    /* Write selection to disk */
    ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Read entire dataset from disk */
    ret = H5Dread(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf);
    CHECK(ret, FAIL, "H5Dread");

    /* Compare data read with data written out: the NOTA region must carry
     * consecutive write-buffer values, everything else must still be zero */
    for (i = 0, tbuf = rbuf, tbuf2 = wbuf; i < SPACE2_DIM1; i++)
        for (j = 0; j < SPACE2_DIM2; j++, tbuf++) {
            if (((i >= 10 && i <= 14) && (j >= 5 && j <= 14)) ||
                ((i >= 5 && i <= 9) && (j >= 10 && j <= 14))) {
                if (*tbuf != *tbuf2)
                    TestErrPrintf("%d: hyperslab values don't match!, i=%d, j=%d, *tbuf=%d, *tbuf2=%d\n",
                                  __LINE__, i, j, (int)*tbuf, (int)*tbuf2);
                tbuf2++;
            } /* end if */
            else {
                if (*tbuf != 0)
                    TestErrPrintf("%d: hyperslab element has wrong value!, i=%d, j=%d, *tbuf=%d\n", __LINE__,
                                  i, j, (int)*tbuf);
            } /* end else */
        }     /* end for */

    /* Close memory dataspace */
    ret = H5Sclose(sid2);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close disk dataspace */
    ret = H5Sclose(sid1);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close file */
    ret = H5Fclose(fid1);
    CHECK(ret, FAIL, "H5Fclose");

    /* Free memory buffers */
    HDfree(wbuf);
    HDfree(rbuf);
} /* test_select_hyper_nota_2d() */
+
+/****************************************************************
+**
+** test_select_hyper_iter2(): Iterator for checking hyperslab iteration
+**
+****************************************************************/
+static herr_t
+test_select_hyper_iter2(void *_elem, hid_t H5_ATTR_UNUSED type_id, unsigned ndim, const hsize_t *point,
+ void *_operator_data)
+{
+ int *tbuf = (int *)_elem, /* temporary buffer pointer */
+ **tbuf2 = (int **)_operator_data; /* temporary buffer handle */
+ unsigned u; /* Local counting variable */
+
+ if (*tbuf != **tbuf2) {
+ TestErrPrintf("Error in hyperslab iteration!\n");
+ HDprintf("location: { ");
+ for (u = 0; u < ndim; u++) {
+ HDprintf("%2d", (int)point[u]);
+ if (u < (ndim - 1))
+ HDprintf(", ");
+ } /* end for */
+ HDprintf("}\n");
+ HDprintf("*tbuf=%d, **tbuf2=%d\n", *tbuf, **tbuf2);
+ return (-1);
+ } /* end if */
+ else {
+ (*tbuf2)++;
+ return (0);
+ }
+} /* end test_select_hyper_iter2() */
+
+/****************************************************************
+**
+** test_select_hyper_union_random_5d(): Test basic H5S (dataspace) selection code.
+** Tests random unions of 5-D hyperslabs
+**
+****************************************************************/
+static void
+test_select_hyper_union_random_5d(hid_t read_plist)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hsize_t dims1[] = {SPACE5_DIM1, SPACE5_DIM2, SPACE5_DIM3, SPACE5_DIM4, SPACE5_DIM5};
+ hsize_t dims2[] = {SPACE6_DIM1};
+ hsize_t start[SPACE5_RANK]; /* Starting location of hyperslab */
+ hsize_t count[SPACE5_RANK]; /* Element count of hyperslab */
+ int *wbuf, /* buffer to write to disk */
+ *rbuf, /* buffer read from disk */
+ *tbuf; /* temporary buffer pointer */
+ int i, j, k, l, m; /* Counters */
+ herr_t ret; /* Generic return value */
+ hssize_t npoints, /* Number of elements in file selection */
+ npoints2; /* Number of elements in memory selection */
+ unsigned seed; /* Random number seed for each test */
+ unsigned test_num; /* Count of tests being executed */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Hyperslab Selection Functions with random unions of 5-D hyperslabs\n"));
+
+ /* Allocate write & read buffers */
+ wbuf = (int *)HDmalloc(sizeof(int) * SPACE5_DIM1 * SPACE5_DIM2 * SPACE5_DIM3 * SPACE5_DIM4 * SPACE5_DIM5);
+ CHECK_PTR(wbuf, "HDmalloc");
+ rbuf = (int *)HDcalloc(sizeof(int),
+ (size_t)(SPACE5_DIM1 * SPACE5_DIM2 * SPACE5_DIM3 * SPACE5_DIM4 * SPACE5_DIM5));
+ CHECK_PTR(rbuf, "HDcalloc");
+
+ /* Initialize write buffer */
+ for (i = 0, tbuf = wbuf; i < SPACE5_DIM1; i++)
+ for (j = 0; j < SPACE5_DIM2; j++)
+ for (k = 0; k < SPACE5_DIM3; k++)
+ for (l = 0; l < SPACE5_DIM4; l++)
+ for (m = 0; m < SPACE5_DIM5; m++)
+ *tbuf++ = (int)(((((((i * SPACE5_DIM2) + j) * SPACE5_DIM3) + k) * SPACE5_DIM4) + l) *
+ SPACE5_DIM5) +
+ m;
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset on disk */
+ sid1 = H5Screate_simple(SPACE5_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, SPACE5_NAME, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Write entire dataset to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Create dataspace for reading buffer */
+ sid2 = H5Screate_simple(SPACE6_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Get initial random # seed */
+ seed = (unsigned)HDtime(NULL) + (unsigned)HDclock();
+
+ /* Crunch through a bunch of random hyperslab reads from the file dataset */
+ for (test_num = 0; test_num < NRAND_HYPER; test_num++) {
+ /* Save random # seed for later use */
+ /* (Used in case of errors, to regenerate the hyperslab sequence) */
+ seed += (unsigned)HDclock();
+ HDsrandom(seed);
+
+ for (i = 0; i < NHYPERSLABS; i++) {
+ /* Select random hyperslab location & size for selection */
+ for (j = 0; j < SPACE5_RANK; j++) {
+ start[j] = ((hsize_t)HDrandom() % dims1[j]);
+ count[j] = (((hsize_t)HDrandom() % (dims1[j] - start[j])) + 1);
+ } /* end for */
+
+ /* Select hyperslab */
+ ret = H5Sselect_hyperslab(sid1, (i == 0 ? H5S_SELECT_SET : H5S_SELECT_OR), start, NULL, count,
+ NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ if (ret < 0) {
+ TestErrPrintf("Random hyperslabs for seed %u failed!\n", seed);
+ break;
+ } /* end if */
+ } /* end for */
+
+ /* Get the number of elements selected */
+ npoints = H5Sget_select_npoints(sid1);
+ CHECK(npoints, 0, "H5Sget_select_npoints");
+
+ /* Select linear 1-D hyperslab for memory dataset */
+ start[0] = 0;
+ count[0] = (hsize_t)npoints;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, NULL, count, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ npoints2 = H5Sget_select_npoints(sid2);
+ VERIFY(npoints, npoints2, "H5Sget_select_npoints");
+
+ /* Read selection from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_INT, sid2, sid1, read_plist, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+ if (ret < 0) {
+ TestErrPrintf("Random hyperslabs for seed %u failed!\n", seed);
+ break;
+ } /* end if */
+
+ /* Compare data read with data written out */
+ tbuf = rbuf;
+ ret = H5Diterate(wbuf, H5T_NATIVE_INT, sid1, test_select_hyper_iter2, &tbuf);
+ if (ret < 0) {
+ TestErrPrintf("Random hyperslabs for seed %u failed!\n", seed);
+ break;
+ } /* end if */
+
+ /* Set the read buffer back to all zeroes */
+ HDmemset(rbuf, 0, (size_t)SPACE6_DIM1);
+ } /* end for */
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+ HDfree(rbuf);
+} /* test_select_hyper_union_random_5d() */
+
+/****************************************************************
+**
+** test_select_hyper_chunk(): Test basic H5S (dataspace) selection code.
+** Tests large hyperslab selection in chunked dataset
+**
+****************************************************************/
/* Write an (NX_SUB x NY_SUB x NZ_SUB) hyperslab to a chunked dataset, close
 * and re-open the file, read the same hyperslab back, and compare the
 * buffers element-by-element. */
static void
test_select_hyper_chunk(hid_t fapl_plist, hid_t xfer_plist)
{
    hsize_t dimsf[3];                                     /* dataset dimensions */
    hsize_t chunk_dimsf[3] = {CHUNK_X, CHUNK_Y, CHUNK_Z}; /* chunk sizes */
    short  *data;                                         /* data to write */
    short  *tmpdata;                                      /* temporary pointer into write data */

    /*
     * Data and output buffer initialization.
     */
    hid_t   file, dataset; /* handles */
    hid_t   dataspace;
    hid_t   memspace;
    hid_t   plist;
    hsize_t dimsm[3];    /* memory space dimensions */
    hsize_t dims_out[3]; /* dataset dimensions */
    herr_t  status;

    short *data_out;    /* output buffer */
    short *tmpdata_out; /* temporary pointer into output buffer */

    hsize_t count[3];      /* size of the hyperslab in the file */
    hsize_t offset[3];     /* hyperslab offset in the file */
    hsize_t count_out[3];  /* size of the hyperslab in memory */
    hsize_t offset_out[3]; /* hyperslab offset in memory */
    int     i, j, k, status_n, rank;

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Hyperslab I/O on Large Chunks\n"));

    /* Allocate the transfer buffers */
    data = (short *)HDmalloc(sizeof(short) * X * Y * Z);
    CHECK_PTR(data, "HDmalloc");
    data_out = (short *)HDcalloc((size_t)(NX * NY * NZ), sizeof(short));
    CHECK_PTR(data_out, "HDcalloc");

    /*
     * Data buffer initialization: values cycle 1..255,0 so they fit in the
     * dataset's unsigned char on-disk type below.
     */
    tmpdata = data;
    for (j = 0; j < X; j++)
        for (i = 0; i < Y; i++)
            for (k = 0; k < Z; k++)
                *tmpdata++ = (short)((k + 1) % 256);

    /*
     * Create a new file using H5F_ACC_TRUNC access,
     * the default file creation properties, and the default file
     * access properties.
     */
    file = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_plist);
    CHECK(file, FAIL, "H5Fcreate");

    /*
     * Describe the size of the array and create the dataspace for fixed
     * size dataset.
     */
    dimsf[0]  = X;
    dimsf[1]  = Y;
    dimsf[2]  = Z;
    dataspace = H5Screate_simple(RANK_F, dimsf, NULL);
    CHECK(dataspace, FAIL, "H5Screate_simple");

    /*
     * Create a new dataset within the file using defined dataspace and
     * chunking properties.  Stored as unsigned char on disk but accessed as
     * native short in memory, exercising type conversion during chunked I/O.
     */
    plist = H5Pcreate(H5P_DATASET_CREATE);
    CHECK(plist, FAIL, "H5Pcreate");
    status = H5Pset_chunk(plist, RANK_F, chunk_dimsf);
    CHECK(status, FAIL, "H5Pset_chunk");
    dataset = H5Dcreate2(file, DATASETNAME, H5T_NATIVE_UCHAR, dataspace, H5P_DEFAULT, plist, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dcreate2");

    /*
     * Define hyperslab in the dataset.
     */
    offset[0] = 0;
    offset[1] = 0;
    offset[2] = 0;
    count[0]  = NX_SUB;
    count[1]  = NY_SUB;
    count[2]  = NZ_SUB;
    status    = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, offset, NULL, count, NULL);
    CHECK(status, FAIL, "H5Sselect_hyperslab");

    /*
     * Define the memory dataspace.
     */
    dimsm[0] = NX;
    dimsm[1] = NY;
    dimsm[2] = NZ;
    memspace = H5Screate_simple(RANK_M, dimsm, NULL);
    CHECK(memspace, FAIL, "H5Screate_simple");

    /*
     * Define memory hyperslab.
     */
    offset_out[0] = 0;
    offset_out[1] = 0;
    offset_out[2] = 0;
    count_out[0]  = NX_SUB;
    count_out[1]  = NY_SUB;
    count_out[2]  = NZ_SUB;
    status        = H5Sselect_hyperslab(memspace, H5S_SELECT_SET, offset_out, NULL, count_out, NULL);
    CHECK(status, FAIL, "H5Sselect_hyperslab");

    /*
     * Write the data to the dataset using hyperslabs
     */
    status = H5Dwrite(dataset, H5T_NATIVE_SHORT, memspace, dataspace, xfer_plist, data);
    CHECK(status, FAIL, "H5Dwrite");

    /*
     * Close/release resources.
     */
    status = H5Pclose(plist);
    CHECK(status, FAIL, "H5Pclose");
    status = H5Sclose(dataspace);
    CHECK(status, FAIL, "H5Sclose");
    status = H5Sclose(memspace);
    CHECK(status, FAIL, "H5Sclose");
    status = H5Dclose(dataset);
    CHECK(status, FAIL, "H5Dclose");
    status = H5Fclose(file);
    CHECK(status, FAIL, "H5Fclose");

    /*************************************************************

    This reads the hyperslab from the test.h5 file just
    created, into a 3-dimensional plane of the 3-dimensional
    array.

    ************************************************************/

    /*
     * Open the file and the dataset.
     */
    file = H5Fopen(FILENAME, H5F_ACC_RDONLY, fapl_plist);
    CHECK(file, FAIL, "H5Fopen");
    dataset = H5Dopen2(file, DATASETNAME, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dopen2");

    dataspace = H5Dget_space(dataset); /* dataspace handle */
    CHECK(dataspace, FAIL, "H5Dget_space");
    rank = H5Sget_simple_extent_ndims(dataspace);
    VERIFY(rank, 3, "H5Sget_simple_extent_ndims");
    status_n = H5Sget_simple_extent_dims(dataspace, dims_out, NULL);
    CHECK(status_n, FAIL, "H5Sget_simple_extent_dims");
    VERIFY(dims_out[0], dimsf[0], "Dataset dimensions");
    VERIFY(dims_out[1], dimsf[1], "Dataset dimensions");
    VERIFY(dims_out[2], dimsf[2], "Dataset dimensions");

    /*
     * Define hyperslab in the dataset.
     */
    offset[0] = 0;
    offset[1] = 0;
    offset[2] = 0;
    count[0]  = NX_SUB;
    count[1]  = NY_SUB;
    count[2]  = NZ_SUB;
    status    = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, offset, NULL, count, NULL);
    CHECK(status, FAIL, "H5Sselect_hyperslab");

    /*
     * Define the memory dataspace.
     */
    dimsm[0] = NX;
    dimsm[1] = NY;
    dimsm[2] = NZ;
    memspace = H5Screate_simple(RANK_M, dimsm, NULL);
    CHECK(memspace, FAIL, "H5Screate_simple");

    /*
     * Define memory hyperslab.
     */
    offset_out[0] = 0;
    offset_out[1] = 0;
    offset_out[2] = 0;
    count_out[0]  = NX_SUB;
    count_out[1]  = NY_SUB;
    count_out[2]  = NZ_SUB;
    status        = H5Sselect_hyperslab(memspace, H5S_SELECT_SET, offset_out, NULL, count_out, NULL);
    CHECK(status, FAIL, "H5Sselect_hyperslab");

    /*
     * Read data from hyperslab in the file into the hyperslab in
     * memory and display.
     */
    status = H5Dread(dataset, H5T_NATIVE_SHORT, memspace, dataspace, xfer_plist, data_out);
    CHECK(status, FAIL, "H5Dread");

    /* Compare data written with data read in.
     * NOTE(review): this walks the full X*Y*Z extent of both buffers, which
     * only works if the hyperslab covers the whole dataset (NX_SUB == X,
     * NY_SUB == Y, NZ_SUB == Z) -- confirm against the macro definitions. */
    tmpdata     = data;
    tmpdata_out = data_out;
    for (j = 0; j < X; j++)
        for (i = 0; i < Y; i++)
            for (k = 0; k < Z; k++, tmpdata++, tmpdata_out++) {
                if (*tmpdata != *tmpdata_out)
                    TestErrPrintf("Line %d: Error! j=%d, i=%d, k=%d, *tmpdata=%x, *tmpdata_out=%x\n",
                                  __LINE__, j, i, k, (unsigned)*tmpdata, (unsigned)*tmpdata_out);
            } /* end for */

    /*
     * Close and release resources.
     */
    status = H5Dclose(dataset);
    CHECK(status, FAIL, "H5Dclose");
    status = H5Sclose(dataspace);
    CHECK(status, FAIL, "H5Sclose");
    status = H5Sclose(memspace);
    CHECK(status, FAIL, "H5Sclose");
    status = H5Fclose(file);
    CHECK(status, FAIL, "H5Fclose");
    HDfree(data);
    HDfree(data_out);
} /* test_select_hyper_chunk() */
+
+/****************************************************************
+**
+** test_select_point_chunk(): Test basic H5S (dataspace) selection code.
+** Tests combinations of hyperslab and point selections on
+** chunked datasets.
+**
+****************************************************************/
+static void
+test_select_point_chunk(void)
+{
+ hsize_t dimsf[SPACE7_RANK]; /* dataset dimensions */
+ hsize_t chunk_dimsf[SPACE7_RANK] = {SPACE7_CHUNK_DIM1, SPACE7_CHUNK_DIM2}; /* chunk sizes */
+ unsigned *data; /* data to write */
+ unsigned *tmpdata; /* data to write */
+
+ /*
+ * Data and output buffer initialization.
+ */
+ hid_t file, dataset; /* handles */
+ hid_t dataspace;
+ hid_t pnt1_space; /* Dataspace to hold 1st point selection */
+ hid_t pnt2_space; /* Dataspace to hold 2nd point selection */
+ hid_t hyp1_space; /* Dataspace to hold 1st hyperslab selection */
+ hid_t hyp2_space; /* Dataspace to hold 2nd hyperslab selection */
+ hid_t dcpl;
+ herr_t ret; /* Generic return value */
+
+ unsigned *data_out; /* output buffer */
+
+ hsize_t start[SPACE7_RANK]; /* hyperslab offset */
+ hsize_t count[SPACE7_RANK]; /* size of the hyperslab */
+ hsize_t points[SPACE7_NPOINTS][SPACE7_RANK]; /* points for selection */
+ unsigned i, j; /* Local index variables */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Point Selections on Chunked Datasets\n"));
+
+ /* Allocate the transfer buffers */
+ data = (unsigned *)HDmalloc(sizeof(unsigned) * SPACE7_DIM1 * SPACE7_DIM2);
+ CHECK_PTR(data, "HDmalloc");
+ data_out = (unsigned *)HDcalloc((size_t)(SPACE7_DIM1 * SPACE7_DIM2), sizeof(unsigned));
+ CHECK_PTR(data_out, "HDcalloc");
+
+ /*
+ * Data buffer initialization.
+ */
+ tmpdata = data;
+ for (i = 0; i < SPACE7_DIM1; i++)
+ for (j = 0; j < SPACE7_DIM1; j++)
+ *tmpdata++ = ((i * SPACE7_DIM2) + j) % 256;
+
+ /*
+ * Create a new file using H5F_ACC_TRUNC access,
+ * the default file creation properties and file
+ * access properties.
+ */
+ file = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(file, FAIL, "H5Fcreate");
+
+ /* Create file dataspace */
+ dimsf[0] = SPACE7_DIM1;
+ dimsf[1] = SPACE7_DIM2;
+ dataspace = H5Screate_simple(SPACE7_RANK, dimsf, NULL);
+ CHECK(dataspace, FAIL, "H5Screate_simple");
+
+ /*
+ * Create a new dataset within the file using defined dataspace and
+ * chunking properties.
+ */
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, FAIL, "H5Pcreate");
+ ret = H5Pset_chunk(dcpl, SPACE7_RANK, chunk_dimsf);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+ dataset = H5Dcreate2(file, DATASETNAME, H5T_NATIVE_UCHAR, dataspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Create 1st point selection */
+ pnt1_space = H5Scopy(dataspace);
+ CHECK(pnt1_space, FAIL, "H5Scopy");
+
+ points[0][0] = 3;
+ points[0][1] = 3;
+ points[1][0] = 3;
+ points[1][1] = 8;
+ points[2][0] = 8;
+ points[2][1] = 3;
+ points[3][0] = 8;
+ points[3][1] = 8;
+ points[4][0] = 1; /* In same chunk as point #0, but "earlier" in chunk */
+ points[4][1] = 1;
+ points[5][0] = 1; /* In same chunk as point #1, but "earlier" in chunk */
+ points[5][1] = 6;
+ points[6][0] = 6; /* In same chunk as point #2, but "earlier" in chunk */
+ points[6][1] = 1;
+ points[7][0] = 6; /* In same chunk as point #3, but "earlier" in chunk */
+ points[7][1] = 6;
+ ret = H5Sselect_elements(pnt1_space, H5S_SELECT_SET, (size_t)SPACE7_NPOINTS, (const hsize_t *)points);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Create 1st hyperslab selection */
+ hyp1_space = H5Scopy(dataspace);
+ CHECK(hyp1_space, FAIL, "H5Scopy");
+
+ start[0] = 2;
+ start[1] = 2;
+ count[0] = 4;
+ count[1] = 2;
+ ret = H5Sselect_hyperslab(hyp1_space, H5S_SELECT_SET, start, NULL, count, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Write out data using 1st point selection for file & hyperslab for memory */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UINT, hyp1_space, pnt1_space, H5P_DEFAULT, data);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Create 2nd point selection */
+ pnt2_space = H5Scopy(dataspace);
+ CHECK(pnt2_space, FAIL, "H5Scopy");
+
+ points[0][0] = 4;
+ points[0][1] = 4;
+ points[1][0] = 4;
+ points[1][1] = 9;
+ points[2][0] = 9;
+ points[2][1] = 4;
+ points[3][0] = 9;
+ points[3][1] = 9;
+ points[4][0] = 2; /* In same chunk as point #0, but "earlier" in chunk */
+ points[4][1] = 2;
+ points[5][0] = 2; /* In same chunk as point #1, but "earlier" in chunk */
+ points[5][1] = 7;
+ points[6][0] = 7; /* In same chunk as point #2, but "earlier" in chunk */
+ points[6][1] = 2;
+ points[7][0] = 7; /* In same chunk as point #3, but "earlier" in chunk */
+ points[7][1] = 7;
+ ret = H5Sselect_elements(pnt2_space, H5S_SELECT_SET, (size_t)SPACE7_NPOINTS, (const hsize_t *)points);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Create 2nd hyperslab selection */
+ hyp2_space = H5Scopy(dataspace);
+ CHECK(hyp2_space, FAIL, "H5Scopy");
+
+ start[0] = 2;
+ start[1] = 4;
+ count[0] = 4;
+ count[1] = 2;
+ ret = H5Sselect_hyperslab(hyp2_space, H5S_SELECT_SET, start, NULL, count, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Write out data using 2nd hyperslab selection for file & point for memory */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UINT, pnt2_space, hyp2_space, H5P_DEFAULT, data);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close everything (except selections) */
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Sclose(dataspace);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Re-open file & dataset */
+ file = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(file, FAIL, "H5Fopen");
+ dataset = H5Dopen2(file, DATASETNAME, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dopen2");
+
+ /* Read data using 1st point selection for file and hyperslab for memory */
+ ret = H5Dread(dataset, H5T_NATIVE_UINT, hyp1_space, pnt1_space, H5P_DEFAULT, data_out);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Verify data (later) */
+
+ /* Read data using 2nd hyperslab selection for file and point for memory */
+ ret = H5Dread(dataset, H5T_NATIVE_UINT, pnt2_space, hyp2_space, H5P_DEFAULT, data_out);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Verify data (later) */
+
+ /* Close everything (including selections) */
+ ret = H5Sclose(pnt1_space);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(pnt2_space);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(hyp1_space);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(hyp2_space);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(file);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ HDfree(data);
+ HDfree(data_out);
+} /* test_select_point_chunk() */
+
+/****************************************************************
+**
+** test_select_scalar_chunk(): Test basic H5S (dataspace) selection code.
+** Tests using a scalar dataspace (in memory) to access chunked datasets.
+**
+****************************************************************/
+static void
+test_select_scalar_chunk(void)
+{
+ hid_t file_id; /* File ID */
+ hid_t dcpl; /* Dataset creation property list */
+ hid_t dsid; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t m_sid; /* Memory dataspace */
+ hsize_t dims[] = {2}; /* Dataset dimensions */
+ hsize_t maxdims[] = {H5S_UNLIMITED}; /* Dataset maximum dimensions */
+ hsize_t offset[] = {0}; /* Hyperslab start */
+ hsize_t count[] = {1}; /* Hyperslab count */
+ unsigned data = 2; /* Data to write */
+ herr_t ret;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Scalar Dataspaces and Chunked Datasets\n"));
+
+ file_id = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(file_id, FAIL, "H5Fcreate");
+
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, FAIL, "H5Pcreate");
+
+ dims[0] = 1024U;
+ ret = H5Pset_chunk(dcpl, 1, dims);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+
+ /* Create 1-D dataspace */
+ sid = H5Screate_simple(1, dims, maxdims);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ dsid = H5Dcreate2(file_id, "dset", H5T_NATIVE_UINT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(dsid, FAIL, "H5Dcreate2");
+
+ /* Select scalar area (offset 0, count 1) */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, offset, NULL, count, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create scalar memory dataspace */
+ m_sid = H5Screate(H5S_SCALAR);
+ CHECK(m_sid, FAIL, "H5Screate");
+
+ /* Write out data using scalar dataspace for memory dataspace */
+ ret = H5Dwrite(dsid, H5T_NATIVE_UINT, m_sid, sid, H5P_DEFAULT, &data);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close resources */
+ ret = H5Sclose(m_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Dclose(dsid);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Fclose(file_id);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_select_scalar_chunk() */
+
+/****************************************************************
+**
+** test_select_valid(): Test basic H5S (dataspace) selection code.
+** Tests selection validity
+**
+****************************************************************/
+static void
+test_select_valid(void)
+{
+ herr_t error;
+ htri_t valid;
+ hid_t main_space, sub_space;
+ hsize_t safe_start[2] = {1, 1};
+ hsize_t safe_count[2] = {1, 1};
+ hsize_t start[2];
+ hsize_t dims[2], maxdims[2], size[2], count[2];
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Selection Validity\n"));
+
+ MESSAGE(8, ("Case 1 : sub_space is not a valid dataspace\n"));
+ dims[0] = dims[1] = H5S_UNLIMITED;
+
+ /* Unlimited *current* dimensions are illegal, so creation must fail
+ * and leave sub_space as an invalid ID */
+ H5E_BEGIN_TRY
+ {
+ sub_space = H5Screate_simple(2, dims, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(sub_space, FAIL, "H5Screate_simple");
+
+ /* Querying an invalid ID must also fail */
+ H5E_BEGIN_TRY
+ {
+ valid = H5Sselect_valid(sub_space);
+ }
+ H5E_END_TRY;
+ VERIFY(valid, FAIL, "H5Sselect_valid");
+
+ /* Set arrays and dataspace for the rest of the cases */
+ count[0] = count[1] = 1;
+ dims[0] = dims[1] = maxdims[0] = maxdims[1] = 10;
+
+ main_space = H5Screate_simple(2, dims, maxdims);
+ CHECK(main_space, FAIL, "H5Screate_simple");
+
+ MESSAGE(8, ("Case 2 : sub_space is a valid but closed dataspace\n"));
+ sub_space = H5Scopy(main_space);
+ CHECK(sub_space, FAIL, "H5Scopy");
+
+ error = H5Sclose(sub_space);
+ CHECK(error, FAIL, "H5Sclose");
+
+ /* Querying a closed (stale) ID must fail */
+ H5E_BEGIN_TRY
+ {
+ valid = H5Sselect_valid(sub_space);
+ }
+ H5E_END_TRY;
+ VERIFY(valid, FAIL, "H5Sselect_valid");
+
+ MESSAGE(8, ("Case 3 : in the dimensions\nTry offset (4,4) and size(6,6), the original space is of size "
+ "(10,10)\n"));
+ start[0] = start[1] = 4;
+ size[0] = size[1] = 6;
+
+ sub_space = H5Scopy(main_space);
+ CHECK(sub_space, FAIL, "H5Scopy");
+
+ /* stride and block are both 'size'; stride is irrelevant here since count is 1 per dimension */
+ error = H5Sselect_hyperslab(sub_space, H5S_SELECT_SET, start, size, count, size);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* 4 + 6 == 10, so the selection fits exactly within the extent */
+ valid = H5Sselect_valid(sub_space);
+ VERIFY(valid, TRUE, "H5Sselect_valid");
+
+ /* OR-ing in a selection that's safely inside the extent keeps it valid */
+ error = H5Sselect_hyperslab(sub_space, H5S_SELECT_OR, safe_start, NULL, safe_count, NULL);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ valid = H5Sselect_valid(sub_space);
+ VERIFY(valid, TRUE, "H5Sselect_valid");
+
+ error = H5Sclose(sub_space);
+ CHECK(error, FAIL, "H5Sclose");
+
+ MESSAGE(8, ("Case 4 : exceed dimensions by 1\nTry offset (5,5) and size(6,6), the original space is of "
+ "size (10,10)\n"));
+ start[0] = start[1] = 5;
+ size[0] = size[1] = 6;
+
+ sub_space = H5Scopy(main_space);
+ CHECK(sub_space, FAIL, "H5Scopy");
+
+ error = H5Sselect_hyperslab(sub_space, H5S_SELECT_SET, start, size, count, size);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* 5 + 6 == 11 > 10, so the selection extends past the extent */
+ valid = H5Sselect_valid(sub_space);
+ VERIFY(valid, FALSE, "H5Sselect_valid");
+
+ /* OR-ing in an in-bounds selection does not make the whole selection valid */
+ error = H5Sselect_hyperslab(sub_space, H5S_SELECT_OR, safe_start, NULL, safe_count, NULL);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ valid = H5Sselect_valid(sub_space);
+ VERIFY(valid, FALSE, "H5Sselect_valid");
+
+ error = H5Sclose(sub_space);
+ CHECK(error, FAIL, "H5Sclose");
+
+ MESSAGE(8, ("Case 5 : exceed dimensions by 2\nTry offset (6,6) and size(6,6), the original space is of "
+ "size (10,10)\n"));
+ start[0] = start[1] = 6;
+ size[0] = size[1] = 6;
+
+ sub_space = H5Scopy(main_space);
+ CHECK(sub_space, FAIL, "H5Scopy");
+
+ error = H5Sselect_hyperslab(sub_space, H5S_SELECT_SET, start, size, count, size);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ valid = H5Sselect_valid(sub_space);
+ VERIFY(valid, FALSE, "H5Sselect_valid");
+
+ error = H5Sselect_hyperslab(sub_space, H5S_SELECT_OR, safe_start, NULL, safe_count, NULL);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ valid = H5Sselect_valid(sub_space);
+ VERIFY(valid, FALSE, "H5Sselect_valid");
+
+ error = H5Sclose(sub_space);
+ CHECK(error, FAIL, "H5Sclose");
+ error = H5Sclose(main_space);
+ CHECK(error, FAIL, "H5Sclose");
+} /* test_select_valid() */
+
+/****************************************************************
+**
+** test_select_combine(): Test basic H5S (dataspace) selection code.
+** Tests combining "all" and "none" selections with hyperslab
+** operations.
+**
+****************************************************************/
+static void
+test_select_combine(void)
+{
+ hid_t base_id; /* Base dataspace for test */
+ hid_t all_id; /* Dataspace for "all" selection */
+ hid_t none_id; /* Dataspace for "none" selection */
+ hid_t space1; /* Temporary dataspace #1 */
+ hsize_t start[SPACE7_RANK]; /* Hyperslab start */
+ hsize_t stride[SPACE7_RANK]; /* Hyperslab stride */
+ hsize_t count[SPACE7_RANK]; /* Hyperslab count */
+ hsize_t block[SPACE7_RANK]; /* Hyperslab block */
+ hsize_t dims[SPACE7_RANK] = {SPACE7_DIM1, SPACE7_DIM2}; /* Dimensions of dataspace */
+ H5S_sel_type sel_type; /* Selection type */
+ hssize_t nblocks; /* Number of hyperslab blocks */
+ hsize_t blocks[16][2][SPACE7_RANK]; /* List of blocks */
+ herr_t error;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Selection Combinations\n"));
+
+ /* Create dataspace for dataset on disk */
+ base_id = H5Screate_simple(SPACE7_RANK, dims, NULL);
+ CHECK(base_id, FAIL, "H5Screate_simple");
+
+ /* Copy base dataspace and set selection to "all" */
+ all_id = H5Scopy(base_id);
+ CHECK(all_id, FAIL, "H5Scopy");
+ error = H5Sselect_all(all_id);
+ CHECK(error, FAIL, "H5Sselect_all");
+ sel_type = H5Sget_select_type(all_id);
+ VERIFY(sel_type, H5S_SEL_ALL, "H5Sget_select_type");
+
+ /* Copy base dataspace and set selection to "none" */
+ none_id = H5Scopy(base_id);
+ CHECK(none_id, FAIL, "H5Scopy");
+ error = H5Sselect_none(none_id);
+ CHECK(error, FAIL, "H5Sselect_none");
+ sel_type = H5Sget_select_type(none_id);
+ VERIFY(sel_type, H5S_SEL_NONE, "H5Sget_select_type");
+
+ /* Copy "all" selection & space */
+ space1 = H5Scopy(all_id);
+ CHECK(space1, FAIL, "H5Scopy");
+
+ /* 'OR' "all" selection with another hyperslab */
+ start[0] = start[1] = 0;
+ stride[0] = stride[1] = 1;
+ count[0] = count[1] = 1;
+ block[0] = block[1] = 5;
+ error = H5Sselect_hyperslab(space1, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify that it's still "all" selection ("all" OR anything == "all") */
+ sel_type = H5Sget_select_type(space1);
+ VERIFY(sel_type, H5S_SEL_ALL, "H5Sget_select_type");
+
+ /* Close temporary dataspace */
+ error = H5Sclose(space1);
+ CHECK(error, FAIL, "H5Sclose");
+
+ /* Copy "all" selection & space */
+ space1 = H5Scopy(all_id);
+ CHECK(space1, FAIL, "H5Scopy");
+
+ /* 'AND' "all" selection with another hyperslab */
+ start[0] = start[1] = 0;
+ stride[0] = stride[1] = 1;
+ count[0] = count[1] = 1;
+ block[0] = block[1] = 5;
+ error = H5Sselect_hyperslab(space1, H5S_SELECT_AND, start, stride, count, block);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify that the new selection is the same as the original block
+ * ("all" AND block == block) */
+ sel_type = H5Sget_select_type(space1);
+ VERIFY(sel_type, H5S_SEL_HYPERSLABS, "H5Sget_select_type");
+
+ /* Verify that there is only one block */
+ nblocks = H5Sget_select_hyper_nblocks(space1);
+ VERIFY(nblocks, 1, "H5Sget_select_hyper_nblocks");
+
+ /* Retrieve the block defined */
+ HDmemset(blocks, -1, sizeof(blocks)); /* Reset block list */
+ error = H5Sget_select_hyper_blocklist(space1, (hsize_t)0, (hsize_t)nblocks, (hsize_t *)blocks);
+ CHECK(error, FAIL, "H5Sget_select_hyper_blocklist");
+
+ /* Verify that the correct block is defined (blocklist entries are
+ * {start corner, opposite corner} pairs) */
+ VERIFY(blocks[0][0][0], (hsize_t)start[0], "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][0][1], (hsize_t)start[1], "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][1][0], (block[0] - 1), "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][1][1], (block[1] - 1), "H5Sget_select_hyper_blocklist");
+
+ /* Close temporary dataspace */
+ error = H5Sclose(space1);
+ CHECK(error, FAIL, "H5Sclose");
+
+ /* Copy "all" selection & space */
+ space1 = H5Scopy(all_id);
+ CHECK(space1, FAIL, "H5Scopy");
+
+ /* 'XOR' "all" selection with another hyperslab */
+ start[0] = start[1] = 0;
+ stride[0] = stride[1] = 1;
+ count[0] = count[1] = 1;
+ block[0] = block[1] = 5;
+ error = H5Sselect_hyperslab(space1, H5S_SELECT_XOR, start, stride, count, block);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify that the new selection is an inversion of the original block */
+ sel_type = H5Sget_select_type(space1);
+ VERIFY(sel_type, H5S_SEL_HYPERSLABS, "H5Sget_select_type");
+
+ /* Verify that there are two blocks */
+ nblocks = H5Sget_select_hyper_nblocks(space1);
+ VERIFY(nblocks, 2, "H5Sget_select_hyper_nblocks");
+
+ /* Retrieve the block defined */
+ HDmemset(blocks, -1, sizeof(blocks)); /* Reset block list */
+ error = H5Sget_select_hyper_blocklist(space1, (hsize_t)0, (hsize_t)nblocks, (hsize_t *)blocks);
+ CHECK(error, FAIL, "H5Sget_select_hyper_blocklist");
+
+ /* Verify that the correct blocks are defined (everything except the
+ * 5x5 corner block, covered as an "L" shape by two blocks) */
+ VERIFY(blocks[0][0][0], 0, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][0][1], 5, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][1][0], 4, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][1][1], 9, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[1][0][0], 5, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[1][0][1], 0, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[1][1][0], 9, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[1][1][1], 9, "H5Sget_select_hyper_blocklist");
+
+ /* Close temporary dataspace */
+ error = H5Sclose(space1);
+ CHECK(error, FAIL, "H5Sclose");
+
+ /* Copy "all" selection & space */
+ space1 = H5Scopy(all_id);
+ CHECK(space1, FAIL, "H5Scopy");
+
+ /* 'NOTB' "all" selection with another hyperslab */
+ start[0] = start[1] = 0;
+ stride[0] = stride[1] = 1;
+ count[0] = count[1] = 1;
+ block[0] = block[1] = 5;
+ error = H5Sselect_hyperslab(space1, H5S_SELECT_NOTB, start, stride, count, block);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify that the new selection is an inversion of the original block
+ * (same result as XOR, since the block is a subset of "all") */
+ sel_type = H5Sget_select_type(space1);
+ VERIFY(sel_type, H5S_SEL_HYPERSLABS, "H5Sget_select_type");
+
+ /* Verify that there are two blocks */
+ nblocks = H5Sget_select_hyper_nblocks(space1);
+ VERIFY(nblocks, 2, "H5Sget_select_hyper_nblocks");
+
+ /* Retrieve the block defined */
+ HDmemset(blocks, -1, sizeof(blocks)); /* Reset block list */
+ error = H5Sget_select_hyper_blocklist(space1, (hsize_t)0, (hsize_t)nblocks, (hsize_t *)blocks);
+ CHECK(error, FAIL, "H5Sget_select_hyper_blocklist");
+
+ /* Verify that the correct blocks are defined */
+ VERIFY(blocks[0][0][0], 0, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][0][1], 5, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][1][0], 4, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][1][1], 9, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[1][0][0], 5, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[1][0][1], 0, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[1][1][0], 9, "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[1][1][1], 9, "H5Sget_select_hyper_blocklist");
+
+ /* Close temporary dataspace */
+ error = H5Sclose(space1);
+ CHECK(error, FAIL, "H5Sclose");
+
+ /* Copy "all" selection & space */
+ space1 = H5Scopy(all_id);
+ CHECK(space1, FAIL, "H5Scopy");
+
+ /* 'NOTA' "all" selection with another hyperslab */
+ start[0] = start[1] = 0;
+ stride[0] = stride[1] = 1;
+ count[0] = count[1] = 1;
+ block[0] = block[1] = 5;
+ error = H5Sselect_hyperslab(space1, H5S_SELECT_NOTA, start, stride, count, block);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify that the new selection is the "none" selection
+ * (block NOTA "all" == nothing, since the block is inside "all") */
+ sel_type = H5Sget_select_type(space1);
+ VERIFY(sel_type, H5S_SEL_NONE, "H5Sget_select_type");
+
+ /* Close temporary dataspace */
+ error = H5Sclose(space1);
+ CHECK(error, FAIL, "H5Sclose");
+
+ /* Copy "none" selection & space */
+ space1 = H5Scopy(none_id);
+ CHECK(space1, FAIL, "H5Scopy");
+
+ /* 'OR' "none" selection with another hyperslab */
+ start[0] = start[1] = 0;
+ stride[0] = stride[1] = 1;
+ count[0] = count[1] = 1;
+ block[0] = block[1] = 5;
+ error = H5Sselect_hyperslab(space1, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify that the new selection is the same as the original hyperslab
+ * ("none" OR block == block) */
+ sel_type = H5Sget_select_type(space1);
+ VERIFY(sel_type, H5S_SEL_HYPERSLABS, "H5Sget_select_type");
+
+ /* Verify that there is only one block */
+ nblocks = H5Sget_select_hyper_nblocks(space1);
+ VERIFY(nblocks, 1, "H5Sget_select_hyper_nblocks");
+
+ /* Retrieve the block defined */
+ HDmemset(blocks, -1, sizeof(blocks)); /* Reset block list */
+ error = H5Sget_select_hyper_blocklist(space1, (hsize_t)0, (hsize_t)nblocks, (hsize_t *)blocks);
+ CHECK(error, FAIL, "H5Sget_select_hyper_blocklist");
+
+ /* Verify that the correct block is defined */
+ VERIFY(blocks[0][0][0], (hsize_t)start[0], "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][0][1], (hsize_t)start[1], "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][1][0], (block[0] - 1), "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][1][1], (block[1] - 1), "H5Sget_select_hyper_blocklist");
+
+ /* Close temporary dataspace */
+ error = H5Sclose(space1);
+ CHECK(error, FAIL, "H5Sclose");
+
+ /* Copy "none" selection & space */
+ space1 = H5Scopy(none_id);
+ CHECK(space1, FAIL, "H5Scopy");
+
+ /* 'AND' "none" selection with another hyperslab */
+ start[0] = start[1] = 0;
+ stride[0] = stride[1] = 1;
+ count[0] = count[1] = 1;
+ block[0] = block[1] = 5;
+ error = H5Sselect_hyperslab(space1, H5S_SELECT_AND, start, stride, count, block);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify that the new selection is the "none" selection
+ * ("none" AND anything == "none") */
+ sel_type = H5Sget_select_type(space1);
+ VERIFY(sel_type, H5S_SEL_NONE, "H5Sget_select_type");
+
+ /* Close temporary dataspace */
+ error = H5Sclose(space1);
+ CHECK(error, FAIL, "H5Sclose");
+
+ /* Copy "none" selection & space */
+ space1 = H5Scopy(none_id);
+ CHECK(space1, FAIL, "H5Scopy");
+
+ /* 'XOR' "none" selection with another hyperslab */
+ start[0] = start[1] = 0;
+ stride[0] = stride[1] = 1;
+ count[0] = count[1] = 1;
+ block[0] = block[1] = 5;
+ error = H5Sselect_hyperslab(space1, H5S_SELECT_XOR, start, stride, count, block);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify that the new selection is the same as the original hyperslab
+ * ("none" XOR block == block) */
+ sel_type = H5Sget_select_type(space1);
+ VERIFY(sel_type, H5S_SEL_HYPERSLABS, "H5Sget_select_type");
+
+ /* Verify that there is only one block */
+ nblocks = H5Sget_select_hyper_nblocks(space1);
+ VERIFY(nblocks, 1, "H5Sget_select_hyper_nblocks");
+
+ /* Retrieve the block defined */
+ HDmemset(blocks, -1, sizeof(blocks)); /* Reset block list */
+ error = H5Sget_select_hyper_blocklist(space1, (hsize_t)0, (hsize_t)nblocks, (hsize_t *)blocks);
+ CHECK(error, FAIL, "H5Sget_select_hyper_blocklist");
+
+ /* Verify that the correct block is defined */
+ VERIFY(blocks[0][0][0], (hsize_t)start[0], "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][0][1], (hsize_t)start[1], "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][1][0], (block[0] - 1), "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][1][1], (block[1] - 1), "H5Sget_select_hyper_blocklist");
+
+ /* Close temporary dataspace */
+ error = H5Sclose(space1);
+ CHECK(error, FAIL, "H5Sclose");
+
+ /* Copy "none" selection & space */
+ space1 = H5Scopy(none_id);
+ CHECK(space1, FAIL, "H5Scopy");
+
+ /* 'NOTB' "none" selection with another hyperslab */
+ start[0] = start[1] = 0;
+ stride[0] = stride[1] = 1;
+ count[0] = count[1] = 1;
+ block[0] = block[1] = 5;
+ error = H5Sselect_hyperslab(space1, H5S_SELECT_NOTB, start, stride, count, block);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify that the new selection is the "none" selection
+ * ("none" minus block == "none") */
+ sel_type = H5Sget_select_type(space1);
+ VERIFY(sel_type, H5S_SEL_NONE, "H5Sget_select_type");
+
+ /* Close temporary dataspace */
+ error = H5Sclose(space1);
+ CHECK(error, FAIL, "H5Sclose");
+
+ /* Copy "none" selection & space */
+ space1 = H5Scopy(none_id);
+ CHECK(space1, FAIL, "H5Scopy");
+
+ /* 'NOTA' "none" selection with another hyperslab */
+ start[0] = start[1] = 0;
+ stride[0] = stride[1] = 1;
+ count[0] = count[1] = 1;
+ block[0] = block[1] = 5;
+ error = H5Sselect_hyperslab(space1, H5S_SELECT_NOTA, start, stride, count, block);
+ CHECK(error, FAIL, "H5Sselect_hyperslab");
+
+ /* Verify that the new selection is the same as the original hyperslab
+ * (block minus "none" == block) */
+ sel_type = H5Sget_select_type(space1);
+ VERIFY(sel_type, H5S_SEL_HYPERSLABS, "H5Sget_select_type");
+
+ /* Verify that there is only one block */
+ nblocks = H5Sget_select_hyper_nblocks(space1);
+ VERIFY(nblocks, 1, "H5Sget_select_hyper_nblocks");
+
+ /* Retrieve the block defined */
+ HDmemset(blocks, -1, sizeof(blocks)); /* Reset block list */
+ error = H5Sget_select_hyper_blocklist(space1, (hsize_t)0, (hsize_t)nblocks, (hsize_t *)blocks);
+ CHECK(error, FAIL, "H5Sget_select_hyper_blocklist");
+
+ /* Verify that the correct block is defined */
+ VERIFY(blocks[0][0][0], (hsize_t)start[0], "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][0][1], (hsize_t)start[1], "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][1][0], (block[0] - 1), "H5Sget_select_hyper_blocklist");
+ VERIFY(blocks[0][1][1], (block[1] - 1), "H5Sget_select_hyper_blocklist");
+
+ /* Close temporary dataspace */
+ error = H5Sclose(space1);
+ CHECK(error, FAIL, "H5Sclose");
+
+ /* Close dataspaces */
+ error = H5Sclose(base_id);
+ CHECK(error, FAIL, "H5Sclose");
+
+ error = H5Sclose(all_id);
+ CHECK(error, FAIL, "H5Sclose");
+
+ error = H5Sclose(none_id);
+ CHECK(error, FAIL, "H5Sclose");
+} /* test_select_combine() */
+
+/*
+ * Typedef for iteration structure used in the fill value tests
+ */
+typedef struct {
+ unsigned short fill_value; /* The fill value to check each element against */
+ size_t curr_coord; /* Index of the next coordinate pair to examine */
+ hsize_t *coords; /* Pointer to selection's coordinates (flattened pairs) */
+} fill_iter_info;
+
+/****************************************************************
+**
+** test_select_hyper_iter3(): Iterator for checking hyperslab iteration
+**
+****************************************************************/
+static herr_t
+test_select_hyper_iter3(void *_elem, hid_t H5_ATTR_UNUSED type_id, unsigned ndim, const hsize_t *point,
+ void *_operator_data)
+{
+ unsigned *tbuf = (unsigned *)_elem; /* temporary buffer pointer */
+ fill_iter_info *iter_info =
+ (fill_iter_info *)_operator_data; /* Get the pointer to the iterator information */
+ hsize_t *coord_ptr; /* Pointer to the coordinate information for a point*/
+
+ /* H5Diterate callback: returns 0 to continue iterating, negative to
+ * abort with an error. Verifies each element holds the fill value and
+ * that iteration visits the expected coordinates in order. */
+
+ /* Check value in current buffer location */
+ if (*tbuf != iter_info->fill_value)
+ return (-1);
+ else {
+ /* Check number of dimensions (coords below are stored as rank-2 pairs) */
+ if (ndim != SPACE7_RANK)
+ return (-1);
+ else {
+ /* Check Coordinates */
+ coord_ptr = iter_info->coords + (2 * iter_info->curr_coord);
+ iter_info->curr_coord++;
+ if (coord_ptr[0] != point[0])
+ return (-1);
+ else if (coord_ptr[1] != point[1])
+ return (-1);
+ else
+ return (0);
+ } /* end else */
+ } /* end else */
+} /* end test_select_hyper_iter3() */
+
+/****************************************************************
+**
+** test_select_fill_all(): Test basic H5S (dataspace) selection code.
+** Tests filling "all" selections
+**
+****************************************************************/
+static void
+test_select_fill_all(void)
+{
+ hid_t sid1; /* Dataspace ID */
+ hsize_t dims1[] = {SPACE7_DIM1, SPACE7_DIM2};
+ unsigned fill_value; /* Fill value */
+ fill_iter_info iter_info; /* Iterator information structure */
+ hsize_t points[SPACE7_DIM1 * SPACE7_DIM2][SPACE7_RANK]; /* Coordinates of selection */
+ unsigned *wbuf, /* buffer to write to disk */
+ *tbuf; /* temporary buffer pointer */
+ unsigned u, v; /* Counters */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Filling 'all' Selections\n"));
+
+ /* Allocate memory buffer */
+ wbuf = (unsigned *)HDmalloc(sizeof(unsigned) * SPACE7_DIM1 * SPACE7_DIM2);
+ CHECK_PTR(wbuf, "HDmalloc");
+
+ /* Initialize memory buffer with distinct per-element values */
+ for (u = 0, tbuf = wbuf; u < SPACE7_DIM1; u++)
+ for (v = 0; v < SPACE7_DIM2; v++)
+ *tbuf++ = (u * SPACE7_DIM2) + v;
+
+ /* Create dataspace for dataset on disk */
+ sid1 = H5Screate_simple(SPACE7_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Space defaults to "all" selection */
+
+ /* Set fill value */
+ fill_value = SPACE7_FILL;
+
+ /* Fill selection in memory */
+ ret = H5Dfill(&fill_value, H5T_NATIVE_UINT, wbuf, H5T_NATIVE_UINT, sid1);
+ CHECK(ret, FAIL, "H5Dfill");
+
+ /* Verify memory buffer the hard way... every element must now hold the fill value */
+ for (u = 0, tbuf = wbuf; u < SPACE7_DIM1; u++)
+ for (v = 0; v < SPACE7_DIM2; v++)
+ if (*tbuf != fill_value)
+ /* Note: 'v' is unsigned, so use %u (was %d, a format/type mismatch) */
+ TestErrPrintf("Error! v=%u, u=%u, *tbuf=%u, fill_value=%u\n", v, u, *tbuf, fill_value);
+
+ /* Set the coordinates of the selection (row-major order, matching iteration) */
+ for (u = 0; u < SPACE7_DIM1; u++)
+ for (v = 0; v < SPACE7_DIM2; v++) {
+ points[(u * SPACE7_DIM2) + v][0] = u;
+ points[(u * SPACE7_DIM2) + v][1] = v;
+ } /* end for */
+
+ /* Initialize the iterator structure */
+ iter_info.fill_value = SPACE7_FILL;
+ iter_info.curr_coord = 0;
+ iter_info.coords = (hsize_t *)points;
+
+ /* Iterate through selection, verifying correct data */
+ ret = H5Diterate(wbuf, H5T_NATIVE_UINT, sid1, test_select_hyper_iter3, &iter_info);
+ CHECK(ret, FAIL, "H5Diterate");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+} /* test_select_fill_all() */
+
+/****************************************************************
+**
+** test_select_fill_point(): Test basic H5S (dataspace) selection code.
+** Tests filling "point" selections
+**
+****************************************************************/
+static void
+test_select_fill_point(hssize_t *offset)
+{
+ hid_t sid1; /* Dataspace ID */
+ hsize_t dims1[] = {SPACE7_DIM1, SPACE7_DIM2};
+ hssize_t real_offset[SPACE7_RANK]; /* Actual offset to use (zero if 'offset' is NULL) */
+ hsize_t points[5][SPACE7_RANK] = {{2, 4}, {3, 8}, {8, 4}, {7, 5}, {7, 7}};
+ size_t num_points = 5; /* Number of points selected */
+ int fill_value; /* Fill value */
+ fill_iter_info iter_info; /* Iterator information structure */
+ unsigned *wbuf, /* buffer to write to disk */
+ *tbuf; /* temporary buffer pointer */
+ unsigned u, v, w; /* Counters */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Filling 'point' Selections\n"));
+
+ /* Allocate memory buffer */
+ wbuf = (unsigned *)HDmalloc(sizeof(unsigned) * SPACE7_DIM1 * SPACE7_DIM2);
+ CHECK_PTR(wbuf, "HDmalloc");
+
+ /* Initialize memory buffer with distinct per-element values.
+ * NOTE(review): the cast binds only to (u * SPACE7_DIM2); the sum is
+ * then computed in int. Harmless for small dims, but presumably
+ * (unsigned short)((u * SPACE7_DIM2) + v) was intended — confirm. */
+ for (u = 0, tbuf = wbuf; u < SPACE7_DIM1; u++)
+ for (v = 0; v < SPACE7_DIM2; v++)
+ *tbuf++ = (unsigned short)(u * SPACE7_DIM2) + v;
+
+ /* Create dataspace for dataset on disk */
+ sid1 = H5Screate_simple(SPACE7_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Sselect_elements" == NULL ? "" : "H5Screate_simple");
+
+ /* Select "point" selection */
+ ret = H5Sselect_elements(sid1, H5S_SELECT_SET, num_points, (const hsize_t *)points);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ if (offset != NULL) {
+ HDmemcpy(real_offset, offset, SPACE7_RANK * sizeof(hssize_t));
+
+ /* Set offset, if provided */
+ ret = H5Soffset_simple(sid1, real_offset);
+ CHECK(ret, FAIL, "H5Soffset_simple");
+ } /* end if */
+ else
+ HDmemset(real_offset, 0, SPACE7_RANK * sizeof(hssize_t));
+
+ /* Set fill value */
+ fill_value = SPACE7_FILL;
+
+ /* Fill selection in memory */
+ ret = H5Dfill(&fill_value, H5T_NATIVE_INT, wbuf, H5T_NATIVE_UINT, sid1);
+ CHECK(ret, FAIL, "H5Dfill");
+
+ /* Verify memory buffer the hard way: selected (offset-adjusted) points
+ * must hold the fill value, everything else its original value */
+ for (u = 0, tbuf = wbuf; u < SPACE7_DIM1; u++)
+ for (v = 0; v < SPACE7_DIM2; v++, tbuf++) {
+ for (w = 0; w < (unsigned)num_points; w++) {
+ if (u == (unsigned)(points[w][0] + (hsize_t)real_offset[0]) &&
+ v == (unsigned)(points[w][1] + (hsize_t)real_offset[1])) {
+ if (*tbuf != (unsigned)fill_value)
+ TestErrPrintf("Error! v=%u, u=%u, *tbuf=%u, fill_value=%u\n", v, u, *tbuf,
+ (unsigned)fill_value);
+ break;
+ } /* end if */
+ } /* end for */
+ /* 'v' and 'u' are unsigned, so use %u (was %d, a format/type mismatch) */
+ if (w == (unsigned)num_points && *tbuf != ((u * SPACE7_DIM2) + v))
+ TestErrPrintf("Error! v=%u, u=%u, *tbuf=%u, should be: %u\n", v, u, *tbuf,
+ ((u * SPACE7_DIM2) + v));
+ } /* end for */
+
+ /* Initialize the iterator structure */
+ iter_info.fill_value = SPACE7_FILL;
+ iter_info.curr_coord = 0;
+ iter_info.coords = (hsize_t *)points;
+
+ /* Add in the offset, since H5Diterate reports offset-adjusted coordinates */
+ for (u = 0; u < (unsigned)num_points; u++) {
+ points[u][0] = (hsize_t)((hssize_t)points[u][0] + real_offset[0]);
+ points[u][1] = (hsize_t)((hssize_t)points[u][1] + real_offset[1]);
+ } /* end for */
+
+ /* Iterate through selection, verifying correct data */
+ ret = H5Diterate(wbuf, H5T_NATIVE_UINT, sid1, test_select_hyper_iter3, &iter_info);
+ CHECK(ret, FAIL, "H5Diterate");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+} /* test_select_fill_point() */
+
+/****************************************************************
+**
+** test_select_fill_hyper_simple(): Test basic H5S (dataspace) selection code.
+** Tests filling "simple" (i.e. one block) hyperslab selections
+**
+****************************************************************/
+static void
+test_select_fill_hyper_simple(hssize_t *offset)
+{
+ hid_t sid1; /* Dataspace ID */
+ hsize_t dims1[] = {SPACE7_DIM1, SPACE7_DIM2};
+ hssize_t real_offset[SPACE7_RANK]; /* Actual offset to use (zero if 'offset' is NULL) */
+ hsize_t start[SPACE7_RANK]; /* Hyperslab start */
+ hsize_t count[SPACE7_RANK]; /* Hyperslab block size */
+ size_t num_points; /* Number of points in selection */
+ hsize_t points[16][SPACE7_RANK]; /* Coordinates selected */
+ int fill_value; /* Fill value */
+ fill_iter_info iter_info; /* Iterator information structure */
+ unsigned *wbuf, /* buffer to write to disk */
+ *tbuf; /* temporary buffer pointer */
+ unsigned u, v; /* Counters */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Filling Simple 'hyperslab' Selections\n"));
+
+ /* Allocate memory buffer */
+ wbuf = (unsigned *)HDmalloc(sizeof(unsigned) * SPACE7_DIM1 * SPACE7_DIM2);
+ CHECK_PTR(wbuf, "HDmalloc");
+
+ /* Initialize memory buffer with distinct per-element values */
+ for (u = 0, tbuf = wbuf; u < SPACE7_DIM1; u++)
+ for (v = 0; v < SPACE7_DIM2; v++)
+ *tbuf++ = (unsigned short)(u * SPACE7_DIM2) + v;
+
+ /* Create dataspace for dataset on disk */
+ sid1 = H5Screate_simple(SPACE7_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Select "hyperslab" selection: a single 4x4 block at (3,3) */
+ start[0] = 3;
+ start[1] = 3;
+ count[0] = 4;
+ count[1] = 4;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ if (offset != NULL) {
+ HDmemcpy(real_offset, offset, SPACE7_RANK * sizeof(hssize_t));
+
+ /* Set offset, if provided */
+ ret = H5Soffset_simple(sid1, real_offset);
+ CHECK(ret, FAIL, "H5Soffset_simple");
+ } /* end if */
+ else
+ HDmemset(real_offset, 0, SPACE7_RANK * sizeof(hssize_t));
+
+ /* Set fill value */
+ fill_value = SPACE7_FILL;
+
+ /* Fill selection in memory */
+ ret = H5Dfill(&fill_value, H5T_NATIVE_INT, wbuf, H5T_NATIVE_UINT, sid1);
+ CHECK(ret, FAIL, "H5Dfill");
+
+ /* Verify memory buffer the hard way: elements inside the (offset-adjusted)
+ * block must hold the fill value, everything else its original value */
+ for (u = 0, tbuf = wbuf; u < SPACE7_DIM1; u++)
+ for (v = 0; v < SPACE7_DIM2; v++, tbuf++) {
+ if ((u >= (unsigned)((hssize_t)start[0] + real_offset[0]) &&
+ u < (unsigned)((hssize_t)(start[0] + count[0]) + real_offset[0])) &&
+ (v >= (unsigned)((hssize_t)start[1] + real_offset[1]) &&
+ v < (unsigned)((hssize_t)(start[1] + count[1]) + real_offset[1]))) {
+ if (*tbuf != (unsigned)fill_value)
+ TestErrPrintf("Error! v=%u, u=%u, *tbuf=%u, fill_value=%u\n", v, u, *tbuf,
+ (unsigned)fill_value);
+ } /* end if */
+ else {
+ if (*tbuf != ((unsigned)(u * SPACE7_DIM2) + v))
+ TestErrPrintf("Error! v=%u, u=%u, *tbuf=%u, should be: %u\n", v, u, *tbuf,
+ ((u * SPACE7_DIM2) + v));
+ } /* end else */
+ } /* end for */
+
+ /* Initialize the iterator structure */
+ iter_info.fill_value = SPACE7_FILL;
+ iter_info.curr_coord = 0;
+ iter_info.coords = (hsize_t *)points;
+
+ /* Set the coordinates of the selection (with the offset) */
+ for (u = 0, num_points = 0; u < (unsigned)count[0]; u++)
+ for (v = 0; v < (unsigned)count[1]; v++, num_points++) {
+ points[num_points][0] = (hsize_t)((hssize_t)(u + start[0]) + real_offset[0]);
+ points[num_points][1] = (hsize_t)((hssize_t)(v + start[1]) + real_offset[1]);
+ } /* end for */
+
+ /* Iterate through selection, verifying correct data */
+ ret = H5Diterate(wbuf, H5T_NATIVE_UINT, sid1, test_select_hyper_iter3, &iter_info);
+ CHECK(ret, FAIL, "H5Diterate");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+} /* test_select_fill_hyper_simple() */
+
+/****************************************************************
+**
+** test_select_fill_hyper_regular(): Test basic H5S (dataspace) selection code.
+** Tests filling "regular" (i.e. strided block) hyperslab selections
+**
+****************************************************************/
+static void
+test_select_fill_hyper_regular(hssize_t *offset)
+{
+ hid_t sid1; /* Dataspace ID */
+ hsize_t dims1[] = {SPACE7_DIM1, SPACE7_DIM2};
+ hssize_t real_offset[SPACE7_RANK]; /* Actual offset to use (zero if 'offset' is NULL) */
+ hsize_t start[SPACE7_RANK]; /* Hyperslab start */
+ hsize_t stride[SPACE7_RANK]; /* Hyperslab stride size */
+ hsize_t count[SPACE7_RANK]; /* Hyperslab block count */
+ hsize_t block[SPACE7_RANK]; /* Hyperslab block size */
+ hsize_t points[16][SPACE7_RANK] = {
+ {2, 2}, {2, 3}, {2, 6}, {2, 7}, {3, 2}, {3, 3}, {3, 6}, {3, 7},
+ {6, 2}, {6, 3}, {6, 6}, {6, 7}, {7, 2}, {7, 3}, {7, 6}, {7, 7},
+ };
+ size_t num_points = 16; /* Number of points selected */
+ int fill_value; /* Fill value */
+ fill_iter_info iter_info; /* Iterator information structure */
+ unsigned *wbuf, /* buffer to write to disk */
+ *tbuf; /* temporary buffer pointer */
+ unsigned u, v, w; /* Counters */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Filling Regular 'hyperslab' Selections\n"));
+
+ /* Allocate memory buffer */
+ wbuf = (unsigned *)HDmalloc(sizeof(unsigned) * SPACE7_DIM1 * SPACE7_DIM2);
+ CHECK_PTR(wbuf, "HDmalloc");
+
+ /* Initialize memory buffer with distinct per-element values */
+ for (u = 0, tbuf = wbuf; u < SPACE7_DIM1; u++)
+ for (v = 0; v < SPACE7_DIM2; v++)
+ *tbuf++ = (u * SPACE7_DIM2) + v;
+
+ /* Create dataspace for dataset on disk */
+ sid1 = H5Screate_simple(SPACE7_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Select "hyperslab" selection: 2x2 grid of 2x2 blocks starting at (2,2)
+ * with stride 4 — matches the 'points' list above */
+ start[0] = 2;
+ start[1] = 2;
+ stride[0] = 4;
+ stride[1] = 4;
+ count[0] = 2;
+ count[1] = 2;
+ block[0] = 2;
+ block[1] = 2;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ if (offset != NULL) {
+ HDmemcpy(real_offset, offset, SPACE7_RANK * sizeof(hssize_t));
+
+ /* Set offset, if provided */
+ ret = H5Soffset_simple(sid1, real_offset);
+ CHECK(ret, FAIL, "H5Soffset_simple");
+ } /* end if */
+ else
+ HDmemset(real_offset, 0, SPACE7_RANK * sizeof(hssize_t));
+
+ /* Set fill value */
+ fill_value = SPACE7_FILL;
+
+ /* Fill selection in memory */
+ ret = H5Dfill(&fill_value, H5T_NATIVE_INT, wbuf, H5T_NATIVE_UINT, sid1);
+ CHECK(ret, FAIL, "H5Dfill");
+
+ /* Verify memory buffer the hard way: selected (offset-adjusted) points
+ * must hold the fill value, everything else its original value */
+ for (u = 0, tbuf = wbuf; u < SPACE7_DIM1; u++)
+ for (v = 0; v < SPACE7_DIM2; v++, tbuf++) {
+ for (w = 0; w < (unsigned)num_points; w++) {
+ if (u == (unsigned)((hssize_t)points[w][0] + real_offset[0]) &&
+ v == (unsigned)((hssize_t)points[w][1] + real_offset[1])) {
+ if (*tbuf != (unsigned)fill_value)
+ TestErrPrintf("Error! v=%u, u=%u, *tbuf=%u, fill_value=%u\n", v, u, *tbuf,
+ (unsigned)fill_value);
+ break;
+ } /* end if */
+ } /* end for */
+ /* 'v' and 'u' are unsigned, so use %u (was %d, a format/type mismatch) */
+ if (w == (unsigned)num_points && *tbuf != ((u * SPACE7_DIM2) + v))
+ TestErrPrintf("Error! v=%u, u=%u, *tbuf=%u, should be: %u\n", v, u, *tbuf,
+ ((u * SPACE7_DIM2) + v));
+ } /* end for */
+
+ /* Initialize the iterator structure */
+ iter_info.fill_value = SPACE7_FILL;
+ iter_info.curr_coord = 0;
+ iter_info.coords = (hsize_t *)points;
+
+ /* Add in the offset, since H5Diterate reports offset-adjusted coordinates */
+ for (u = 0; u < (unsigned)num_points; u++) {
+ points[u][0] = (hsize_t)((hssize_t)points[u][0] + real_offset[0]);
+ points[u][1] = (hsize_t)((hssize_t)points[u][1] + real_offset[1]);
+ } /* end for */
+
+ /* Iterate through selection, verifying correct data */
+ ret = H5Diterate(wbuf, H5T_NATIVE_UINT, sid1, test_select_hyper_iter3, &iter_info);
+ CHECK(ret, FAIL, "H5Diterate");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+} /* test_select_fill_hyper_regular() */
+
+/****************************************************************
+**
+** test_select_fill_hyper_irregular(): Test basic H5S (dataspace) selection code.
+** Tests filling "irregular" (i.e. combined blocks) hyperslab selections
+**
+****************************************************************/
+/* 'offset' is an optional per-dimension selection offset; pass NULL to use a zero offset */
+static void
+test_select_fill_hyper_irregular(hssize_t *offset)
+{
+ hid_t sid1; /* Dataspace ID */
+ hsize_t dims1[] = {SPACE7_DIM1, SPACE7_DIM2};
+ hssize_t real_offset[SPACE7_RANK]; /* Actual offset to use */
+ hsize_t start[SPACE7_RANK]; /* Hyperslab start */
+ hsize_t count[SPACE7_RANK]; /* Hyperslab block count */
+ hsize_t points[32][SPACE7_RANK] = {
+ /* Yes, some of these are duplicated, since the two OR'd blocks overlap.. */
+ {2, 2}, {2, 3}, {2, 4}, {2, 5}, {3, 2}, {3, 3}, {3, 4}, {3, 5}, {4, 2}, {4, 3}, {4, 4},
+ {4, 5}, {5, 2}, {5, 3}, {5, 4}, {5, 5}, {4, 4}, {4, 5}, {4, 6}, {4, 7}, {5, 4}, {5, 5},
+ {5, 6}, {5, 7}, {6, 4}, {6, 5}, {6, 6}, {6, 7}, {7, 4}, {7, 5}, {7, 6}, {7, 7},
+ };
+ hsize_t iter_points[28][SPACE7_RANK] = {
+ /* Coordinates, as iterated through (duplicates removed, row-major order) */
+ {2, 2}, {2, 3}, {2, 4}, {2, 5}, {3, 2}, {3, 3}, {3, 4}, {3, 5}, {4, 2}, {4, 3},
+ {4, 4}, {4, 5}, {4, 6}, {4, 7}, {5, 2}, {5, 3}, {5, 4}, {5, 5}, {5, 6}, {5, 7},
+ {6, 4}, {6, 5}, {6, 6}, {6, 7}, {7, 4}, {7, 5}, {7, 6}, {7, 7},
+ };
+ size_t num_points = 32; /* Number of points selected */
+ size_t num_iter_points = 28; /* Number of resulting points */
+ int fill_value; /* Fill value */
+ fill_iter_info iter_info; /* Iterator information structure */
+ unsigned *wbuf, /* buffer to write to disk */
+ *tbuf; /* temporary buffer pointer */
+ unsigned u, v, w; /* Counters */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Filling Irregular 'hyperslab' Selections\n"));
+
+ /* Allocate memory buffer */
+ wbuf = (unsigned *)HDmalloc(sizeof(unsigned) * SPACE7_DIM1 * SPACE7_DIM2);
+ CHECK_PTR(wbuf, "HDmalloc");
+
+ /* Initialize memory buffer: each element gets a unique, position-derived value */
+ for (u = 0, tbuf = wbuf; u < SPACE7_DIM1; u++)
+ for (v = 0; v < SPACE7_DIM2; v++)
+ *tbuf++ = (u * SPACE7_DIM2) + v;
+
+ /* Create dataspace for dataset on disk */
+ sid1 = H5Screate_simple(SPACE7_RANK, dims1, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Select first "hyperslab" selection: 4x4 block anchored at (2,2) */
+ start[0] = 2;
+ start[1] = 2;
+ count[0] = 4;
+ count[1] = 4;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Combine with second "hyperslab" selection: 4x4 block anchored at (4,4),
+ * overlapping the first block to produce an irregular combined selection */
+ start[0] = 4;
+ start[1] = 4;
+ count[0] = 4;
+ count[1] = 4;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_OR, start, NULL, count, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ if (offset != NULL) {
+ HDmemcpy(real_offset, offset, SPACE7_RANK * sizeof(hssize_t));
+
+ /* Set offset, if provided */
+ ret = H5Soffset_simple(sid1, real_offset);
+ CHECK(ret, FAIL, "H5Soffset_simple");
+ } /* end if */
+ else
+ HDmemset(real_offset, 0, SPACE7_RANK * sizeof(hssize_t));
+
+ /* Set fill value */
+ fill_value = SPACE7_FILL;
+
+ /* Fill selection in memory */
+ ret = H5Dfill(&fill_value, H5T_NATIVE_INT, wbuf, H5T_NATIVE_UINT, sid1);
+ CHECK(ret, FAIL, "H5Dfill");
+
+ /* Verify memory buffer the hard way: every element covered by the
+ * (offset-adjusted) selection must equal the fill value, and every other
+ * element must retain its original position-derived value */
+ for (u = 0, tbuf = wbuf; u < SPACE7_DIM1; u++)
+ for (v = 0; v < SPACE7_DIM2; v++, tbuf++) {
+ for (w = 0; w < (unsigned)num_points; w++) {
+ if (u == (unsigned)((hssize_t)points[w][0] + real_offset[0]) &&
+ v == (unsigned)((hssize_t)points[w][1] + real_offset[1])) {
+ if (*tbuf != (unsigned)fill_value)
+ TestErrPrintf("Error! v=%u, u=%u, *tbuf=%u, fill_value=%u\n", v, u, *tbuf,
+ (unsigned)fill_value);
+ break;
+ } /* end if */
+ } /* end for */
+ if (w == (unsigned)num_points && *tbuf != ((u * SPACE7_DIM2) + v))
+ TestErrPrintf("Error! v=%u, u=%u, *tbuf=%u, should be: %u\n", v, u, *tbuf,
+ ((u * SPACE7_DIM2) + v));
+ } /* end for */
+
+ /* Initialize the iterator structure */
+ iter_info.fill_value = SPACE7_FILL;
+ iter_info.curr_coord = 0;
+ iter_info.coords = (hsize_t *)iter_points;
+
+ /* Add the offset into the expected iteration coordinates, so the iterator
+ * callback compares against offset-adjusted positions */
+ for (u = 0; u < (unsigned)num_iter_points; u++) {
+ iter_points[u][0] = (hsize_t)((hssize_t)iter_points[u][0] + real_offset[0]);
+ iter_points[u][1] = (hsize_t)((hssize_t)iter_points[u][1] + real_offset[1]);
+ } /* end for */
+
+ /* Iterate through selection, verifying correct data
+ * (test_select_hyper_iter3 is defined elsewhere in this file; presumably it
+ * checks each visited element against iter_info -- confirm at its definition) */
+ ret = H5Diterate(wbuf, H5T_NATIVE_UINT, sid1, test_select_hyper_iter3, &iter_info);
+ CHECK(ret, FAIL, "H5Diterate");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf);
+} /* test_select_fill_hyper_irregular() */
+
+/****************************************************************
+**
+** test_select_none(): Test basic H5S (dataspace) selection code.
+** Tests I/O on 0-sized point selections
+**
+****************************************************************/
+/****************************************************************
+** Verifies that reads and writes through "none" (0-element)
+** selections succeed, including NULL-buffer and datatype-
+** conversion variants, without touching the buffers.
+****************************************************************/
+static void
+test_select_none(void)
+{
+ hid_t file_id; /* HDF5 file ID */
+ hid_t dset_id; /* Dataset ID */
+ hid_t fspace_id; /* File dataspace ID */
+ hid_t mspace_id; /* Memory dataspace ID */
+ hsize_t fdims[] = {SPACE7_DIM1, SPACE7_DIM2};
+ hsize_t mdims[] = {SPACE7_DIM1, SPACE7_DIM2};
+ uint8_t *write_buf; /* Buffer written to disk */
+ uint8_t *read_buf; /* Buffer read back from disk */
+ int row, col; /* Element coordinates */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing I/O on 0-sized Selections\n"));
+
+ /* Allocate the write buffer and a zero-filled read buffer */
+ write_buf = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE7_DIM1 * SPACE7_DIM2);
+ CHECK_PTR(write_buf, "HDmalloc");
+ read_buf = (uint8_t *)HDcalloc(sizeof(uint8_t), SPACE7_DIM1 * SPACE7_DIM2);
+ CHECK_PTR(read_buf, "HDcalloc");
+
+ /* Give each write-buffer element a distinct, position-derived value */
+ for (row = 0; row < SPACE7_DIM1; row++)
+ for (col = 0; col < SPACE7_DIM2; col++)
+ write_buf[(row * SPACE7_DIM2) + col] = (uint8_t)((row * SPACE7_DIM2) + col);
+
+ /* Create the file */
+ file_id = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(file_id, FAIL, "H5Fcreate");
+
+ /* Create the on-disk dataspace */
+ fspace_id = H5Screate_simple(SPACE7_RANK, fdims, NULL);
+ CHECK(fspace_id, FAIL, "H5Screate_simple");
+
+ /* Create the in-memory dataspace */
+ mspace_id = H5Screate_simple(SPACE7_RANK, mdims, NULL);
+ CHECK(mspace_id, FAIL, "H5Screate_simple");
+
+ /* Create the dataset */
+ dset_id = H5Dcreate2(file_id, "Dataset1", H5T_NATIVE_UCHAR, fspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dcreate2");
+
+ /* Reduce both the disk and memory selections to zero elements */
+ ret = H5Sselect_none(fspace_id);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ ret = H5Sselect_none(mspace_id);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ /* Read "nothing" before the dataset's storage has been allocated */
+ ret = H5Dread(dset_id, H5T_NATIVE_UCHAR, mspace_id, fspace_id, H5P_DEFAULT, read_buf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Write "nothing" */
+ ret = H5Dwrite(dset_id, H5T_NATIVE_UCHAR, mspace_id, fspace_id, H5P_DEFAULT, write_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Write "nothing", this time forcing a datatype conversion :-) */
+ ret = H5Dwrite(dset_id, H5T_NATIVE_INT, mspace_id, fspace_id, H5P_DEFAULT, write_buf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Write "nothing" through a NULL buffer pointer */
+ ret = H5Dwrite(dset_id, H5T_NATIVE_INT, mspace_id, fspace_id, H5P_DEFAULT, NULL);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read "nothing" through a NULL buffer pointer */
+ ret = H5Dread(dset_id, H5T_NATIVE_INT, mspace_id, fspace_id, H5P_DEFAULT, NULL);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Release the memory dataspace */
+ ret = H5Sclose(mspace_id);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Release the disk dataspace */
+ ret = H5Sclose(fspace_id);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Release the dataset */
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Release the file */
+ ret = H5Fclose(file_id);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(write_buf);
+ HDfree(read_buf);
+} /* test_select_none() */
+
+/****************************************************************
+**
+** test_scalar_select(): Test basic H5S (dataspace) selection code.
+** Tests selections on scalar dataspaces
+**
+****************************************************************/
+/* Tests I/O of single elements between a 2-D memory buffer and a scalar
+** (single-element) dataset on disk, via point, hyperslab, and "none"
+** memory selections, with and without datatype conversion. */
+static void
+test_scalar_select(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hsize_t dims2[] = {SPACE7_DIM1, SPACE7_DIM2};
+ hsize_t coord1[SPACE7_RANK]; /* Coordinates for point selection */
+ hsize_t start[SPACE7_RANK]; /* Hyperslab start */
+ hsize_t count[SPACE7_RANK]; /* Hyperslab block count */
+ uint8_t *wbuf_uint8, /* buffer to write to disk */
+ rval_uint8, /* value read back in */
+ *tbuf_uint8; /* temporary buffer pointer */
+ unsigned short *wbuf_ushort, /* another buffer to write to disk */
+ rval_ushort, /* value read back in */
+ *tbuf_ushort; /* temporary buffer pointer */
+ int i, j; /* Counters */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing I/O on Selections in Scalar Dataspaces\n"));
+
+ /* Allocate write & read buffers */
+ wbuf_uint8 = (uint8_t *)HDmalloc(sizeof(uint8_t) * SPACE7_DIM1 * SPACE7_DIM2);
+ CHECK_PTR(wbuf_uint8, "HDmalloc");
+ wbuf_ushort = (unsigned short *)HDmalloc(sizeof(unsigned short) * SPACE7_DIM1 * SPACE7_DIM2);
+ CHECK_PTR(wbuf_ushort, "HDmalloc");
+
+ /* Initialize write buffers with distinct, position-derived patterns
+ * (the ushort buffer uses transposed coordinates so the two differ) */
+ for (i = 0, tbuf_uint8 = wbuf_uint8, tbuf_ushort = wbuf_ushort; i < SPACE7_DIM1; i++)
+ for (j = 0; j < SPACE7_DIM2; j++) {
+ *tbuf_uint8++ = (uint8_t)((i * SPACE7_DIM2) + j);
+ *tbuf_ushort++ = (unsigned short)((j * SPACE7_DIM2) + i);
+ } /* end for */
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create scalar dataspace for dataset */
+ sid1 = H5Screate(H5S_SCALAR);
+ CHECK(sid1, FAIL, "H5Screate"); /* fixed: label previously said "H5Screate_simple" */
+
+ /* Create dataspace for writing buffer */
+ sid2 = H5Screate_simple(SPACE7_RANK, dims2, NULL);
+ CHECK(sid2, FAIL, "H5Screate_simple");
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, "Dataset1", H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Select one element in memory with a point selection */
+ coord1[0] = 0;
+ coord1[1] = 2;
+ ret = H5Sselect_elements(sid2, H5S_SELECT_SET, (size_t)1, (const hsize_t *)&coord1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Write single point to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf_uint8);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read scalar element from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid1, sid1, H5P_DEFAULT, &rval_uint8);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check value read back in: element (0,2) is at linear offset 2 */
+ if (rval_uint8 != *(wbuf_uint8 + 2))
+ TestErrPrintf("Error! rval=%u, should be: *(wbuf+2)=%u\n", (unsigned)rval_uint8,
+ (unsigned)*(wbuf_uint8 + 2));
+
+ /* Write single point to disk (with a datatype conversion) */
+ ret = H5Dwrite(dataset, H5T_NATIVE_USHORT, sid2, sid1, H5P_DEFAULT, wbuf_ushort);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read scalar element from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_USHORT, sid1, sid1, H5P_DEFAULT, &rval_ushort);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check value read back in */
+ if (rval_ushort != *(wbuf_ushort + 2))
+ TestErrPrintf("Error! rval=%u, should be: *(wbuf+2)=%u\n", (unsigned)rval_ushort,
+ (unsigned)*(wbuf_ushort + 2));
+
+ /* Select one element in memory with a hyperslab selection */
+ start[0] = 4;
+ start[1] = 3;
+ count[0] = 1;
+ count[1] = 1;
+ ret = H5Sselect_hyperslab(sid2, H5S_SELECT_SET, start, NULL, count, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Write single hyperslab element to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf_uint8);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read scalar element from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid1, sid1, H5P_DEFAULT, &rval_uint8);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check value read back in: element (4,3) is at linear offset (SPACE7_DIM2*4)+3 */
+ if (rval_uint8 != *(wbuf_uint8 + (SPACE7_DIM2 * 4) + 3))
+ TestErrPrintf("Error! rval=%u, should be: *(wbuf+(SPACE7_DIM2*4)+3)=%u\n", (unsigned)rval_uint8,
+ (unsigned)*(wbuf_uint8 + (SPACE7_DIM2 * 4) + 3));
+
+ /* Write single hyperslab element to disk (with a datatype conversion) */
+ ret = H5Dwrite(dataset, H5T_NATIVE_USHORT, sid2, sid1, H5P_DEFAULT, wbuf_ushort);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read scalar element from disk */
+ ret = H5Dread(dataset, H5T_NATIVE_USHORT, sid1, sid1, H5P_DEFAULT, &rval_ushort);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check value read back in */
+ if (rval_ushort != *(wbuf_ushort + (SPACE7_DIM2 * 4) + 3))
+ TestErrPrintf("Error! rval=%u, should be: *(wbuf+(SPACE7_DIM2*4)+3)=%u\n", (unsigned)rval_ushort,
+ (unsigned)*(wbuf_ushort + (SPACE7_DIM2 * 4) + 3));
+
+ /* Select no elements in memory & file with "none" selections */
+ ret = H5Sselect_none(sid1);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ ret = H5Sselect_none(sid2);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ /* Write no data to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, wbuf_uint8);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Write no data to disk (with a datatype conversion) */
+ ret = H5Dwrite(dataset, H5T_NATIVE_USHORT, sid2, sid1, H5P_DEFAULT, wbuf_ushort);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Free memory buffers */
+ HDfree(wbuf_uint8);
+ HDfree(wbuf_ushort);
+} /* test_scalar_select() */
+
+/****************************************************************
+**
+** test_scalar_select2(): Tests selections on scalar dataspace,
+** verify H5Sselect_hyperslab and H5Sselect_elements fails for
+** scalar dataspace.
+**
+****************************************************************/
+/* Verifies that element and hyperslab selections FAIL on a scalar
+** dataspace, while "none" and "all" selections succeed. */
+static void
+test_scalar_select2(void)
+{
+ hid_t sid; /* Dataspace ID */
+ hsize_t coord1[1]; /* Coordinates for point selection */
+ hsize_t start[1]; /* Hyperslab start */
+ hsize_t count[1]; /* Hyperslab block count */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(6, ("Testing Selections in Scalar Dataspaces\n"));
+
+ /* Create scalar dataspace for dataset */
+ sid = H5Screate(H5S_SCALAR);
+ CHECK(sid, FAIL, "H5Screate"); /* fixed: label previously said "H5Screate_simple" */
+
+ /* Attempt a point selection -- must fail on a scalar dataspace */
+ coord1[0] = 0;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Sselect_elements(sid, H5S_SELECT_SET, (size_t)1, (const hsize_t *)&coord1);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Sselect_elements");
+
+ /* Attempt a hyperslab selection -- must fail on a scalar dataspace
+ * (count of 0 is irrelevant; the call is expected to be rejected outright) */
+ start[0] = 0;
+ count[0] = 0;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL, count, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Select no elements in memory & file with "none" selection */
+ ret = H5Sselect_none(sid);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ /* Select all elements in memory & file with "all" selection */
+ ret = H5Sselect_all(sid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* test_scalar_select2() */
+
+/****************************************************************
+**
+** test_scalar_select3(): Test basic H5S (dataspace) selection code.
+** Tests selections on scalar dataspaces in memory
+**
+****************************************************************/
+/* Tests I/O using a scalar MEMORY dataspace against single-element
+** selections (point and hyperslab) in a 2-D dataset on disk,
+** with and without datatype conversion. */
+static void
+test_scalar_select3(void)
+{
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1, sid2; /* Dataspace ID */
+ hsize_t dims2[] = {SPACE7_DIM1, SPACE7_DIM2};
+ hsize_t coord1[SPACE7_RANK]; /* Coordinates for point selection */
+ hsize_t start[SPACE7_RANK]; /* Hyperslab start */
+ hsize_t count[SPACE7_RANK]; /* Hyperslab block count */
+ uint8_t wval_uint8, /* Value written out */
+ rval_uint8; /* Value read in */
+ unsigned short wval_ushort, /* Another value written out */
+ rval_ushort; /* Another value read in */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing I/O on Selections in Scalar Dataspaces in Memory\n"));
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset */
+ sid1 = H5Screate_simple(SPACE7_RANK, dims2, NULL);
+ CHECK(sid1, FAIL, "H5Screate_simple");
+
+ /* Create scalar dataspace for writing buffer */
+ sid2 = H5Screate(H5S_SCALAR);
+ CHECK(sid2, FAIL, "H5Screate"); /* fixed: label previously said "H5Screate_simple" */
+
+ /* Create a dataset */
+ dataset = H5Dcreate2(fid1, "Dataset1", H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dataset, FAIL, "H5Dcreate2");
+
+ /* Select one element in file with a point selection */
+ coord1[0] = 0;
+ coord1[1] = 2;
+ ret = H5Sselect_elements(sid1, H5S_SELECT_SET, (size_t)1, (const hsize_t *)&coord1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Write single point to disk */
+ wval_uint8 = 12;
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, &wval_uint8);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read scalar element from disk */
+ rval_uint8 = 0;
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, &rval_uint8);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check value read back in */
+ if (rval_uint8 != wval_uint8)
+ TestErrPrintf("%u: Error! rval=%u, should be: wval=%u\n", (unsigned)__LINE__, (unsigned)rval_uint8,
+ (unsigned)wval_uint8);
+
+ /* Write single point to disk (with a datatype conversion) */
+ wval_ushort = 23;
+ ret = H5Dwrite(dataset, H5T_NATIVE_USHORT, sid2, sid1, H5P_DEFAULT, &wval_ushort);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read scalar element from disk */
+ rval_ushort = 0;
+ ret = H5Dread(dataset, H5T_NATIVE_USHORT, sid2, sid1, H5P_DEFAULT, &rval_ushort);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check value read back in */
+ if (rval_ushort != wval_ushort)
+ TestErrPrintf("%u: Error! rval=%u, should be: wval=%u\n", (unsigned)__LINE__, (unsigned)rval_ushort,
+ (unsigned)wval_ushort);
+
+ /* Select one element in file with a hyperslab selection */
+ start[0] = 4;
+ start[1] = 3;
+ count[0] = 1;
+ count[1] = 1;
+ ret = H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Write single hyperslab element to disk */
+ wval_uint8 = 92;
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, &wval_uint8);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read scalar element from disk */
+ rval_uint8 = 0;
+ ret = H5Dread(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, &rval_uint8);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check value read back in */
+ if (rval_uint8 != wval_uint8)
+ TestErrPrintf("%u: Error! rval=%u, should be: wval=%u\n", (unsigned)__LINE__, (unsigned)rval_uint8,
+ (unsigned)wval_uint8);
+
+ /* Write single hyperslab element to disk (with a datatype conversion) */
+ wval_ushort = 107;
+ ret = H5Dwrite(dataset, H5T_NATIVE_USHORT, sid2, sid1, H5P_DEFAULT, &wval_ushort);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Read scalar element from disk */
+ rval_ushort = 0;
+ ret = H5Dread(dataset, H5T_NATIVE_USHORT, sid2, sid1, H5P_DEFAULT, &rval_ushort);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check value read back in */
+ if (rval_ushort != wval_ushort)
+ TestErrPrintf("%u: Error! rval=%u, should be: wval=%u\n", (unsigned)__LINE__, (unsigned)rval_ushort,
+ (unsigned)wval_ushort);
+
+ /* Select no elements in memory & file with "none" selections */
+ ret = H5Sselect_none(sid1);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ ret = H5Sselect_none(sid2);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ /* Write no data to disk */
+ ret = H5Dwrite(dataset, H5T_NATIVE_UCHAR, sid2, sid1, H5P_DEFAULT, &wval_uint8);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Write no data to disk (with a datatype conversion) */
+ ret = H5Dwrite(dataset, H5T_NATIVE_USHORT, sid2, sid1, H5P_DEFAULT, &wval_ushort);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(sid2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close disk dataspace */
+ ret = H5Sclose(sid1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close file */
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_scalar_select3() */
+
+/****************************************************************
+**
+** test_shape_same(): Tests selections on dataspace, verify that
+** "shape same" routine is working correctly.
+**
+****************************************************************/
+static void
+test_shape_same(void)
+{
+ hid_t all_sid; /* Dataspace ID with "all" selection */
+ hid_t none_sid; /* Dataspace ID with "none" selection */
+ hid_t single_pt_sid; /* Dataspace ID with single point selection */
+ hid_t mult_pt_sid; /* Dataspace ID with multiple point selection */
+ hid_t single_hyper_sid; /* Dataspace ID with single block hyperslab selection */
+ hid_t single_hyper_all_sid; /* Dataspace ID with single block hyperslab
+ * selection that is the entire dataspace
+ */
+ hid_t single_hyper_pt_sid; /* Dataspace ID with single block hyperslab
+ * selection that is the same as the single
+ * point selection
+ */
+ hid_t regular_hyper_sid; /* Dataspace ID with regular hyperslab selection */
+ hid_t irreg_hyper_sid; /* Dataspace ID with irregular hyperslab selection */
+ hid_t none_hyper_sid; /* Dataspace ID with "no hyperslabs" selection */
+ hid_t scalar_all_sid; /* ID for scalar dataspace with "all" selection */
+ hid_t scalar_none_sid; /* ID for scalar dataspace with "none" selection */
+ hid_t tmp_sid; /* Temporary dataspace ID */
+ hsize_t dims[] = {SPACE9_DIM1, SPACE9_DIM2};
+ hsize_t coord1[1][SPACE2_RANK]; /* Coordinates for single point selection */
+ hsize_t coord2[SPACE9_DIM2][SPACE9_RANK]; /* Coordinates for multiple point selection */
+ hsize_t start[SPACE9_RANK]; /* Hyperslab start */
+ hsize_t stride[SPACE9_RANK]; /* Hyperslab stride */
+ hsize_t count[SPACE9_RANK]; /* Hyperslab block count */
+ hsize_t block[SPACE9_RANK]; /* Hyperslab block size */
+ unsigned u, v; /* Local index variables */
+ htri_t check; /* Shape comparison return value */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(6, ("Testing Same Shape Comparisons\n"));
+ HDassert(SPACE9_DIM2 >= POINT1_NPOINTS);
+
+ /* Create dataspace for "all" selection */
+ all_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(all_sid, FAIL, "H5Screate_simple");
+
+ /* Select entire extent for dataspace */
+ ret = H5Sselect_all(all_sid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ /* Create dataspace for "none" selection */
+ none_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(none_sid, FAIL, "H5Screate_simple");
+
+ /* Un-Select entire extent for dataspace */
+ ret = H5Sselect_none(none_sid);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ /* Create dataspace for single point selection */
+ single_pt_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(single_pt_sid, FAIL, "H5Screate_simple");
+
+ /* Select sequence of ten points for multiple point selection */
+ coord1[0][0] = 2;
+ coord1[0][1] = 2;
+ ret = H5Sselect_elements(single_pt_sid, H5S_SELECT_SET, (size_t)1, (const hsize_t *)coord1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Create dataspace for multiple point selection */
+ mult_pt_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(mult_pt_sid, FAIL, "H5Screate_simple");
+
+ /* Select sequence of ten points for multiple point selection */
+ coord2[0][0] = 2;
+ coord2[0][1] = 2;
+ coord2[1][0] = 7;
+ coord2[1][1] = 2;
+ coord2[2][0] = 1;
+ coord2[2][1] = 4;
+ coord2[3][0] = 2;
+ coord2[3][1] = 6;
+ coord2[4][0] = 0;
+ coord2[4][1] = 8;
+ coord2[5][0] = 3;
+ coord2[5][1] = 2;
+ coord2[6][0] = 4;
+ coord2[6][1] = 4;
+ coord2[7][0] = 1;
+ coord2[7][1] = 0;
+ coord2[8][0] = 5;
+ coord2[8][1] = 1;
+ coord2[9][0] = 9;
+ coord2[9][1] = 3;
+ ret = H5Sselect_elements(mult_pt_sid, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord2);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Create dataspace for single hyperslab selection */
+ single_hyper_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(single_hyper_sid, FAIL, "H5Screate_simple");
+
+ /* Select 10x10 hyperslab for single hyperslab selection */
+ start[0] = 1;
+ start[1] = 1;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = (SPACE9_DIM1 - 2);
+ block[1] = (SPACE9_DIM2 - 2);
+ ret = H5Sselect_hyperslab(single_hyper_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create dataspace for single hyperslab selection with entire extent selected */
+ single_hyper_all_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(single_hyper_all_sid, FAIL, "H5Screate_simple");
+
+ /* Select entire extent for hyperslab selection */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = SPACE9_DIM1;
+ block[1] = SPACE9_DIM2;
+ ret = H5Sselect_hyperslab(single_hyper_all_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create dataspace for single hyperslab selection with single point selected */
+ single_hyper_pt_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(single_hyper_pt_sid, FAIL, "H5Screate_simple");
+
+ /* Select entire extent for hyperslab selection */
+ start[0] = 2;
+ start[1] = 2;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(single_hyper_pt_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create dataspace for regular hyperslab selection */
+ regular_hyper_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(regular_hyper_sid, FAIL, "H5Screate_simple");
+
+ /* Select regular, strided hyperslab selection */
+ start[0] = 2;
+ start[1] = 2;
+ stride[0] = 2;
+ stride[1] = 2;
+ count[0] = 5;
+ count[1] = 2;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(regular_hyper_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create dataspace for irregular hyperslab selection */
+ irreg_hyper_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(irreg_hyper_sid, FAIL, "H5Screate_simple");
+
+ /* Create irregular hyperslab selection by OR'ing two blocks together */
+ start[0] = 2;
+ start[1] = 2;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(irreg_hyper_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 4;
+ start[1] = 4;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = 3;
+ block[1] = 3;
+ ret = H5Sselect_hyperslab(irreg_hyper_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create dataspace for "no" hyperslab selection */
+ none_hyper_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(none_hyper_sid, FAIL, "H5Screate_simple");
+
+ /* Create "no" hyperslab selection by XOR'ing same blocks together */
+ start[0] = 2;
+ start[1] = 2;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(none_hyper_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ ret = H5Sselect_hyperslab(none_hyper_sid, H5S_SELECT_XOR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create scalar dataspace for "all" selection */
+ scalar_all_sid = H5Screate(H5S_SCALAR);
+ CHECK(scalar_all_sid, FAIL, "H5Screate");
+
+ /* Create scalar dataspace for "none" selection */
+ scalar_none_sid = H5Screate(H5S_SCALAR);
+ CHECK(scalar_none_sid, FAIL, "H5Screate");
+
+ /* Un-Select entire extent for dataspace */
+ ret = H5Sselect_none(scalar_none_sid);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ /* Compare "all" selection to all the selections created */
+ /* Compare against itself */
+ check = H5Sselect_shape_same(all_sid, all_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against copy of itself */
+ tmp_sid = H5Scopy(all_sid);
+ CHECK(tmp_sid, FAIL, "H5Scopy");
+
+ check = H5Sselect_shape_same(all_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against "none" selection */
+ check = H5Sselect_shape_same(all_sid, none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against single point selection */
+ check = H5Sselect_shape_same(all_sid, single_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against multiple point selection */
+ check = H5Sselect_shape_same(all_sid, mult_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "plain" single hyperslab selection */
+ check = H5Sselect_shape_same(all_sid, single_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "all" single hyperslab selection */
+ check = H5Sselect_shape_same(all_sid, single_hyper_all_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against "single point" single hyperslab selection */
+ check = H5Sselect_shape_same(all_sid, single_hyper_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against regular, strided hyperslab selection */
+ check = H5Sselect_shape_same(all_sid, regular_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against irregular hyperslab selection */
+ check = H5Sselect_shape_same(all_sid, irreg_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "no" hyperslab selection */
+ check = H5Sselect_shape_same(all_sid, none_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "all" hyperslab selection */
+ check = H5Sselect_shape_same(all_sid, scalar_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "none" hyperslab selection */
+ check = H5Sselect_shape_same(all_sid, scalar_none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare "none" selection to all the selections created */
+ /* Compare against itself */
+ check = H5Sselect_shape_same(none_sid, none_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against copy of itself */
+ tmp_sid = H5Scopy(none_sid);
+ CHECK(tmp_sid, FAIL, "H5Scopy");
+
+ check = H5Sselect_shape_same(none_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against "all" selection */
+ check = H5Sselect_shape_same(none_sid, all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against single point selection */
+ check = H5Sselect_shape_same(none_sid, single_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against multiple point selection */
+ check = H5Sselect_shape_same(none_sid, mult_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "plain" single hyperslab selection */
+ check = H5Sselect_shape_same(none_sid, single_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "all" single hyperslab selection */
+ check = H5Sselect_shape_same(none_sid, single_hyper_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "single point" single hyperslab selection */
+ check = H5Sselect_shape_same(none_sid, single_hyper_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against regular, strided hyperslab selection */
+ check = H5Sselect_shape_same(none_sid, regular_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against irregular hyperslab selection */
+ check = H5Sselect_shape_same(none_sid, irreg_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "no" hyperslab selection */
+ check = H5Sselect_shape_same(none_sid, none_hyper_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "all" hyperslab selection */
+ check = H5Sselect_shape_same(none_sid, scalar_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "none" hyperslab selection */
+ check = H5Sselect_shape_same(none_sid, scalar_none_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare single point selection to all the selections created */
+ /* Compare against itself */
+ check = H5Sselect_shape_same(single_pt_sid, single_pt_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against copy of itself */
+ tmp_sid = H5Scopy(single_pt_sid);
+ CHECK(tmp_sid, FAIL, "H5Scopy");
+
+ check = H5Sselect_shape_same(single_pt_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against "all" selection */
+ check = H5Sselect_shape_same(single_pt_sid, all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "none" selection */
+ check = H5Sselect_shape_same(single_pt_sid, none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against multiple point selection */
+ check = H5Sselect_shape_same(single_pt_sid, mult_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "plain" single hyperslab selection */
+ check = H5Sselect_shape_same(single_pt_sid, single_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "all" single hyperslab selection */
+ check = H5Sselect_shape_same(single_pt_sid, single_hyper_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "single point" single hyperslab selection */
+ check = H5Sselect_shape_same(single_pt_sid, single_hyper_pt_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against regular, strided hyperslab selection */
+ check = H5Sselect_shape_same(single_pt_sid, regular_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against irregular hyperslab selection */
+ check = H5Sselect_shape_same(single_pt_sid, irreg_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "no" hyperslab selection */
+ check = H5Sselect_shape_same(single_pt_sid, none_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "all" hyperslab selection */
+ check = H5Sselect_shape_same(single_pt_sid, scalar_all_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "none" hyperslab selection */
+ check = H5Sselect_shape_same(single_pt_sid, scalar_none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare multiple point selection to all the selections created */
+ /* Compare against itself */
+ check = H5Sselect_shape_same(mult_pt_sid, mult_pt_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against copy of itself */
+ tmp_sid = H5Scopy(mult_pt_sid);
+ CHECK(tmp_sid, FAIL, "H5Scopy");
+
+ check = H5Sselect_shape_same(mult_pt_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against "all" selection */
+ check = H5Sselect_shape_same(mult_pt_sid, all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "none" selection */
+ check = H5Sselect_shape_same(mult_pt_sid, none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against single point selection */
+ check = H5Sselect_shape_same(mult_pt_sid, single_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "plain" single hyperslab selection */
+ check = H5Sselect_shape_same(mult_pt_sid, single_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "all" single hyperslab selection */
+ check = H5Sselect_shape_same(mult_pt_sid, single_hyper_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "single point" single hyperslab selection */
+ check = H5Sselect_shape_same(mult_pt_sid, single_hyper_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against regular, strided hyperslab selection */
+ check = H5Sselect_shape_same(mult_pt_sid, regular_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against irregular hyperslab selection */
+ check = H5Sselect_shape_same(mult_pt_sid, irreg_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "no" hyperslab selection */
+ check = H5Sselect_shape_same(mult_pt_sid, none_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "all" hyperslab selection */
+ check = H5Sselect_shape_same(mult_pt_sid, scalar_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "none" hyperslab selection */
+ check = H5Sselect_shape_same(mult_pt_sid, scalar_none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare single "normal" hyperslab selection to all the selections created */
+ /* Compare against itself */
+ check = H5Sselect_shape_same(single_hyper_sid, single_hyper_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against copy of itself */
+ tmp_sid = H5Scopy(single_hyper_sid);
+ CHECK(tmp_sid, FAIL, "H5Scopy");
+
+ check = H5Sselect_shape_same(single_hyper_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against "all" selection */
+ check = H5Sselect_shape_same(single_hyper_sid, all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "none" selection */
+ check = H5Sselect_shape_same(single_hyper_sid, none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against single point selection */
+ check = H5Sselect_shape_same(single_hyper_sid, single_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against multiple point selection */
+ check = H5Sselect_shape_same(single_hyper_sid, mult_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "all" single hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_sid, single_hyper_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "single point" single hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_sid, single_hyper_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against regular, strided hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_sid, regular_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against irregular hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_sid, irreg_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "no" hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_sid, none_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+#ifdef NOT_YET
+ /* In theory, these two selections are the same shape, but the
+ * H5Sselect_shape_same() routine is just not this sophisticated yet and it
+ * would take too much effort to make this work. The worst case is that the
+ * non-optimized chunk mapping routines will be invoked instead of the more
+ * optimized routines, so this only hurts performance, not correctness
+ */
+ /* Construct point selection which matches "plain" hyperslab selection */
+ /* Create dataspace for point selection */
+ tmp_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(tmp_sid, FAIL, "H5Screate_simple");
+
+ /* Select sequence of points for point selection */
+ for (u = 1; u < (SPACE9_DIM1 - 1); u++) {
+ for (v = 1; v < (SPACE9_DIM2 - 1); v++) {
+ coord2[v - 1][0] = u;
+ coord2[v - 1][1] = v;
+ } /* end for */
+
+ ret = H5Sselect_elements(tmp_sid, H5S_SELECT_APPEND, (SPACE9_DIM2 - 2), coord2);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+ } /* end for */
+
+ /* Compare against hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+#endif /* NOT_YET */
+
+ /* Construct hyperslab selection which matches "plain" hyperslab selection */
+ /* Create dataspace for hyperslab selection */
+ tmp_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(tmp_sid, FAIL, "H5Screate_simple");
+
+ /* Un-select entire extent */
+ ret = H5Sselect_none(tmp_sid);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ /* Select sequence of rows for hyperslab selection */
+ for (u = 1; u < (SPACE9_DIM1 - 1); u++) {
+ start[0] = u;
+ start[1] = 1;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = 1;
+ block[1] = (SPACE9_DIM2 - 2);
+ ret = H5Sselect_hyperslab(tmp_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ } /* end for */
+
+ /* Compare against hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against scalar "all" hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_sid, scalar_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "none" hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_sid, scalar_none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare single "all" hyperslab selection to all the selections created */
+ /* Compare against itself */
+ check = H5Sselect_shape_same(single_hyper_all_sid, single_hyper_all_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against copy of itself */
+ tmp_sid = H5Scopy(single_hyper_all_sid);
+ CHECK(tmp_sid, FAIL, "H5Scopy");
+
+ check = H5Sselect_shape_same(single_hyper_all_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against "all" selection */
+ check = H5Sselect_shape_same(single_hyper_all_sid, all_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against "none" selection */
+ check = H5Sselect_shape_same(single_hyper_all_sid, none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against single point selection */
+ check = H5Sselect_shape_same(single_hyper_all_sid, single_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against multiple point selection */
+ check = H5Sselect_shape_same(single_hyper_all_sid, mult_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "plain" single hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_all_sid, single_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "single point" single hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_all_sid, single_hyper_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against regular, strided hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_all_sid, regular_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against irregular hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_all_sid, irreg_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "no" hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_all_sid, none_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+#ifdef NOT_YET
+ /* In theory, these two selections are the same shape, but the
+ * H5Sselect_shape_same() routine is just not this sophisticated yet and it
+ * would take too much effort to make this work. The worst case is that the
+ * non-optimized chunk mapping routines will be invoked instead of the more
+ * optimized routines, so this only hurts performance, not correctness
+ */
+ /* Construct point selection which matches "all" hyperslab selection */
+ /* Create dataspace for point selection */
+ tmp_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(tmp_sid, FAIL, "H5Screate_simple");
+
+ /* Select sequence of points for point selection */
+ for (u = 0; u < SPACE9_DIM1; u++) {
+ for (v = 0; v < SPACE9_DIM2; v++) {
+ coord2[v][0] = u;
+ coord2[v][1] = v;
+ } /* end for */
+ ret = H5Sselect_elements(tmp_sid, H5S_SELECT_APPEND, SPACE9_DIM2, coord2);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+ } /* end for */
+
+ /* Compare against hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_all_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+#endif /* NOT_YET */
+
+ /* Construct hyperslab selection which matches "all" hyperslab selection */
+ /* Create dataspace for hyperslab selection */
+ tmp_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(tmp_sid, FAIL, "H5Screate_simple");
+
+ /* Un-select entire extent */
+ ret = H5Sselect_none(tmp_sid);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ /* Select sequence of rows for hyperslab selection */
+ for (u = 0; u < SPACE9_DIM2; u++) {
+ start[0] = u;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = 1;
+ block[1] = SPACE9_DIM2;
+ ret = H5Sselect_hyperslab(tmp_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ } /* end for */
+
+ /* Compare against hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_all_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against scalar "all" hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_all_sid, scalar_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "none" hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_all_sid, scalar_none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare single "point" hyperslab selection to all the selections created */
+ /* Compare against itself */
+ check = H5Sselect_shape_same(single_hyper_pt_sid, single_hyper_pt_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against copy of itself */
+ tmp_sid = H5Scopy(single_hyper_pt_sid);
+ CHECK(tmp_sid, FAIL, "H5Scopy");
+
+ check = H5Sselect_shape_same(single_hyper_pt_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against "all" selection */
+ check = H5Sselect_shape_same(single_hyper_pt_sid, all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "none" selection */
+ check = H5Sselect_shape_same(single_hyper_pt_sid, none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against single point selection */
+ check = H5Sselect_shape_same(single_hyper_pt_sid, single_pt_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against multiple point selection */
+ check = H5Sselect_shape_same(single_hyper_pt_sid, mult_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "plain" single hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_pt_sid, single_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "all" single hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_pt_sid, single_hyper_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against regular, strided hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_pt_sid, regular_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against irregular hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_pt_sid, irreg_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "no" hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_pt_sid, none_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "all" hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_pt_sid, scalar_all_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "none" hyperslab selection */
+ check = H5Sselect_shape_same(single_hyper_pt_sid, scalar_none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare regular, strided hyperslab selection to all the selections created */
+ /* Compare against itself */
+ check = H5Sselect_shape_same(regular_hyper_sid, regular_hyper_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against copy of itself */
+ tmp_sid = H5Scopy(regular_hyper_sid);
+ CHECK(tmp_sid, FAIL, "H5Scopy");
+
+ check = H5Sselect_shape_same(regular_hyper_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against "all" selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "none" selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against single point selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, single_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against multiple point selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, mult_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "plain" single hyperslab selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, single_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "all" single hyperslab selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, single_hyper_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "single point" single hyperslab selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, single_hyper_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against irregular hyperslab selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, irreg_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "no" hyperslab selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, none_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Construct point selection which matches regular, strided hyperslab selection */
+ /* Create dataspace for point selection */
+ tmp_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(tmp_sid, FAIL, "H5Screate_simple");
+
+ /* Select sequence of points for point selection */
+ for (u = 2; u < 11; u += 2) {
+ for (v = 0; v < 2; v++) {
+ coord2[v][0] = u;
+ coord2[v][1] = (v * 2) + 2;
+ } /* end for */
+ ret = H5Sselect_elements(tmp_sid, H5S_SELECT_APPEND, (size_t)2, (const hsize_t *)coord2);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+ } /* end for */
+
+ /* Compare against hyperslab selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Construct hyperslab selection which matches regular, strided hyperslab selection */
+ /* Create dataspace for hyperslab selection */
+ tmp_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(tmp_sid, FAIL, "H5Screate_simple");
+
+ /* Un-select entire extent */
+ ret = H5Sselect_none(tmp_sid);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ /* Select sequence of rows for hyperslab selection */
+ for (u = 2; u < 11; u += 2) {
+ start[0] = u;
+ start[1] = 3;
+ stride[0] = 1;
+ stride[1] = 2;
+ count[0] = 1;
+ count[1] = 2;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(tmp_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ } /* end for */
+
+ /* Compare against hyperslab selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Construct regular hyperslab selection with an offset which matches regular, strided hyperslab selection
+ */
+ /* Create dataspace for hyperslab selection */
+ tmp_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(tmp_sid, FAIL, "H5Screate_simple");
+
+ /* Select regular, strided hyperslab selection at an offset */
+ start[0] = 1;
+ start[1] = 1;
+ stride[0] = 2;
+ stride[1] = 2;
+ count[0] = 5;
+ count[1] = 2;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(tmp_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Compare against hyperslab selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against scalar "all" hyperslab selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, scalar_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "none" hyperslab selection */
+ check = H5Sselect_shape_same(regular_hyper_sid, scalar_none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare irregular hyperslab selection to all the selections created */
+ /* Compare against itself */
+ check = H5Sselect_shape_same(irreg_hyper_sid, irreg_hyper_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against copy of itself */
+ tmp_sid = H5Scopy(irreg_hyper_sid);
+ CHECK(tmp_sid, FAIL, "H5Scopy");
+
+ check = H5Sselect_shape_same(irreg_hyper_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against "all" selection */
+ check = H5Sselect_shape_same(irreg_hyper_sid, all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "none" selection */
+ check = H5Sselect_shape_same(irreg_hyper_sid, none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against single point selection */
+ check = H5Sselect_shape_same(irreg_hyper_sid, single_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against multiple point selection */
+ check = H5Sselect_shape_same(irreg_hyper_sid, mult_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "plain" single hyperslab selection */
+ check = H5Sselect_shape_same(irreg_hyper_sid, single_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "all" single hyperslab selection */
+ check = H5Sselect_shape_same(irreg_hyper_sid, single_hyper_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "single point" single hyperslab selection */
+ check = H5Sselect_shape_same(irreg_hyper_sid, single_hyper_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against regular, strided hyperslab selection */
+ check = H5Sselect_shape_same(irreg_hyper_sid, regular_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "no" hyperslab selection */
+ check = H5Sselect_shape_same(irreg_hyper_sid, none_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Construct hyperslab selection which matches irregular hyperslab selection */
+ /* Create dataspace for hyperslab selection */
+ tmp_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(tmp_sid, FAIL, "H5Screate_simple");
+
+ start[0] = 2;
+ start[1] = 2;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(tmp_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Select sequence of columns for hyperslab selection */
+ for (u = 0; u < 3; u++) {
+ start[0] = 4;
+ start[1] = u + 4;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = 3;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(tmp_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ } /* end for */
+
+ /* Compare against hyperslab selection */
+ check = H5Sselect_shape_same(irreg_hyper_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against scalar "all" hyperslab selection */
+ check = H5Sselect_shape_same(irreg_hyper_sid, scalar_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "none" hyperslab selection */
+ check = H5Sselect_shape_same(irreg_hyper_sid, scalar_none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare scalar "all" dataspace with all selections created */
+
+ /* Compare against itself */
+ check = H5Sselect_shape_same(scalar_all_sid, scalar_all_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against copy of itself */
+ tmp_sid = H5Scopy(scalar_all_sid);
+ CHECK(tmp_sid, FAIL, "H5Scopy");
+
+ check = H5Sselect_shape_same(scalar_all_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against "all" selection */
+ check = H5Sselect_shape_same(scalar_all_sid, all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "none" selection */
+ check = H5Sselect_shape_same(scalar_all_sid, none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against single point selection */
+ check = H5Sselect_shape_same(scalar_all_sid, single_pt_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against multiple point selection */
+ check = H5Sselect_shape_same(scalar_all_sid, mult_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "plain" single hyperslab selection */
+ check = H5Sselect_shape_same(scalar_all_sid, single_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "all" single hyperslab selection */
+ check = H5Sselect_shape_same(scalar_all_sid, single_hyper_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "single point" single hyperslab selection */
+ check = H5Sselect_shape_same(scalar_all_sid, single_hyper_pt_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against regular, strided hyperslab selection */
+ check = H5Sselect_shape_same(scalar_all_sid, regular_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against irregular hyperslab selection */
+ check = H5Sselect_shape_same(scalar_all_sid, irreg_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "no" hyperslab selection */
+ check = H5Sselect_shape_same(scalar_all_sid, none_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "none" hyperslab selection */
+ check = H5Sselect_shape_same(scalar_all_sid, scalar_none_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare scalar "none" dataspace with all selections created */
+
+ /* Compare against itself */
+ check = H5Sselect_shape_same(scalar_none_sid, scalar_none_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against copy of itself */
+ tmp_sid = H5Scopy(scalar_none_sid);
+ CHECK(tmp_sid, FAIL, "H5Scopy");
+
+ check = H5Sselect_shape_same(scalar_none_sid, tmp_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ /* Compare against "all" selection */
+ check = H5Sselect_shape_same(scalar_none_sid, all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "none" selection */
+ check = H5Sselect_shape_same(scalar_none_sid, none_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against single point selection */
+ check = H5Sselect_shape_same(scalar_none_sid, single_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against multiple point selection */
+ check = H5Sselect_shape_same(scalar_none_sid, mult_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "plain" single hyperslab selection */
+ check = H5Sselect_shape_same(scalar_none_sid, single_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "all" single hyperslab selection */
+ check = H5Sselect_shape_same(scalar_none_sid, single_hyper_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "single point" single hyperslab selection */
+ check = H5Sselect_shape_same(scalar_none_sid, single_hyper_pt_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against regular, strided hyperslab selection */
+ check = H5Sselect_shape_same(scalar_none_sid, regular_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against irregular hyperslab selection */
+ check = H5Sselect_shape_same(scalar_none_sid, irreg_hyper_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "no" hyperslab selection */
+ check = H5Sselect_shape_same(scalar_none_sid, none_hyper_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Compare against scalar "all" hyperslab selection */
+ check = H5Sselect_shape_same(scalar_none_sid, scalar_all_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Close dataspaces */
+ ret = H5Sclose(all_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(none_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(single_pt_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(mult_pt_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(single_hyper_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(single_hyper_all_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(single_hyper_pt_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(regular_hyper_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(irreg_hyper_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(none_hyper_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(scalar_all_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(scalar_none_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* test_shape_same() */
+
+/****************************************************************
+**
+** test_shape_same_dr__smoke_check_1():
+**
+** Create a square, 2-D dataspace (10 X 10), and select
+** all of it.
+**
+** Similarly, create nine, 3-D dataspaces (10 X 10 X 10),
+** and select (10 X 10 X 1) hyperslabs in each, three with
+** the slab parallel to the xy plane, three parallel to the
+** xz plane, and three parallel to the yz plane.
+**
+** Assuming that z is the fastest changing dimension,
+** H5Sselect_shape_same() should return TRUE when comparing
+** the full 2-D space against any hyperslab parallel to the
+** yz plane in the 3-D space, and FALSE when comparing the
+** full 2-D space against the other two hyperslabs.
+**
+** Also create two additional 3-D dataspaces (10 X 10 X 10),
+** and select a (10 X 10 X 2) hyperslab parallel to the yz
+** axis in one of them, and two parallel (10 X 10 X 1) hyper
+** slabs parallel to the yz axis in the other.
+** H5Sselect_shape_same() should return FALSE when comparing
+** each to the 2-D selection.
+**
+****************************************************************/
+static void
+test_shape_same_dr__smoke_check_1(void)
+{
+ hid_t small_square_sid;
+ hid_t small_cube_xy_slice_0_sid;
+ hid_t small_cube_xy_slice_1_sid;
+ hid_t small_cube_xy_slice_2_sid;
+ hid_t small_cube_xz_slice_0_sid;
+ hid_t small_cube_xz_slice_1_sid;
+ hid_t small_cube_xz_slice_2_sid;
+ hid_t small_cube_yz_slice_0_sid;
+ hid_t small_cube_yz_slice_1_sid;
+ hid_t small_cube_yz_slice_2_sid;
+ hid_t small_cube_yz_slice_3_sid;
+ hid_t small_cube_yz_slice_4_sid;
+ hsize_t small_cube_dims[] = {10, 10, 10};
+ /* Note: dimension index 0 is the slowest changing ("x") and index 2
+ * the fastest changing ("z") throughout this test. */
+ hsize_t start[3];
+ hsize_t stride[3];
+ hsize_t count[3];
+ hsize_t block[3];
+ htri_t check; /* Shape comparison return value */
+ herr_t ret; /* Generic return value */
+
+ MESSAGE(7, (" Smoke check 1: Slices through a cube.\n"));
+
+ /* Create the 10 x 10 dataspace */
+ small_square_sid = H5Screate_simple(2, small_cube_dims, NULL);
+ CHECK(small_square_sid, FAIL, "H5Screate_simple");
+
+ /* Create the 10 X 10 X 10 dataspaces for the hyperslab parallel to the xy axis */
+ small_cube_xy_slice_0_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xy_slice_0_sid, FAIL, "H5Screate_simple");
+
+ small_cube_xy_slice_1_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xy_slice_1_sid, FAIL, "H5Screate_simple");
+
+ small_cube_xy_slice_2_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xy_slice_2_sid, FAIL, "H5Screate_simple");
+
+ start[0] = 0; /* x */
+ start[1] = 0; /* y */
+ start[2] = 0; /* z */
+
+ /* stride is a bit silly here, since we are only selecting a single */
+ /* contiguous plane, but include it anyway, with values large enough */
+ /* to ensure that we will only get the single block selected. */
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+ stride[2] = 20; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 10; /* x */
+ block[1] = 10; /* y */
+ block[2] = 1; /* z */
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_0_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[2] = 5;
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_1_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[2] = 9;
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_2_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create the 10 X 10 X 10 dataspaces for the hyperslab parallel to the xz axis */
+ small_cube_xz_slice_0_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xz_slice_0_sid, FAIL, "H5Screate_simple");
+
+ small_cube_xz_slice_1_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xz_slice_1_sid, FAIL, "H5Screate_simple");
+
+ small_cube_xz_slice_2_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xz_slice_2_sid, FAIL, "H5Screate_simple");
+
+ start[0] = 0; /* x */
+ start[1] = 0; /* y */
+ start[2] = 0; /* z */
+
+ /* stride is a bit silly here, since we are only selecting a single */
+ /* contiguous chunk, but include it anyway, with values large enough */
+ /* to ensure that we will only get the single chunk. */
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+ stride[2] = 20; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 10; /* x */
+ block[1] = 1; /* y */
+ block[2] = 10; /* z */
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_0_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[1] = 4;
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_1_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[1] = 9;
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_2_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create the 10 X 10 X 10 dataspaces for the hyperslabs parallel to the yz axis */
+ small_cube_yz_slice_0_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_yz_slice_0_sid, FAIL, "H5Screate_simple");
+
+ small_cube_yz_slice_1_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_yz_slice_1_sid, FAIL, "H5Screate_simple");
+
+ small_cube_yz_slice_2_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_yz_slice_2_sid, FAIL, "H5Screate_simple");
+
+ small_cube_yz_slice_3_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_yz_slice_3_sid, FAIL, "H5Screate_simple");
+
+ small_cube_yz_slice_4_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_yz_slice_4_sid, FAIL, "H5Screate_simple");
+
+ start[0] = 0; /* x */
+ start[1] = 0; /* y */
+ start[2] = 0; /* z */
+
+ /* stride is a bit silly here, since we are only selecting a single */
+ /* contiguous chunk, but include it anyway, with values large enough */
+ /* to ensure that we will only get the single chunk. */
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+ stride[2] = 20; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 1; /* x */
+ block[1] = 10; /* y */
+ block[2] = 10; /* z */
+
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_0_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 4;
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_1_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 9;
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_2_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Select a (10 X 10 X 2) slab -- thickness 2, so it should NOT match the 2-D square */
+ start[0] = 4;
+ block[0] = 2;
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_3_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Select two disjoint (10 X 10 X 1) slabs (second one OR'd in below) -- also no match */
+ start[0] = 3;
+ block[0] = 1;
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_4_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 6;
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_4_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* setup is done -- run the tests: */
+
+ /* Compare against "xy" selection */
+ check = H5Sselect_shape_same(small_cube_xy_slice_0_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_xy_slice_1_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_xy_slice_2_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "xz" selection */
+ check = H5Sselect_shape_same(small_cube_xz_slice_0_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_xz_slice_1_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_xz_slice_2_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "yz" selection */
+ check = H5Sselect_shape_same(small_cube_yz_slice_0_sid, small_square_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_yz_slice_1_sid, small_square_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_yz_slice_2_sid, small_square_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_yz_slice_3_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_yz_slice_4_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Close dataspaces */
+ ret = H5Sclose(small_square_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xy_slice_0_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xy_slice_1_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xy_slice_2_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xz_slice_0_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xz_slice_1_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xz_slice_2_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_yz_slice_0_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_yz_slice_1_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_yz_slice_2_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_yz_slice_3_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_yz_slice_4_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+} /* test_shape_same_dr__smoke_check_1() */
+
+/****************************************************************
+**
+** test_shape_same_dr__smoke_check_2():
+**
+** Create a square, 2-D dataspace (10 X 10), and select
+** a "checker board" hyperslab as follows:
+**
+** * * - - * * - - * *
+** * * - - * * - - * *
+** - - * * - - * * - -
+** - - * * - - * * - -
+** * * - - * * - - * *
+** * * - - * * - - * *
+** - - * * - - * * - -
+** - - * * - - * * - -
+** * * - - * * - - * *
+** * * - - * * - - * *
+**
+** where asterisks indicate selected elements, and dashes
+** indicate unselected elements.
+**
+** Similarly, create nine, 3-D dataspaces (10 X 10 X 10),
+** and select similar (10 X 10 X 1) checker board hyper
+** slabs in each, three with the slab parallel to the xy
+** plane, three parallel to the xz plane, and three parallel
+** to the yz plane.
+**
+** Assuming that z is the fastest changing dimension,
+** H5Sselect_shape_same() should return TRUE when comparing
+** the 2-D space checker board selection against a checker
+** board hyperslab parallel to the yz plane in the 3-D
+** space, and FALSE when comparing the 2-D checkerboard
+** selection against two hyperslabs parallel to the xy
+** or xz planes.
+**
+** Also create an additional 3-D dataspace (10 X 10 X 10),
+** and select a checker board parallel with the yz axis,
+** except with some squares being on different planes.
+** H5Sselect_shape_same() should return FALSE when
+** comparing this selection to the 2-D selection.
+**
+****************************************************************/
+static void
+test_shape_same_dr__smoke_check_2(void)
+{
+ hid_t small_square_sid;
+ hid_t small_cube_xy_slice_0_sid;
+ hid_t small_cube_xy_slice_1_sid;
+ hid_t small_cube_xy_slice_2_sid;
+ hid_t small_cube_xz_slice_0_sid;
+ hid_t small_cube_xz_slice_1_sid;
+ hid_t small_cube_xz_slice_2_sid;
+ hid_t small_cube_yz_slice_0_sid;
+ hid_t small_cube_yz_slice_1_sid;
+ hid_t small_cube_yz_slice_2_sid;
+ hid_t small_cube_yz_slice_3_sid;
+ hsize_t small_cube_dims[] = {10, 10, 10};
+ hsize_t start[3];
+ hsize_t stride[3];
+ hsize_t count[3];
+ hsize_t block[3];
+ htri_t check; /* Shape comparison return value */
+ herr_t ret; /* Generic return value */
+
+ MESSAGE(7, (" Smoke check 2: Checker board slices through a cube.\n"));
+
+ /* Create the 10 x 10 dataspace */
+ small_square_sid = H5Screate_simple(2, small_cube_dims, NULL);
+ CHECK(small_square_sid, FAIL, "H5Screate_simple");
+
+ start[0] = 0; /* x */
+ start[1] = 0; /* y */
+
+ stride[0] = 4; /* x */
+ stride[1] = 4; /* y */
+
+ count[0] = 3; /* x */
+ count[1] = 3; /* y */
+
+ block[0] = 2; /* x */
+ block[1] = 2; /* y */
+ ret = H5Sselect_hyperslab(small_square_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* OR in the offset 2 x 2 blocks to complete the checker board pattern */
+ start[0] = 2; /* x */
+ start[1] = 2; /* y */
+
+ stride[0] = 4; /* x */
+ stride[1] = 4; /* y */
+
+ count[0] = 2; /* x */
+ count[1] = 2; /* y */
+
+ block[0] = 2; /* x */
+ block[1] = 2; /* y */
+ ret = H5Sselect_hyperslab(small_square_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create the 10 X 10 X 10 dataspaces for the hyperslab parallel to the xy axis */
+ small_cube_xy_slice_0_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xy_slice_0_sid, FAIL, "H5Screate_simple");
+
+ small_cube_xy_slice_1_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xy_slice_1_sid, FAIL, "H5Screate_simple");
+
+ small_cube_xy_slice_2_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xy_slice_2_sid, FAIL, "H5Screate_simple");
+
+ start[0] = 0; /* x */
+ start[1] = 0; /* y */
+ start[2] = 0; /* z */
+
+ stride[0] = 4; /* x */
+ stride[1] = 4; /* y */
+ stride[2] = 20; /* z -- large enough that there will only be one slice */
+
+ count[0] = 3; /* x */
+ count[1] = 3; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 2; /* x */
+ block[1] = 2; /* y */
+ block[2] = 1; /* z */
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_0_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[2] = 3;
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_1_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[2] = 9;
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_2_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 2; /* x */
+ start[1] = 2; /* y */
+ start[2] = 0; /* z */
+
+ stride[0] = 4; /* x */
+ stride[1] = 4; /* y */
+ stride[2] = 20; /* z -- large enough that there will only be one slice */
+
+ count[0] = 2; /* x */
+ count[1] = 2; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 2; /* x */
+ block[1] = 2; /* y */
+ block[2] = 1; /* z */
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_0_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[2] = 3;
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_1_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[2] = 9;
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_2_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create the 10 X 10 X 10 dataspaces for the hyperslab parallel to the xz axis */
+ small_cube_xz_slice_0_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xz_slice_0_sid, FAIL, "H5Screate_simple");
+
+ small_cube_xz_slice_1_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xz_slice_1_sid, FAIL, "H5Screate_simple");
+
+ small_cube_xz_slice_2_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xz_slice_2_sid, FAIL, "H5Screate_simple");
+
+ start[0] = 0; /* x */
+ start[1] = 0; /* y */
+ start[2] = 0; /* z */
+
+ stride[0] = 4; /* x */
+ stride[1] = 20; /* y -- large enough that there will only be one slice */
+ stride[2] = 4; /* z */
+
+ count[0] = 3; /* x */
+ count[1] = 1; /* y */
+ count[2] = 3; /* z */
+
+ block[0] = 2; /* x */
+ block[1] = 1; /* y */
+ block[2] = 2; /* z */
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_0_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[1] = 5;
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_1_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[1] = 9;
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_2_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 2; /* x */
+ start[1] = 0; /* y */
+ start[2] = 2; /* z */
+
+ stride[0] = 4; /* x */
+ stride[1] = 20; /* y -- large enough that there will only be one slice */
+ stride[2] = 4; /* z */
+
+ count[0] = 2; /* x */
+ count[1] = 1; /* y */
+ count[2] = 2; /* z */
+
+ block[0] = 2; /* x */
+ block[1] = 1; /* y */
+ block[2] = 2; /* z */
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_0_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[1] = 5;
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_1_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[1] = 9;
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_2_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create the 10 X 10 X 10 dataspaces for the hyperslabs parallel to the yz axis */
+ small_cube_yz_slice_0_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_yz_slice_0_sid, FAIL, "H5Screate_simple");
+
+ small_cube_yz_slice_1_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_yz_slice_1_sid, FAIL, "H5Screate_simple");
+
+ small_cube_yz_slice_2_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_yz_slice_2_sid, FAIL, "H5Screate_simple");
+
+ small_cube_yz_slice_3_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_yz_slice_3_sid, FAIL, "H5Screate_simple");
+
+ start[0] = 0; /* x */
+ start[1] = 0; /* y */
+ start[2] = 0; /* z */
+
+ stride[0] = 20; /* x -- large enough that there will only be one slice */
+ stride[1] = 4; /* y */
+ stride[2] = 4; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 3; /* y */
+ count[2] = 3; /* z */
+
+ block[0] = 1; /* x */
+ block[1] = 2; /* y */
+ block[2] = 2; /* z */
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_0_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 8;
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_1_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 9;
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_2_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* slice 3's first set of squares goes on the x == 3 plane; the rest of its
+ * checker board is OR'd in below on a different plane, so it should NOT
+ * compare as the same shape as the 2-D checker board */
+ start[0] = 3;
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_3_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 0; /* x */
+ start[1] = 2; /* y */
+ start[2] = 2; /* z */
+
+ stride[0] = 20; /* x -- large enough that there will only be one slice */
+ stride[1] = 4; /* y */
+ stride[2] = 4; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 2; /* y */
+ count[2] = 2; /* z */
+
+ block[0] = 1; /* x */
+ block[1] = 2; /* y */
+ block[2] = 2; /* z */
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_0_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 8;
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_1_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 9;
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_2_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 4;
+ /* This test gets the right answer, but it fails the shape same
+ * test in an unexpected point. Bring this up with Quincey, as
+ * the oddness looks like it is not related to my code.
+ * -- JRM
+ */
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_3_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* setup is done -- run the tests: */
+
+ /* Compare against "xy" selection */
+ check = H5Sselect_shape_same(small_cube_xy_slice_0_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_xy_slice_1_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_xy_slice_2_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "xz" selection */
+ check = H5Sselect_shape_same(small_cube_xz_slice_0_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_xz_slice_1_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_xz_slice_2_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "yz" selection */
+ check = H5Sselect_shape_same(small_cube_yz_slice_0_sid, small_square_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_yz_slice_1_sid, small_square_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_yz_slice_2_sid, small_square_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_yz_slice_3_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Close dataspaces */
+ ret = H5Sclose(small_square_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xy_slice_0_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xy_slice_1_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xy_slice_2_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xz_slice_0_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xz_slice_1_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xz_slice_2_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_yz_slice_0_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_yz_slice_1_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_yz_slice_2_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_yz_slice_3_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+} /* test_shape_same_dr__smoke_check_2() */
+
+/****************************************************************
+**
+** test_shape_same_dr__smoke_check_3():
+**
+** Create a square, 2-D dataspace (10 X 10), and select an
+** irregular hyperslab as follows:
+**
+** y
+** 9 - - - - - - - - - -
+** 8 - - - - - - - - - -
+** 7 - - - * * * * - - -
+** 6 - - * * * * * - - -
+** 5 - - * * - - - - - -
+** 4 - - * * - * * - - -
+** 3 - - * * - * * - - -
+** 2 - - - - - - - - - -
+** 1 - - - - - - - - - -
+** 0 - - - - - - - - - -
+** 0 1 2 3 4 5 6 7 8 9 x
+**
+** where asterisks indicate selected elements, and dashes
+** indicate unselected elements.
+**
+** Similarly, create nine, 3-D dataspaces (10 X 10 X 10),
+** and select similar irregular hyperslabs in each, three
+** with the slab parallel to the xy plane, three parallel
+** to the xz plane, and three parallel to the yz plane.
+** Further, translate the irregular slab in 2/3rds of the
+** cases.
+**
+** Assuming that z is the fastest changing dimension,
+** H5Sselect_shape_same() should return TRUE when
+** comparing the 2-D irregular hyperslab selection
+** against the irregular hyperslab selections parallel
+** to the yz plane in the 3-D space, and FALSE when
+** comparing it against the irregular hyperslabs
+** selections parallel to the xy or xz planes.
+**
+****************************************************************/
+static void
+test_shape_same_dr__smoke_check_3(void)
+{
+ hid_t small_square_sid;
+ hid_t small_cube_xy_slice_0_sid;
+ hid_t small_cube_xy_slice_1_sid;
+ hid_t small_cube_xy_slice_2_sid;
+ hid_t small_cube_xz_slice_0_sid;
+ hid_t small_cube_xz_slice_1_sid;
+ hid_t small_cube_xz_slice_2_sid;
+ hid_t small_cube_yz_slice_0_sid;
+ hid_t small_cube_yz_slice_1_sid;
+ hid_t small_cube_yz_slice_2_sid;
+ hsize_t small_cube_dims[] = {10, 10, 10};
+ hsize_t start[3];
+ hsize_t stride[3];
+ hsize_t count[3];
+ hsize_t block[3];
+ htri_t check; /* Shape comparison return value */
+ herr_t ret; /* Generic return value */
+
+ MESSAGE(7, (" Smoke check 3: Offset subsets of slices through a cube.\n"));
+
+ /* Create the 10 x 10 dataspace */
+ small_square_sid = H5Screate_simple(2, small_cube_dims, NULL);
+ CHECK(small_square_sid, FAIL, "H5Screate_simple");
+
+ start[0] = 2; /* x */
+ start[1] = 3; /* y */
+
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+
+ block[0] = 2; /* x */
+ block[1] = 4; /* y */
+ ret = H5Sselect_hyperslab(small_square_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 3; /* x */
+ start[1] = 6; /* y */
+
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+
+ block[0] = 4; /* x */
+ block[1] = 2; /* y */
+ ret = H5Sselect_hyperslab(small_square_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 5; /* x */
+ start[1] = 3; /* y */
+
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+
+ block[0] = 2; /* x */
+ block[1] = 2; /* y */
+ ret = H5Sselect_hyperslab(small_square_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create the 10 X 10 X 10 dataspaces for the hyperslab parallel to the xy axis */
+ small_cube_xy_slice_0_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xy_slice_0_sid, FAIL, "H5Screate_simple");
+
+ small_cube_xy_slice_1_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xy_slice_1_sid, FAIL, "H5Screate_simple");
+
+ small_cube_xy_slice_2_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xy_slice_2_sid, FAIL, "H5Screate_simple");
+
+ start[0] = 2; /* x */
+ start[1] = 3; /* y */
+ start[2] = 5; /* z */
+
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+ stride[2] = 20; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 2; /* x */
+ block[1] = 4; /* y */
+ block[2] = 1; /* z */
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_0_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the starting point to the origin */
+ start[0] -= 1; /* x */
+ start[1] -= 2; /* y */
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_1_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the irregular selection to the upper right hand corner */
+ start[0] += 5; /* x */
+ start[1] += 5; /* y */
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_2_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 3; /* x */
+ start[1] = 6; /* y */
+ start[2] = 5; /* z */
+
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+ stride[2] = 20; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 4; /* x */
+ block[1] = 2; /* y */
+ block[2] = 1; /* z */
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_0_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the starting point to the origin */
+ start[0] -= 1; /* x */
+ start[1] -= 2; /* y */
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_1_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the irregular selection to the upper right hand corner */
+ start[0] += 5; /* x */
+ start[1] += 5; /* y */
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_2_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 5; /* x */
+ start[1] = 3; /* y */
+ start[2] = 5; /* z */
+
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+ stride[2] = 20; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 2; /* x */
+ block[1] = 2; /* y */
+ block[2] = 1; /* z */
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_0_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the starting point to the origin */
+ start[0] -= 1; /* x */
+ start[1] -= 2; /* y */
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_1_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the irregular selection to the upper right hand corner */
+ start[0] += 5; /* x */
+ start[1] += 5; /* y */
+ ret = H5Sselect_hyperslab(small_cube_xy_slice_2_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create the 10 X 10 X 10 dataspaces for the hyperslab parallel to the xz axis */
+ small_cube_xz_slice_0_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xz_slice_0_sid, FAIL, "H5Screate_simple");
+
+ small_cube_xz_slice_1_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xz_slice_1_sid, FAIL, "H5Screate_simple");
+
+ small_cube_xz_slice_2_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_xz_slice_2_sid, FAIL, "H5Screate_simple");
+
+ start[0] = 2; /* x */
+ start[1] = 5; /* y */
+ start[2] = 3; /* z */
+
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+ stride[2] = 20; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 2; /* x */
+ block[1] = 1; /* y */
+ block[2] = 4; /* z */
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_0_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the starting point to the origin */
+ start[0] -= 1; /* x */
+ start[2] -= 2; /* y */
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_1_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the irregular selection to the upper right hand corner */
+ start[0] += 5; /* x */
+ start[2] += 5; /* y */
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_2_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 3; /* x */
+ start[1] = 5; /* y */
+ start[2] = 6; /* z */
+
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+ stride[2] = 20; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 4; /* x */
+ block[1] = 1; /* y */
+ block[2] = 2; /* z */
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_0_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the starting point to the origin */
+ start[0] -= 1; /* x */
+ start[2] -= 2; /* y */
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_1_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the irregular selection to the upper right hand corner */
+ start[0] += 5; /* x */
+ start[2] += 5; /* y */
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_2_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 5; /* x */
+ start[1] = 5; /* y */
+ start[2] = 3; /* z */
+
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+ stride[2] = 20; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 2; /* x */
+ block[1] = 1; /* y */
+ block[2] = 2; /* z */
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_0_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the starting point to the origin */
+ start[0] -= 1; /* x */
+ start[2] -= 2; /* y */
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_1_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the irregular selection to the upper right hand corner */
+ start[0] += 5; /* x */
+ start[2] += 5; /* y */
+ ret = H5Sselect_hyperslab(small_cube_xz_slice_2_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* QAK: Start here.
+ */
+ /* Create the 10 X 10 X 10 dataspaces for the hyperslabs parallel to the yz axis */
+ small_cube_yz_slice_0_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_yz_slice_0_sid, FAIL, "H5Screate_simple");
+
+ small_cube_yz_slice_1_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_yz_slice_1_sid, FAIL, "H5Screate_simple");
+
+ small_cube_yz_slice_2_sid = H5Screate_simple(3, small_cube_dims, NULL);
+ CHECK(small_cube_yz_slice_2_sid, FAIL, "H5Screate_simple");
+
+ start[0] = 8; /* x */
+ start[1] = 2; /* y */
+ start[2] = 3; /* z */
+
+ stride[0] = 20; /* x -- large enough that there will only be one slice */
+ stride[1] = 20; /* y */
+ stride[2] = 20; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 1; /* x */
+ block[1] = 2; /* y */
+ block[2] = 4; /* z */
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_0_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the starting point to the origin */
+ start[1] -= 1; /* x */
+ start[2] -= 2; /* y */
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_1_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the irregular selection to the upper right hand corner */
+ start[0] += 5; /* x */
+ start[2] += 5; /* y */
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_2_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 8; /* x */
+ start[1] = 3; /* y */
+ start[2] = 6; /* z */
+
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+ stride[2] = 20; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 1; /* x */
+ block[1] = 4; /* y */
+ block[2] = 2; /* z */
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_0_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the starting point to the origin */
+ start[1] -= 1; /* x */
+ start[2] -= 2; /* y */
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_1_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the irregular selection to the upper right hand corner */
+ start[0] += 5; /* x */
+ start[2] += 5; /* y */
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_2_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 8; /* x */
+ start[1] = 5; /* y */
+ start[2] = 3; /* z */
+
+ stride[0] = 20; /* x */
+ stride[1] = 20; /* y */
+ stride[2] = 20; /* z */
+
+ count[0] = 1; /* x */
+ count[1] = 1; /* y */
+ count[2] = 1; /* z */
+
+ block[0] = 1; /* x */
+ block[1] = 2; /* y */
+ block[2] = 2; /* z */
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_0_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the starting point to the origin */
+ start[1] -= 1; /* x */
+ start[2] -= 2; /* y */
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_1_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* move the irregular selection to the upper right hand corner */
+ start[0] += 5; /* x */
+ start[2] += 5; /* y */
+ ret = H5Sselect_hyperslab(small_cube_yz_slice_2_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* setup is done -- run the tests: */
+
+ /* Compare against "xy" selection */
+ check = H5Sselect_shape_same(small_cube_xy_slice_0_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_xy_slice_1_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_xy_slice_2_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "xz" selection */
+ check = H5Sselect_shape_same(small_cube_xz_slice_0_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_xz_slice_1_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_xz_slice_2_sid, small_square_sid);
+ VERIFY(check, FALSE, "H5Sselect_shape_same");
+
+ /* Compare against "yz" selection */
+ check = H5Sselect_shape_same(small_cube_yz_slice_0_sid, small_square_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_yz_slice_1_sid, small_square_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ check = H5Sselect_shape_same(small_cube_yz_slice_2_sid, small_square_sid);
+ VERIFY(check, TRUE, "H5Sselect_shape_same");
+
+ /* Close dataspaces */
+ ret = H5Sclose(small_square_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xy_slice_0_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xy_slice_1_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xy_slice_2_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xz_slice_0_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xz_slice_1_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_xz_slice_2_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_yz_slice_0_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_yz_slice_1_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_cube_yz_slice_2_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* test_shape_same_dr__smoke_check_3() */
+
+/****************************************************************
+**
+** test_shape_same_dr__smoke_check_4():
+**
+** Create a square, 2-D dataspace (10 X 10), and select
+** the entire space.
+**
+** Similarly, create 3-D and 4-D dataspaces:
+**
+** (1 X 10 X 10)
+** (10 X 1 X 10)
+** (10 X 10 X 1)
+** (10 X 10 X 10)
+**
+** (1 X 1 X 10 X 10)
+** (1 X 10 X 1 X 10)
+** (1 X 10 X 10 X 1)
+** (10 X 1 X 1 X 10)
+** (10 X 1 X 10 X 1)
+** (10 X 10 X 1 X 1)
+** (10 X 1 X 10 X 10)
+**
+** And select these entire spaces as well.
+**
+** Compare the 2-D space against all the other spaces
+** with H5Sselect_shape_same(). The (1 X 10 X 10) &
+** (1 X 1 X 10 X 10) should return TRUE. All others
+** should return FALSE.
+**
+****************************************************************/
+static void
+test_shape_same_dr__smoke_check_4(void)
+{
+    /* Extents of the 3-D spaces; only the first (1 X 10 X 10) has the
+     * same shape as the fully selected 10 X 10 square.
+     */
+    static const hsize_t three_d_dims[4][3] = {
+        {1, 10, 10}, {10, 1, 10}, {10, 10, 1}, {10, 10, 10}};
+    /* Extents of the 4-D spaces; only the first (1 X 1 X 10 X 10) has the
+     * same shape as the fully selected 10 X 10 square.
+     */
+    static const hsize_t four_d_dims[7][4] = {
+        {1, 1, 10, 10}, {1, 10, 1, 10}, {1, 10, 10, 1}, {10, 1, 1, 10},
+        {10, 1, 10, 1}, {10, 10, 1, 1}, {10, 1, 10, 10}};
+    hsize_t square_dims[2] = {10, 10};
+    hid_t square_sid;
+    hid_t three_d_sids[4];
+    hid_t four_d_sids[7];
+    int i;
+    htri_t check; /* Shape comparison return value */
+    herr_t ret;   /* Generic return value */
+
+    MESSAGE(7, ("	Smoke check 4: Spaces of different dimension but same size.\n"));
+
+    /* Create the 10 x 10 dataspace -- fully selected on creation */
+    square_sid = H5Screate_simple(2, square_dims, NULL);
+    CHECK(square_sid, FAIL, "H5Screate_simple");
+
+    /* Create the four 3-D dataspaces -- fully selected on creation */
+    for (i = 0; i < 4; i++) {
+        three_d_sids[i] = H5Screate_simple(3, three_d_dims[i], NULL);
+        CHECK(three_d_sids[i], FAIL, "H5Screate_simple");
+    }
+
+    /* Create the seven 4-D dataspaces -- fully selected on creation */
+    for (i = 0; i < 7; i++) {
+        four_d_sids[i] = H5Screate_simple(4, four_d_dims[i], NULL);
+        CHECK(four_d_sids[i], FAIL, "H5Screate_simple");
+    }
+
+    /* setup is done -- run the tests: */
+
+    /* Compare the square against the 3-D spaces -- only the (1 X 10 X 10)
+     * space should match.
+     */
+    for (i = 0; i < 4; i++) {
+        check = H5Sselect_shape_same(three_d_sids[i], square_sid);
+        VERIFY(check, (i == 0) ? TRUE : FALSE, "H5Sselect_shape_same");
+    }
+
+    /* Compare the square against the 4-D spaces -- only the
+     * (1 X 1 X 10 X 10) space should match.
+     */
+    for (i = 0; i < 7; i++) {
+        check = H5Sselect_shape_same(four_d_sids[i], square_sid);
+        VERIFY(check, (i == 0) ? TRUE : FALSE, "H5Sselect_shape_same");
+    }
+
+    /* Close dataspaces */
+    ret = H5Sclose(square_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    for (i = 0; i < 4; i++) {
+        ret = H5Sclose(three_d_sids[i]);
+        CHECK(ret, FAIL, "H5Sclose");
+    }
+
+    for (i = 0; i < 7; i++) {
+        ret = H5Sclose(four_d_sids[i]);
+        CHECK(ret, FAIL, "H5Sclose");
+    }
+} /* test_shape_same_dr__smoke_check_4() */
+
+/****************************************************************
+**
+** test_shape_same_dr__full_space_vs_slice(): Tests selection
+** of a full n-cube dataspace vs an n-dimensional slice
+** of an m-cube (m > n) in a call to H5Sselect_shape_same().
+** Note that this test does not require the n-cube and the
+** n-dimensional slice to have the same rank (although
+** H5Sselect_shape_same() should always return FALSE if
+** they don't).
+**
+** Per Quincey's suggestion, only test up to 5 dimensional
+** spaces.
+**
+****************************************************************/
+static void
+test_shape_same_dr__full_space_vs_slice(int test_num, int small_rank, int large_rank, int offset,
+                                        hsize_t edge_size, hbool_t dim_selected[], hbool_t expected_result)
+{
+    char desc_short[128];
+    char desc_long[256];
+    int i;
+    hid_t full_cube_sid;   /* the fully selected hyper cube */
+    hid_t sliced_cube_sid; /* the hyper cube in which a slice is selected */
+    hsize_t dims[SS_DR_MAX_RANK];
+    hsize_t start[SS_DR_MAX_RANK];
+    hsize_t stride[SS_DR_MAX_RANK];
+    hsize_t count[SS_DR_MAX_RANK];
+    hsize_t block[SS_DR_MAX_RANK];
+    htri_t check; /* Shape comparison return value */
+    herr_t ret;   /* Generic return value */
+
+    HDassert(0 < small_rank);
+    HDassert(small_rank <= large_rank);
+    HDassert(large_rank <= SS_DR_MAX_RANK);
+    HDassert(0 <= offset);
+    HDassert(offset < large_rank);
+    HDassert(edge_size > 0);
+    HDassert(edge_size <= 1000);
+
+    HDsnprintf(desc_short, sizeof(desc_short), "\tn-cube slice through m-cube (n <= m) test %d.\n",
+               test_num);
+    MESSAGE(7, ("%s", desc_short));
+
+    /* This statement must be updated if SS_DR_MAX_RANK is changed */
+    HDsnprintf(desc_long, sizeof(desc_long),
+               "\t\tranks: %d/%d offset: %d dim_selected: %d/%d/%d/%d/%d.\n", small_rank, large_rank, offset,
+               (int)dim_selected[0], (int)dim_selected[1], (int)dim_selected[2], (int)dim_selected[3],
+               (int)dim_selected[4]);
+    MESSAGE(7, ("%s", desc_long));
+
+    /* every cube, regardless of rank, has the same edge length */
+    for (i = 0; i < SS_DR_MAX_RANK; i++)
+        dims[i] = edge_size;
+
+    /* Create the small n-cube (fully selected on creation) */
+    full_cube_sid = H5Screate_simple(small_rank, dims, NULL);
+    CHECK(full_cube_sid, FAIL, "H5Screate_simple");
+
+    /* Create the large n-cube through which the slice is taken */
+    sliced_cube_sid = H5Screate_simple(large_rank, dims, NULL);
+    CHECK(sliced_cube_sid, FAIL, "H5Screate_simple");
+
+    /* Set up start, stride, count, and block for the hyperslab selection:
+     * selected dimensions span the whole edge, unselected ones are pinned
+     * to a single element at the supplied offset.
+     */
+    for (i = 0; i < SS_DR_MAX_RANK; i++) {
+        stride[i] = 2 * edge_size; /* a bit silly in this case */
+        count[i] = 1;
+        start[i] = dim_selected[i] ? 0 : (hsize_t)offset;
+        block[i] = dim_selected[i] ? edge_size : 1;
+    }
+
+    /* since large rank may be less than SS_DR_MAX_RANK, we may not
+     * use the entire start, stride, count, and block arrays. This
+     * is a problem, since it is inconvenient to set up the dim_selected
+     * array to reflect the large rank, and thus if large_rank <
+     * SS_DR_MAX_RANK, we need to hide the lower index entries
+     * from H5Sselect_hyperslab().
+     *
+     * Do this by passing the address of the first valid entry of each
+     * array to H5Sselect_hyperslab() instead of the array base addresses.
+     */
+    i = SS_DR_MAX_RANK - large_rank;
+    HDassert(i >= 0);
+
+    /* select the hyperslab */
+    ret = H5Sselect_hyperslab(sliced_cube_sid, H5S_SELECT_SET, &start[i], &stride[i], &count[i], &block[i]);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* setup is done -- run the test: */
+    check = H5Sselect_shape_same(full_cube_sid, sliced_cube_sid);
+    VERIFY(check, expected_result, "H5Sselect_shape_same");
+
+    /* Close dataspaces */
+    ret = H5Sclose(full_cube_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Sclose(sliced_cube_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_shape_same_dr__full_space_vs_slice() */
+
+/****************************************************************
+**
+** test_shape_same_dr__run_full_space_vs_slice_tests():
+**
+** Run the test_shape_same_dr__full_space_vs_slice() test
+** over a variety of ranks and offsets.
+**
+** At present, we test H5Sselect_shape_same() with
+** fully selected 1, 2, 3, and 4 cubes as one parameter, and
+** 1, 2, 3, and 4 dimensional slices through a n-cube of rank
+** no more than 5 (and at least the rank of the slice).
+** We stop at rank 5, as Quincey suggested that it would be
+** sufficient.
+**
+** All the n-cubes will have lengths of the same size, so
+** H5Sselect_shape_same() should return true iff:
+**
+** 1) the rank for the fully selected n cube equals the
+** number of dimensions selected in the slice through the
+** m-cube (m >= n).
+**
+** 2) The dimensions selected in the slice through the m-cube
+** are the dimensions with the most quickly changing
+** indices.
+**
+****************************************************************/
+static void
+test_shape_same_dr__run_full_space_vs_slice_tests(void)
+{
+    hbool_t dim_selected[5];
+    hbool_t expected_result;
+    int i, j;
+    int v, w, x, y, z;
+    int test_num = 0;
+    int small_rank;
+    int large_rank;
+    hsize_t edge_size = 10;
+
+    /* iterate over all (small_rank, large_rank) pairs and all 2^5 possible
+     * settings of dim_selected[] (restricted to the dimensions that exist
+     * at the current large_rank).
+     */
+    for (large_rank = 1; large_rank <= 5; large_rank++) {
+        for (small_rank = 1; small_rank <= large_rank; small_rank++) {
+            v = 0;
+            do {
+                dim_selected[0] = (v == 0) ? FALSE : TRUE;
+
+                w = 0;
+                do {
+                    dim_selected[1] = (w == 0) ? FALSE : TRUE;
+
+                    x = 0;
+                    do {
+                        dim_selected[2] = (x == 0) ? FALSE : TRUE;
+
+                        y = 0;
+                        do {
+                            dim_selected[3] = (y == 0) ? FALSE : TRUE;
+
+                            z = 0;
+                            do {
+                                dim_selected[4] = (z == 0) ? FALSE : TRUE;
+
+                                /* compute the expected result: the shapes match
+                                 * iff exactly the small_rank fastest changing
+                                 * dimensions are selected.
+                                 */
+                                expected_result = TRUE;
+                                for (i = 0, j = 4; (i < large_rank) && expected_result; i++, j--) {
+                                    if (i < small_rank)
+                                        expected_result = dim_selected[j] ? TRUE : FALSE;
+                                    else
+                                        expected_result = dim_selected[j] ? FALSE : TRUE;
+                                }
+
+                                /* everything is set up -- run the tests at
+                                 * three different offsets
+                                 */
+
+                                test_shape_same_dr__full_space_vs_slice(test_num++, small_rank, large_rank, 0,
+                                                                        edge_size, dim_selected,
+                                                                        expected_result);
+
+                                test_shape_same_dr__full_space_vs_slice(test_num++, small_rank, large_rank,
+                                                                        large_rank / 2, edge_size,
+                                                                        dim_selected, expected_result);
+
+                                test_shape_same_dr__full_space_vs_slice(test_num++, small_rank, large_rank,
+                                                                        large_rank - 1, edge_size,
+                                                                        dim_selected, expected_result);
+
+                                z++;
+                            } while ((z < 2) && (large_rank >= 1));
+
+                            y++;
+                        } while ((y < 2) && (large_rank >= 2));
+
+                        x++;
+                    } while ((x < 2) && (large_rank >= 3));
+
+                    w++;
+                } while ((w < 2) && (large_rank >= 4));
+
+                v++;
+            } while ((v < 2) && (large_rank >= 5));
+        } /* end for */
+    } /* end for */
+} /* test_shape_same_dr__run_full_space_vs_slice_tests() */
+
+/****************************************************************
+**
+** test_shape_same_dr__checkerboard(): Tests selection of a
+** "checker board" subset of a full n-cube dataspace vs
+** a "checker board" n-dimensional slice of an m-cube (m > n)
+** in a call to H5Sselect_shape_same().
+**
+** Note that this test does not require the n-cube and the
+** n-dimensional slice to have the same rank (although
+** H5Sselect_shape_same() should always return FALSE if
+** they don't).
+**
+** Per Quincey's suggestion, only test up to 5 dimensional
+** spaces.
+**
+****************************************************************/
+static void
+test_shape_same_dr__checkerboard(int test_num, int small_rank, int large_rank, int offset, hsize_t edge_size,
+                                 hsize_t checker_size, hbool_t dim_selected[], hbool_t expected_result)
+{
+    char test_desc_0[128];
+    char test_desc_1[256];
+    int i;
+    int dims_selected = 0;
+    hid_t n_cube_0_sid; /* the checker board selected
+                         * hyper cube
+                         */
+    hid_t n_cube_1_sid; /* the hyper cube in which a
+                         * checkerboard slice is selected
+                         */
+    hsize_t dims[SS_DR_MAX_RANK];
+    hsize_t base_start[2];
+    hsize_t start[SS_DR_MAX_RANK];
+    hsize_t *start_ptr;
+    hsize_t base_stride[2];
+    hsize_t stride[SS_DR_MAX_RANK];
+    hsize_t *stride_ptr;
+    hsize_t base_count[2];
+    hsize_t count[SS_DR_MAX_RANK];
+    hsize_t *count_ptr;
+    hsize_t base_block[2];
+    hsize_t block[SS_DR_MAX_RANK];
+    hsize_t *block_ptr;
+    htri_t check; /* Shape comparison return value */
+    herr_t ret;   /* Generic return value */
+
+    HDassert(0 < small_rank);
+    HDassert(small_rank <= large_rank);
+    HDassert(large_rank <= SS_DR_MAX_RANK);
+    HDassert(0 < checker_size);
+    HDassert(checker_size <= edge_size);
+    HDassert(edge_size <= 1000);
+    HDassert(0 <= offset);
+    HDassert(offset < (int)edge_size);
+
+    /* count how many of the large rank's dimensions carry the checkerboard */
+    for (i = SS_DR_MAX_RANK - large_rank; i < SS_DR_MAX_RANK; i++)
+        if (dim_selected[i] == TRUE)
+            dims_selected++;
+
+    HDassert(dims_selected >= 0);
+    HDassert(dims_selected <= large_rank);
+
+    HDsnprintf(test_desc_0, sizeof(test_desc_0),
+               "\tcheckerboard n-cube slice through m-cube (n <= m) test %d.\n", test_num);
+    MESSAGE(7, ("%s", test_desc_0));
+
+    /* This statement must be updated if SS_DR_MAX_RANK is changed */
+    HDsnprintf(test_desc_1, sizeof(test_desc_1),
+               "\tranks: %d/%d edge/chkr size: %d/%d offset: %d dim_selected: %d/%d/%d/%d/%d:%d.\n",
+               small_rank, large_rank, (int)edge_size, (int)checker_size, offset, (int)dim_selected[0],
+               (int)dim_selected[1], (int)dim_selected[2], (int)dim_selected[3], (int)dim_selected[4],
+               dims_selected);
+    MESSAGE(7, ("%s", test_desc_1));
+
+    /* copy the edge size into the dims array */
+    for (i = 0; i < SS_DR_MAX_RANK; i++)
+        dims[i] = edge_size;
+
+    /* Create the small n-cube */
+    n_cube_0_sid = H5Screate_simple(small_rank, dims, NULL);
+    CHECK(n_cube_0_sid, FAIL, "H5Screate_simple");
+
+    /* Select a "checkerboard" pattern in the small n-cube.
+     *
+     * In the 1-D case, the "checkerboard" would look like this:
+     *
+     *          * * - - * * - - * *
+     *
+     * and in the 2-D case, it would look like this:
+     *
+     *          * * - - * * - - * *
+     *          * * - - * * - - * *
+     *          - - * * - - * * - -
+     *          - - * * - - * * - -
+     *          * * - - * * - - * *
+     *          * * - - * * - - * *
+     *          - - * * - - * * - -
+     *          - - * * - - * * - -
+     *          * * - - * * - - * *
+     *          * * - - * * - - * *
+     *
+     * In both cases, asterisks indicate selected elements,
+     * and dashes indicate unselected elements.
+     *
+     * 3-D and 4-D ascii art is somewhat painful, so I'll
+     * leave those selections to your imagination. :-)
+     *
+     * Note, that since the edge_size and checker_size are
+     * parameters that are passed in, the selection need
+     * not look exactly like the selection shown above.
+     * At present, the function allows checker sizes that
+     * are not even divisors of the edge size -- thus
+     * something like the following is also possible:
+     *
+     *          * * * - - - * * * -
+     *          * * * - - - * * * -
+     *          * * * - - - * * * -
+     *          - - - * * * - - - *
+     *          - - - * * * - - - *
+     *          - - - * * * - - - *
+     *          * * * - - - * * * -
+     *          * * * - - - * * * -
+     *          * * * - - - * * * -
+     *          - - - * * * - - - *
+     *
+     * As the above pattern can't be selected in one
+     * call to H5Sselect_hyperslab(), and since the
+     * values in the start, stride, count, and block
+     * arrays will be repeated over all entries in
+     * the selected space case, and over all selected
+     * dimensions in the selected hyperslab case, we
+     * compute these values first and store them in
+     * in the base_start, base_stride, base_count,
+     * and base_block arrays.
+     */
+
+    base_start[0] = 0;
+    base_start[1] = checker_size;
+
+    base_stride[0] = 2 * checker_size;
+    base_stride[1] = 2 * checker_size;
+
+    /* Note that the following computation depends on the C99
+     * requirement that integer division discard any fraction
+     * (truncation towards zero) to function correctly. As we
+     * now require C99, this shouldn't be a problem, but noting
+     * it may save us some pain if we are ever obliged to support
+     * pre-C99 compilers again.
+     */
+
+    base_count[0] = edge_size / (checker_size * 2);
+    if ((edge_size % (checker_size * 2)) > 0)
+        base_count[0]++;
+
+    base_count[1] = (edge_size - checker_size) / (checker_size * 2);
+    if (((edge_size - checker_size) % (checker_size * 2)) > 0)
+        base_count[1]++;
+
+    base_block[0] = checker_size;
+    base_block[1] = checker_size;
+
+    /* now setup start, stride, count, and block arrays for
+     * the first call to H5Sselect_hyperslab().
+     */
+    for (i = 0; i < SS_DR_MAX_RANK; i++) {
+        start[i] = base_start[0];
+        stride[i] = base_stride[0];
+        count[i] = base_count[0];
+        block[i] = base_block[0];
+    } /* end for */
+
+    ret = H5Sselect_hyperslab(n_cube_0_sid, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* if small_rank == 1, or if edge_size == checker_size, we
+     * are done, as either there is no added dimension in which
+     * to place offset selected "checkers", or the hyperslab is
+     * completely occupied by one "checker".
+     *
+     * Otherwise, set up start, stride, count and block, and
+     * make the additional selection.
+     */
+
+    if ((small_rank > 1) && (checker_size < edge_size)) {
+        for (i = 0; i < SS_DR_MAX_RANK; i++) {
+            start[i] = base_start[1];
+            stride[i] = base_stride[1];
+            count[i] = base_count[1];
+            block[i] = base_block[1];
+        } /* end for */
+
+        ret = H5Sselect_hyperslab(n_cube_0_sid, H5S_SELECT_OR, start, stride, count, block);
+        CHECK(ret, FAIL, "H5Sselect_hyperslab");
+    } /* end if */
+
+    /* Weirdness alert:
+     *
+     * Some how, it seems that selections can extend beyond the
+     * boundaries of the target dataspace -- hence the following
+     * code to manually clip the selection back to the dataspace
+     * proper.
+     */
+    for (i = 0; i < SS_DR_MAX_RANK; i++) {
+        start[i] = 0;
+        stride[i] = edge_size;
+        count[i] = 1;
+        block[i] = edge_size;
+    } /* end for */
+
+    ret = H5Sselect_hyperslab(n_cube_0_sid, H5S_SELECT_AND, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Create the large n-cube */
+    n_cube_1_sid = H5Screate_simple(large_rank, dims, NULL);
+    CHECK(n_cube_1_sid, FAIL, "H5Screate_simple");
+
+    /* Now select the checkerboard selection in the (possibly larger) n-cube.
+     *
+     * Since we have already calculated the base start, stride, count,
+     * and block, re-use the values in setting up start, stride, count,
+     * and block.
+     */
+    for (i = 0; i < SS_DR_MAX_RANK; i++) {
+        if (dim_selected[i]) {
+            start[i] = base_start[0];
+            stride[i] = base_stride[0];
+            count[i] = base_count[0];
+            block[i] = base_block[0];
+        } /* end if */
+        else {
+            /* unselected dimensions are pinned to one element at offset */
+            start[i] = (hsize_t)offset;
+            stride[i] = (hsize_t)(2 * edge_size);
+            count[i] = 1;
+            block[i] = 1;
+        } /* end else */
+    }     /* end for */
+
+    /* Since large rank may be less than SS_DR_MAX_RANK, we may not
+     * use the entire start, stride, count, and block arrays. This
+     * is a problem, since it is inconvenient to set up the dim_selected
+     * array to reflect the large rank, and thus if large_rank <
+     * SS_DR_MAX_RANK, we need to hide the lower index entries
+     * from H5Sselect_hyperslab().
+     *
+     * Do this by setting up pointers to the first valid entry in start,
+     * stride, count, and block below, and pass these pointers in
+     * to H5Sselect_hyperslab() instead of the array base addresses.
+     */
+
+    i = SS_DR_MAX_RANK - large_rank;
+    HDassert(i >= 0);
+
+    start_ptr = &(start[i]);
+    stride_ptr = &(stride[i]);
+    count_ptr = &(count[i]);
+    block_ptr = &(block[i]);
+
+    /* select the hyperslab */
+    ret = H5Sselect_hyperslab(n_cube_1_sid, H5S_SELECT_SET, start_ptr, stride_ptr, count_ptr, block_ptr);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* As before, if the number of dimensions selected is less than or
+     * equal to 1, or if edge_size == checker_size, we are done, as
+     * either there is no added dimension in which to place offset selected
+     * "checkers", or the hyperslab is completely occupied by one
+     * "checker".
+     *
+     * Otherwise, set up start, stride, count and block, and
+     * make the additional selection.
+     */
+    if ((dims_selected > 1) && (checker_size < edge_size)) {
+        for (i = 0; i < SS_DR_MAX_RANK; i++) {
+            if (dim_selected[i]) {
+                start[i] = base_start[1];
+                stride[i] = base_stride[1];
+                count[i] = base_count[1];
+                block[i] = base_block[1];
+            } /* end if */
+            else {
+                start[i] = (hsize_t)offset;
+                stride[i] = (hsize_t)(2 * edge_size);
+                count[i] = 1;
+                block[i] = 1;
+            } /* end else */
+        }     /* end for */
+
+        ret = H5Sselect_hyperslab(n_cube_1_sid, H5S_SELECT_OR, start_ptr, stride_ptr, count_ptr, block_ptr);
+        CHECK(ret, FAIL, "H5Sselect_hyperslab");
+    } /* end if */
+
+    /* Weirdness alert:
+     *
+     * Again, it seems that selections can extend beyond the
+     * boundaries of the target dataspace -- hence the following
+     * code to manually clip the selection back to the dataspace
+     * proper.
+     */
+    for (i = 0; i < SS_DR_MAX_RANK; i++) {
+        start[i] = 0;
+        stride[i] = edge_size;
+        count[i] = 1;
+        block[i] = edge_size;
+    } /* end for */
+
+    ret = H5Sselect_hyperslab(n_cube_1_sid, H5S_SELECT_AND, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* setup is done -- run the test: */
+    check = H5Sselect_shape_same(n_cube_0_sid, n_cube_1_sid);
+    VERIFY(check, expected_result, "H5Sselect_shape_same");
+
+    /* Close dataspaces */
+    ret = H5Sclose(n_cube_0_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Sclose(n_cube_1_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_shape_same_dr__checkerboard() */
+
+/****************************************************************
+**
+** test_shape_same_dr__run_checkerboard_tests():
+**
+** In this set of tests, we test H5Sselect_shape_same()
+** with a "checkerboard" selection of 1, 2, 3, and 4 cubes as
+** one parameter, and 1, 2, 3, and 4 dimensional checkerboard
+** slices through a n-cube of rank no more than 5 (and at
+** least the rank of the slice).
+**
+** All the n-cubes will have lengths of the same size, so
+** H5Sselect_shape_same() should return true iff:
+**
+** 1) the rank of the n cube equals the number of dimensions
+** selected in the checker board slice through the m-cube
+** (m >= n).
+**
+** 2) The dimensions selected in the checkerboard slice
+** through the m-cube are the dimensions with the most
+** quickly changing indices.
+**
+****************************************************************/
+static void
+test_shape_same_dr__run_checkerboard_tests(void)
+{
+    /* (offset, edge_size, checker_size) triples to exercise; covers checker
+     * sizes that do and do not evenly divide the edge size, and the
+     * degenerate case where one checker occupies the whole hyperslab.
+     */
+    static const struct {
+        int offset;
+        hsize_t edge_size;
+        hsize_t checker_size;
+    } cb_config[] = {
+        {0, 16, 1}, {5, 16, 1}, {15, 16, 1}, /* edge size 16, checker size 1 */
+        {0, 10, 2}, {5, 10, 2}, {9, 10, 2},  /* edge size 10, checker size 2 */
+        {0, 10, 3}, {5, 10, 3}, {9, 10, 3},  /* edge size 10, checker size 3 */
+        {0, 8, 8},  {4, 8, 8},  {7, 8, 8},   /* edge size 8, checker size 8 */
+    };
+    hbool_t dim_selected[5];
+    hbool_t expected_result;
+    int i, j;
+    int v, w, x, y, z;
+    int test_num = 0;
+    int small_rank;
+    int large_rank;
+    size_t cfg;
+
+    for (large_rank = 1; large_rank <= 5; large_rank++) {
+        for (small_rank = 1; small_rank <= large_rank; small_rank++) {
+            v = 0;
+            do {
+                dim_selected[0] = (v == 0) ? FALSE : TRUE;
+
+                w = 0;
+                do {
+                    dim_selected[1] = (w == 0) ? FALSE : TRUE;
+
+                    x = 0;
+                    do {
+                        dim_selected[2] = (x == 0) ? FALSE : TRUE;
+
+                        y = 0;
+                        do {
+                            dim_selected[3] = (y == 0) ? FALSE : TRUE;
+
+                            z = 0;
+                            do {
+                                dim_selected[4] = (z == 0) ? FALSE : TRUE;
+
+                                /* compute the expected result: the shapes match
+                                 * iff exactly the small_rank fastest changing
+                                 * dimensions are selected.
+                                 */
+                                expected_result = TRUE;
+                                for (i = 0, j = 4; (i < large_rank) && expected_result; i++, j--) {
+                                    if (i < small_rank)
+                                        expected_result = dim_selected[j] ? TRUE : FALSE;
+                                    else
+                                        expected_result = dim_selected[j] ? FALSE : TRUE;
+                                }
+
+                                /* everything is set up -- run the tests over all
+                                 * configured (offset, edge_size, checker_size)
+                                 * combinations
+                                 */
+                                for (cfg = 0; cfg < sizeof(cb_config) / sizeof(cb_config[0]); cfg++)
+                                    test_shape_same_dr__checkerboard(
+                                        test_num++, small_rank, large_rank, cb_config[cfg].offset,
+                                        cb_config[cfg].edge_size, cb_config[cfg].checker_size, dim_selected,
+                                        expected_result);
+
+                                z++;
+                            } while ((z < 2) && (large_rank >= 1));
+
+                            y++;
+                        } while ((y < 2) && (large_rank >= 2));
+
+                        x++;
+                    } while ((x < 2) && (large_rank >= 3));
+
+                    w++;
+                } while ((w < 2) && (large_rank >= 4));
+
+                v++;
+            } while ((v < 2) && (large_rank >= 5));
+        } /* end for */
+    } /* end for */
+} /* test_shape_same_dr__run_checkerboard_tests() */
+
+/****************************************************************
+**
+**  test_shape_same_dr__irregular():
+**
+**      Tests selection of an "irregular" subset of a full
+**      n-cube dataspace vs an identical "irregular" subset
+**      of an n-dimensional slice of an m-cube (m > n)
+**      in a call to H5Sselect_shape_same().
+**
+**      Note that this test does not require the n-cube and the
+**      n-dimensional slice to have the same rank (although
+**      H5Sselect_shape_same() should always return FALSE if
+**      they don't).
+**
+**      Parameters:
+**          test_num        index of this test (diagnostics only)
+**          small_rank      rank of the small n-cube
+**          large_rank      rank of the large m-cube (>= small_rank)
+**          pattern_offset  displacement of the irregular pattern
+**                          within each cube (must lie in [-2, 2])
+**          slice_offset    coordinate of the slice in the unselected
+**                          dimensions of the m-cube (in [0, edge_size))
+**          dim_selected    flags marking which of the m-cube's
+**                          dimensions the slice passes through
+**                          (array of SS_DR_MAX_RANK entries)
+**          expected_result expected return of H5Sselect_shape_same()
+**
+****************************************************************/
+static void
+test_shape_same_dr__irregular(int test_num, int small_rank, int large_rank, int pattern_offset,
+                              int slice_offset, hbool_t dim_selected[], hbool_t expected_result)
+{
+    char  test_desc_0[128];
+    char  test_desc_1[256];
+    int   edge_size = 10;
+    int   i;
+    int   j;
+    int   k;
+    int   dims_selected = 0;
+    hid_t n_cube_0_sid; /* the hyper cube containing
+                         * an irregular selection
+                         */
+    hid_t n_cube_1_sid; /* the hyper cube in which a
+                         * slice contains an irregular
+                         * selection.
+                         */
+    hsize_t dims[SS_DR_MAX_RANK];
+
+    /* Five hyperslabs, one per axis; OR-ed together below they form
+     * the "irregular" pattern described in the body of the function.
+     */
+    hsize_t start_0[SS_DR_MAX_RANK]  = {2, 2, 2, 2, 5};
+    hsize_t stride_0[SS_DR_MAX_RANK] = {10, 10, 10, 10, 10};
+    hsize_t count_0[SS_DR_MAX_RANK]  = {1, 1, 1, 1, 1};
+    hsize_t block_0[SS_DR_MAX_RANK]  = {2, 2, 2, 2, 3};
+
+    hsize_t start_1[SS_DR_MAX_RANK]  = {2, 2, 2, 5, 2};
+    hsize_t stride_1[SS_DR_MAX_RANK] = {10, 10, 10, 10, 10};
+    hsize_t count_1[SS_DR_MAX_RANK]  = {1, 1, 1, 1, 1};
+    hsize_t block_1[SS_DR_MAX_RANK]  = {2, 2, 2, 3, 2};
+
+    hsize_t start_2[SS_DR_MAX_RANK]  = {2, 2, 5, 2, 2};
+    hsize_t stride_2[SS_DR_MAX_RANK] = {10, 10, 10, 10, 10};
+    hsize_t count_2[SS_DR_MAX_RANK]  = {1, 1, 1, 1, 1};
+    hsize_t block_2[SS_DR_MAX_RANK]  = {2, 2, 3, 2, 2};
+
+    hsize_t start_3[SS_DR_MAX_RANK]  = {2, 5, 2, 2, 2};
+    hsize_t stride_3[SS_DR_MAX_RANK] = {10, 10, 10, 10, 10};
+    hsize_t count_3[SS_DR_MAX_RANK]  = {1, 1, 1, 1, 1};
+    hsize_t block_3[SS_DR_MAX_RANK]  = {2, 3, 2, 2, 2};
+
+    hsize_t start_4[SS_DR_MAX_RANK]  = {5, 2, 2, 2, 2};
+    hsize_t stride_4[SS_DR_MAX_RANK] = {10, 10, 10, 10, 10};
+    hsize_t count_4[SS_DR_MAX_RANK]  = {1, 1, 1, 1, 1};
+    hsize_t block_4[SS_DR_MAX_RANK]  = {3, 2, 2, 2, 2};
+
+    /* Clipping hyperslab: selects the entire 10 x ... x 10 extent, so
+     * AND-ing with it trims any part of the (possibly offset) pattern
+     * that falls outside the dataspace.
+     */
+    hsize_t clip_start[SS_DR_MAX_RANK]  = {0, 0, 0, 0, 0};
+    hsize_t clip_stride[SS_DR_MAX_RANK] = {10, 10, 10, 10, 10};
+    hsize_t clip_count[SS_DR_MAX_RANK]  = {1, 1, 1, 1, 1};
+    hsize_t clip_block[SS_DR_MAX_RANK]  = {10, 10, 10, 10, 10};
+
+    hsize_t *(starts[SS_DR_MAX_RANK])  = {start_0, start_1, start_2, start_3, start_4};
+    hsize_t *(strides[SS_DR_MAX_RANK]) = {stride_0, stride_1, stride_2, stride_3, stride_4};
+    hsize_t *(counts[SS_DR_MAX_RANK])  = {count_0, count_1, count_2, count_3, count_4};
+    hsize_t *(blocks[SS_DR_MAX_RANK])  = {block_0, block_1, block_2, block_3, block_4};
+
+    hsize_t  start[SS_DR_MAX_RANK];
+    hsize_t *start_ptr;
+    hsize_t  stride[SS_DR_MAX_RANK];
+    hsize_t *stride_ptr;
+    hsize_t  count[SS_DR_MAX_RANK];
+    hsize_t *count_ptr;
+    hsize_t  block[SS_DR_MAX_RANK];
+    hsize_t *block_ptr;
+    htri_t   check; /* Shape comparison return value */
+    herr_t   ret;   /* Generic return value */
+
+    HDassert(0 < small_rank);
+    HDassert(small_rank <= large_rank);
+    HDassert(large_rank <= SS_DR_MAX_RANK);
+    HDassert(9 <= edge_size);
+    HDassert(edge_size <= 1000);
+    HDassert(0 <= slice_offset);
+    HDassert(slice_offset < edge_size);
+    HDassert(-2 <= pattern_offset);
+    HDassert(pattern_offset <= 2);
+
+    for (i = SS_DR_MAX_RANK - large_rank; i < SS_DR_MAX_RANK; i++)
+        if (dim_selected[i] == TRUE)
+            dims_selected++;
+
+    HDassert(dims_selected >= 0);
+    HDassert(dims_selected <= large_rank);
+
+    HDsnprintf(test_desc_0, sizeof(test_desc_0),
+               "\tirregular sub set of n-cube slice through m-cube (n <= m) test %d.\n", test_num);
+    MESSAGE(7, ("%s", test_desc_0));
+
+    /* This statement must be updated if SS_DR_MAX_RANK is changed */
+    HDsnprintf(test_desc_1, sizeof(test_desc_1),
+               "\tranks: %d/%d edge: %d s/p offset: %d/%d dim_selected: %d/%d/%d/%d/%d:%d.\n", small_rank,
+               large_rank, edge_size, slice_offset, pattern_offset, (int)dim_selected[0],
+               (int)dim_selected[1], (int)dim_selected[2], (int)dim_selected[3], (int)dim_selected[4],
+               dims_selected);
+    MESSAGE(7, ("%s", test_desc_1));
+
+    /* copy the edge size into the dims array */
+    for (i = 0; i < SS_DR_MAX_RANK; i++)
+        dims[i] = (hsize_t)edge_size;
+
+    /* Create the small n-cube */
+    n_cube_0_sid = H5Screate_simple(small_rank, dims, NULL);
+    CHECK(n_cube_0_sid, FAIL, "H5Screate_simple");
+
+    /* Select an "irregular" pattern in the small n-cube.  This
+     * pattern can be thought of as a set of four 3 x 2 x 2 x 2
+     * four dimensional prisms, each parallel to one of the
+     * axes and none of them intersecting with the others.
+     *
+     * In the lesser dimensional cases, this 4D pattern is
+     * projected onto the lower dimensional space.
+     *
+     * In the 1-D case, the projection of the pattern looks
+     * like this:
+     *
+     *   - - * * - * * * - -
+     *   0 1 2 3 4 5 6 7 8 9 x
+     *
+     * and in the 2-D case, it would look like this:
+     *
+     *
+     * y
+     * 9 - - - - - - - - - -
+     * 8 - - - - - - - - - -
+     * 7 - - * * - - - - - -
+     * 6 - - * * - - - - - -
+     * 5 - - * * - - - - - -
+     * 4 - - - - - - - - - -
+     * 3 - - * * - * * * - -
+     * 2 - - * * - * * * - -
+     * 1 - - - - - - - - - -
+     * 0 - - - - - - - - - -
+     *   0 1 2 3 4 5 6 7 8 9 x
+     *
+     * In both cases, asterisks indicate selected elements,
+     * and dashes indicate unselected elements.
+     *
+     * Note that in this case, since the edge size is fixed,
+     * the pattern does not change.  However, we do use the
+     * displacement parameter to allow it to be moved around
+     * within the n-cube or hyperslab.
+     */
+
+    /* first, ensure that the small n-cube has no selection */
+    ret = H5Sselect_none(n_cube_0_sid);
+    CHECK(ret, FAIL, "H5Sselect_none");
+
+    /* now, select the irregular pattern */
+    for (i = 0; i < SS_DR_MAX_RANK; i++) {
+        ret = H5Sselect_hyperslab(n_cube_0_sid, H5S_SELECT_OR, starts[i], strides[i], counts[i], blocks[i]);
+        CHECK(ret, FAIL, "H5Sselect_hyperslab");
+    } /* end for */
+
+    /* finally, clip the selection to ensure that it lies fully
+     * within the n-cube.
+     */
+    ret = H5Sselect_hyperslab(n_cube_0_sid, H5S_SELECT_AND, clip_start, clip_stride, clip_count, clip_block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Create the large n-cube */
+    n_cube_1_sid = H5Screate_simple(large_rank, dims, NULL);
+    CHECK(n_cube_1_sid, FAIL, "H5Screate_simple");
+
+    /* Ensure that the large n-cube has no selection.  (Bug fix: the
+     * return value was previously discarded, so the CHECK below
+     * examined a stale 'ret' instead of this call's result.)
+     */
+    ret = H5Sselect_none(n_cube_1_sid);
+    CHECK(ret, FAIL, "H5Sselect_none");
+
+    /* Since large rank may be less than SS_DR_MAX_RANK, we may not
+     * use the entire start, stride, count, and block arrays.  This
+     * is a problem, since it is inconvenient to set up the dim_selected
+     * array to reflect the large rank, and thus if large_rank <
+     * SS_DR_MAX_RANK, we need to hide the lower index entries
+     * from H5Sselect_hyperslab().
+     *
+     * Do this by setting up pointers to the first valid entry in start,
+     * stride, count, and block below, and pass these pointers in
+     * to H5Sselect_hyperslab() instead of the array base addresses.
+     */
+
+    i = SS_DR_MAX_RANK - large_rank;
+    HDassert(i >= 0);
+
+    start_ptr  = &(start[i]);
+    stride_ptr = &(stride[i]);
+    count_ptr  = &(count[i]);
+    block_ptr  = &(block[i]);
+
+    /* Now select the irregular selection in the (possibly larger) n-cube.
+     *
+     * Basic idea is to project the pattern used in the smaller n-cube
+     * onto the dimensions selected in the larger n-cube, with the displacement
+     * specified.
+     */
+    for (i = 0; i < SS_DR_MAX_RANK; i++) {
+        j = 0;
+        for (k = 0; k < SS_DR_MAX_RANK; k++) {
+            if (dim_selected[k]) {
+                /* selected dimension: project the pattern, displaced */
+                start[k]  = (starts[i])[j] + (hsize_t)pattern_offset;
+                stride[k] = (strides[i])[j];
+                count[k]  = (counts[i])[j];
+                block[k]  = (blocks[i])[j];
+                j++;
+            } /* end if */
+            else {
+                /* unselected dimension: pin to the slice coordinate;
+                 * stride of 2 * edge_size guarantees a single block.
+                 */
+                start[k]  = (hsize_t)slice_offset;
+                stride[k] = (hsize_t)(2 * edge_size);
+                count[k]  = 1;
+                block[k]  = 1;
+            } /* end else */
+        }     /* end for */
+
+        /* select the hyperslab */
+        ret = H5Sselect_hyperslab(n_cube_1_sid, H5S_SELECT_OR, start_ptr, stride_ptr, count_ptr, block_ptr);
+        CHECK(ret, FAIL, "H5Sselect_hyperslab");
+    } /* end for */
+
+    /* it is possible that the selection extends beyond the dataspace.
+     * clip the selection to ensure that it doesn't.
+     */
+    ret = H5Sselect_hyperslab(n_cube_1_sid, H5S_SELECT_AND, clip_start, clip_stride, clip_count, clip_block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* setup is done -- run the test: */
+    check = H5Sselect_shape_same(n_cube_0_sid, n_cube_1_sid);
+    VERIFY(check, expected_result, "H5Sselect_shape_same");
+
+    /* Close dataspaces */
+    ret = H5Sclose(n_cube_0_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Sclose(n_cube_1_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_shape_same_dr__irregular() */
+
+/****************************************************************
+**
+**  test_shape_same_dr__run_irregular_tests():
+**
+**      Exercises H5Sselect_shape_same() with an "irregular"
+**      subselection of 1, 2, 3, and 4 cubes as one parameter,
+**      and irregular subselections of 1, 2, 3, and 4 dimensional
+**      slices through an n-cube of rank no more than 5 (and at
+**      least the rank of the slice) as the other.  The irregular
+**      selection may be offset between the n-cube and the slice.
+**
+**      All the irregular selections are identical (modulo rank),
+**      so H5Sselect_shape_same() should return true iff:
+**
+**      1) the rank of the n-cube equals the number of dimensions
+**         selected in the irregular slice through the m-cube
+**         (m >= n), and
+**
+**      2) the dimensions selected in the irregular slice through
+**         the m-cube are the dimensions with the most quickly
+**         changing indices.
+**
+****************************************************************/
+static void
+test_shape_same_dr__run_irregular_tests(void)
+{
+    /* (pattern offset, slice offset) pairs tried for every
+     * combination of ranks and selected dimensions.
+     */
+    static const int offsets[][2] = {{-2, 0}, {-2, 4}, {-2, 9}, {0, 0}, {0, 6},
+                                     {0, 9},  {2, 0},  {2, 5},  {2, 9}};
+    const int n_offsets = (int)(sizeof(offsets) / sizeof(offsets[0]));
+    hbool_t   dim_selected[5];
+    hbool_t   expected_result;
+    int       i, j, u;
+    int       v, w, x, y, z;
+    int       test_num = 0;
+    int       small_rank;
+    int       large_rank;
+
+    for (large_rank = 1; large_rank <= 5; large_rank++) {
+        for (small_rank = 1; small_rank <= large_rank; small_rank++) {
+            /* Each of v..z toggles one entry of dim_selected[]; the
+             * loop guards ensure dimensions beyond large_rank are
+             * visited only with their flag FALSE.
+             */
+            v = 0;
+            do {
+                dim_selected[0] = (hbool_t)(v != 0);
+
+                w = 0;
+                do {
+                    dim_selected[1] = (hbool_t)(w != 0);
+
+                    x = 0;
+                    do {
+                        dim_selected[2] = (hbool_t)(x != 0);
+
+                        y = 0;
+                        do {
+                            dim_selected[3] = (hbool_t)(y != 0);
+
+                            z = 0;
+                            do {
+                                dim_selected[4] = (hbool_t)(z != 0);
+
+                                /* compute the expected result: shapes match
+                                 * iff exactly the small_rank fastest-changing
+                                 * dimensions are the selected ones.
+                                 */
+                                expected_result = TRUE;
+                                for (i = 0, j = 4; (i < small_rank) && expected_result; i++, j--)
+                                    if (!dim_selected[j])
+                                        expected_result = FALSE;
+                                for (; (i < large_rank) && expected_result; i++, j--)
+                                    if (dim_selected[j])
+                                        expected_result = FALSE;
+
+                                /* everything is set up -- run the tests */
+                                for (u = 0; u < n_offsets; u++)
+                                    test_shape_same_dr__irregular(test_num++, small_rank, large_rank,
+                                                                  /* pattern_offset */ offsets[u][0],
+                                                                  /* slice_offset */ offsets[u][1],
+                                                                  dim_selected, expected_result);
+
+                                z++;
+                            } while ((z < 2) && (large_rank >= 1));
+
+                            y++;
+                        } while ((y < 2) && (large_rank >= 2));
+
+                        x++;
+                    } while ((x < 2) && (large_rank >= 3));
+
+                    w++;
+                } while ((w < 2) && (large_rank >= 4));
+
+                v++;
+            } while ((v < 2) && (large_rank >= 5));
+        } /* end for */
+    }     /* end for */
+} /* test_shape_same_dr__run_irregular_tests() */
+
+/****************************************************************
+**
+**  test_shape_same_dr():  Tests selections on dataspaces with
+**      different ranks, to verify that the "shape same" routine
+**      handles this case correctly.
+**
+****************************************************************/
+static void
+test_shape_same_dr(void)
+{
+    /* Sub-tests, in order: the quick smoke checks first, then the
+     * intensive exhaustive test suites.
+     */
+    void (*const subtests[])(void) = {
+        test_shape_same_dr__smoke_check_1,
+        test_shape_same_dr__smoke_check_2,
+        test_shape_same_dr__smoke_check_3,
+        test_shape_same_dr__smoke_check_4,
+        test_shape_same_dr__run_full_space_vs_slice_tests,
+        test_shape_same_dr__run_checkerboard_tests,
+        test_shape_same_dr__run_irregular_tests,
+    };
+    size_t i;
+
+    /* Output message about test being performed */
+    MESSAGE(6, ("Testing Same Shape/Different Rank Comparisons\n"));
+
+    for (i = 0; i < sizeof(subtests) / sizeof(subtests[0]); i++)
+        subtests[i]();
+} /* test_shape_same_dr() */
+
+/****************************************************************
+**
+** test_space_rebuild(): Tests selection rebuild routine,
+** We will test whether selection in span-tree form can be rebuilt
+** into a regular selection.
+**
+**
+****************************************************************/
+static void
+test_space_rebuild(void)
+{
+ /* regular space IDs in span-tree form */
+ hid_t sid_reg1, sid_reg2, sid_reg3, sid_reg4, sid_reg5;
+
+ /* Original regular Space IDs */
+ hid_t sid_reg_ori1, sid_reg_ori2, sid_reg_ori3, sid_reg_ori4, sid_reg_ori5;
+
+ /* Irregular space IDs */
+ hid_t sid_irreg1, sid_irreg2, sid_irreg3, sid_irreg4, sid_irreg5;
+
+ /* rebuild status state */
+#if 0
+ H5S_diminfo_valid_t rebuild_stat1, rebuild_stat2;
+ htri_t rebuild_check;
+#endif
+ herr_t ret;
+
+ /* dimensions of rank 1 to rank 5 */
+ hsize_t dims1[] = {SPACERE1_DIM0};
+ hsize_t dims2[] = {SPACERE2_DIM0, SPACERE2_DIM1};
+ hsize_t dims3[] = {SPACERE3_DIM0, SPACERE3_DIM1, SPACERE3_DIM2};
+ hsize_t dims4[] = {SPACERE4_DIM0, SPACERE4_DIM1, SPACERE4_DIM2, SPACERE4_DIM3};
+ hsize_t dims5[] = {SPACERE5_DIM0, SPACERE5_DIM1, SPACERE5_DIM2, SPACERE5_DIM3, SPACERE5_DIM4};
+
+ /* The start of the hyperslab */
+ hsize_t start1[SPACERE1_RANK], start2[SPACERE2_RANK], start3[SPACERE3_RANK], start4[SPACERE4_RANK],
+ start5[SPACERE5_RANK];
+
+ /* The stride of the hyperslab */
+ hsize_t stride1[SPACERE1_RANK], stride2[SPACERE2_RANK], stride3[SPACERE3_RANK], stride4[SPACERE4_RANK],
+ stride5[SPACERE5_RANK];
+
+ /* The number of blocks for the hyperslab */
+ hsize_t count1[SPACERE1_RANK], count2[SPACERE2_RANK], count3[SPACERE3_RANK], count4[SPACERE4_RANK],
+ count5[SPACERE5_RANK];
+
+ /* The size of each block for the hyperslab */
+ hsize_t block1[SPACERE1_RANK], block2[SPACERE2_RANK], block3[SPACERE3_RANK], block4[SPACERE4_RANK],
+ block5[SPACERE5_RANK];
+
+ /* Declarations for special test of rebuild */
+ hid_t sid_spec;
+
+ /* Output message about test being performed */
+ MESSAGE(6, ("Testing functionality to rebuild regular hyperslab selection\n"));
+
+ MESSAGE(7, ("Testing functionality to rebuild 1-D hyperslab selection\n"));
+
+ /* Create 1-D dataspace */
+ sid_reg1 = H5Screate_simple(SPACERE1_RANK, dims1, NULL);
+ sid_reg_ori1 = H5Screate_simple(SPACERE1_RANK, dims1, NULL);
+
+ /* Build up the original one dimensional regular selection */
+ start1[0] = 1;
+ count1[0] = 3;
+ stride1[0] = 5;
+ block1[0] = 4;
+ ret = H5Sselect_hyperslab(sid_reg_ori1, H5S_SELECT_SET, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Build up one dimensional regular selection with H5_SELECT_OR,
+ inside HDF5, it will be treated as an irregular selection. */
+
+ start1[0] = 1;
+ count1[0] = 2;
+ stride1[0] = 5;
+ block1[0] = 4;
+ ret = H5Sselect_hyperslab(sid_reg1, H5S_SELECT_SET, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start1[0] = 11;
+ count1[0] = 1;
+ stride1[0] = 5;
+ block1[0] = 4;
+ ret = H5Sselect_hyperslab(sid_reg1, H5S_SELECT_OR, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ ret = H5S__get_rebuild_status_test(sid_reg1, &rebuild_stat1, &rebuild_stat2);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ /* In this case, rebuild_stat1 and rebuild_stat2 should be
+ * H5S_DIMINFO_VALID_YES. */
+ if (rebuild_stat1 != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (rebuild_stat2 != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (ret != FAIL) {
+ /* In this case, rebuild_check should be TRUE. */
+ rebuild_check = H5Sselect_shape_same(sid_reg1, sid_reg_ori1);
+ CHECK(rebuild_check, FALSE, "H5Sselect_shape_same");
+ }
+#endif
+ /* For irregular hyperslab */
+ sid_irreg1 = H5Screate_simple(SPACERE1_RANK, dims1, NULL);
+
+ /* Build up one dimensional irregular selection with H5_SELECT_OR */
+ start1[0] = 1;
+ count1[0] = 2;
+ stride1[0] = 5;
+ block1[0] = 4;
+ ret = H5Sselect_hyperslab(sid_irreg1, H5S_SELECT_SET, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start1[0] = 12; /* Just one position switch */
+ count1[0] = 1;
+ stride1[0] = 5;
+ block1[0] = 4;
+ ret = H5Sselect_hyperslab(sid_irreg1, H5S_SELECT_OR, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ ret = H5S__get_rebuild_status_test(sid_irreg1, &rebuild_stat1, &rebuild_stat2);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ /* In this case, rebuild_stat1 should be H5S_DIMINFO_VALID_NO and
+ * rebuild_stat2 should be H5S_DIMINFO_VALID_IMPOSSIBLE. */
+ if (rebuild_stat1 != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (rebuild_stat2 != H5S_DIMINFO_VALID_IMPOSSIBLE) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ /* No need to do shape comparison */
+#endif
+
+ MESSAGE(7, ("Testing functionality to rebuild 2-D hyperslab selection\n"));
+ /* Create 2-D dataspace */
+ sid_reg2 = H5Screate_simple(SPACERE2_RANK, dims2, NULL);
+ sid_reg_ori2 = H5Screate_simple(SPACERE2_RANK, dims2, NULL);
+
+ /* Build up the original two dimensional regular selection */
+ start2[0] = 2;
+ count2[0] = 2;
+ stride2[0] = 7;
+ block2[0] = 5;
+ start2[1] = 1;
+ count2[1] = 3;
+ stride2[1] = 3;
+ block2[1] = 2;
+
+ ret = H5Sselect_hyperslab(sid_reg_ori2, H5S_SELECT_SET, start2, stride2, count2, block2);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Build up two dimensional regular selection with H5_SELECT_OR, inside HDF5,
+ it will be treated as an irregular selection. */
+
+ start2[1] = 1;
+ count2[1] = 2;
+ stride2[1] = 3;
+ block2[1] = 2;
+
+ ret = H5Sselect_hyperslab(sid_reg2, H5S_SELECT_SET, start2, stride2, count2, block2);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start2[1] = 7; /* 7 = start(1) + count(2) * stride(3) */
+ count2[1] = 1;
+ stride2[1] = 3;
+ block2[1] = 2;
+
+ ret = H5Sselect_hyperslab(sid_reg2, H5S_SELECT_OR, start2, stride2, count2, block2);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ ret = H5S__get_rebuild_status_test(sid_reg2, &rebuild_stat1, &rebuild_stat2);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ /* In this case, rebuild_stat1 and rebuild_stat2 should be
+ * H5S_DIMINFO_VALID_YES. */
+ if (rebuild_stat1 != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (rebuild_stat2 != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ } /* end if */
+ if (ret != FAIL) {
+ /* In this case, rebuild_check should be TRUE. */
+ rebuild_check = H5Sselect_shape_same(sid_reg2, sid_reg_ori2);
+ CHECK(rebuild_check, FALSE, "H5Sselect_shape_same");
+ }
+#endif
+ /* 2-D irregular case */
+ sid_irreg2 = H5Screate_simple(SPACERE2_RANK, dims2, NULL);
+ /* Build up two dimensional irregular selection with H5_SELECT_OR */
+
+ start2[0] = 2;
+ count2[0] = 2;
+ stride2[0] = 7;
+ block2[0] = 5;
+ start2[1] = 1;
+ count2[1] = 1;
+ stride2[1] = 3;
+ block2[1] = 2;
+ ret = H5Sselect_hyperslab(sid_irreg2, H5S_SELECT_SET, start2, stride2, count2, block2);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start2[1] = 4;
+ count2[1] = 2;
+ stride2[1] = 4;
+ block2[1] = 3; /* Just add one element for the block */
+
+ ret = H5Sselect_hyperslab(sid_irreg2, H5S_SELECT_OR, start2, stride2, count2, block2);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ ret = H5S__get_rebuild_status_test(sid_irreg2, &rebuild_stat1, &rebuild_stat2);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ /* In this case, rebuild_stat1 should be H5S_DIMINFO_VALID_NO and
+ * rebuild_stat2 should be H5S_DIMINFO_VALID_IMPOSSIBLE. */
+ if (rebuild_stat1 != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (rebuild_stat2 != H5S_DIMINFO_VALID_IMPOSSIBLE) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ /* No need to do shape comparison */
+#endif
+
+ MESSAGE(7, ("Testing functionality to rebuild 3-D hyperslab selection\n"));
+
+ /* Create 3-D dataspace */
+ sid_reg3 = H5Screate_simple(SPACERE3_RANK, dims3, NULL);
+ sid_reg_ori3 = H5Screate_simple(SPACERE3_RANK, dims3, NULL);
+
+ /* Build up the original three dimensional regular selection */
+ start3[0] = 2;
+ count3[0] = 2;
+ stride3[0] = 3;
+ block3[0] = 2;
+ start3[1] = 1;
+ count3[1] = 3;
+ stride3[1] = 3;
+ block3[1] = 2;
+
+ start3[2] = 1;
+ count3[2] = 2;
+ stride3[2] = 4;
+ block3[2] = 2;
+
+ ret = H5Sselect_hyperslab(sid_reg_ori3, H5S_SELECT_SET, start3, stride3, count3, block3);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Build up three dimensional regular selection with H5_SELECT_OR, inside HDF5,
+ it will be treated as an irregular selection. */
+ start3[2] = 1;
+ count3[2] = 1;
+ stride3[2] = 4;
+ block3[2] = 2;
+
+ ret = H5Sselect_hyperslab(sid_reg3, H5S_SELECT_SET, start3, stride3, count3, block3);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start3[2] = 5;
+ count3[2] = 1;
+ stride3[2] = 4;
+ block3[2] = 2;
+
+ ret = H5Sselect_hyperslab(sid_reg3, H5S_SELECT_OR, start3, stride3, count3, block3);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ ret = H5S__get_rebuild_status_test(sid_reg3, &rebuild_stat1, &rebuild_stat2);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ /* In this case, rebuild_stat1 and rebuild_stat2 should be
+ * H5S_DIMINFO_VALID_YES. */
+ if (rebuild_stat1 != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (rebuild_stat2 != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (ret != FAIL) {
+ /* In this case, rebuild_check should be TRUE. */
+ rebuild_check = H5Sselect_shape_same(sid_reg3, sid_reg_ori3);
+ CHECK(rebuild_check, FALSE, "H5Sselect_shape_same");
+ }
+#endif
+
+ sid_irreg3 = H5Screate_simple(SPACERE3_RANK, dims3, NULL);
+
+ /* Build up three dimensional irregular selection with H5_SELECT_OR */
+ start3[0] = 2;
+ count3[0] = 2;
+ stride3[0] = 3;
+ block3[0] = 2;
+ start3[1] = 1;
+ count3[1] = 3;
+ stride3[1] = 3;
+ block3[1] = 2;
+
+ start3[2] = 1;
+ count3[2] = 2;
+ stride3[2] = 2;
+ block3[2] = 1;
+
+ ret = H5Sselect_hyperslab(sid_irreg3, H5S_SELECT_SET, start3, stride3, count3, block3);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start3[2] = 3;
+ count3[2] = 2;
+ stride3[2] = 3; /* Just add one element for the stride */
+ block3[2] = 1;
+
+ ret = H5Sselect_hyperslab(sid_irreg3, H5S_SELECT_OR, start3, stride3, count3, block3);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ ret = H5S__get_rebuild_status_test(sid_irreg3, &rebuild_stat1, &rebuild_stat2);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ /* In this case, rebuild_stat1 should be H5S_DIMINFO_VALID_NO and
+ * rebuild_stat2 should be H5S_DIMINFO_VALID_IMPOSSIBLE. */
+ if (rebuild_stat1 != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (rebuild_stat2 != H5S_DIMINFO_VALID_IMPOSSIBLE) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ /* No need to do shape comparison */
+#endif
+
+ MESSAGE(7, ("Testing functionality to rebuild 4-D hyperslab selection\n"));
+
+ /* Create 4-D dataspace */
+ sid_reg4 = H5Screate_simple(SPACERE4_RANK, dims4, NULL);
+ sid_reg_ori4 = H5Screate_simple(SPACERE4_RANK, dims4, NULL);
+
+ /* Build up the original four dimensional regular selection */
+ start4[0] = 2;
+ count4[0] = 2;
+ stride4[0] = 3;
+ block4[0] = 2;
+
+ start4[1] = 1;
+ count4[1] = 3;
+ stride4[1] = 3;
+ block4[1] = 2;
+
+ start4[2] = 1;
+ count4[2] = 2;
+ stride4[2] = 4;
+ block4[2] = 2;
+
+ start4[3] = 1;
+ count4[3] = 2;
+ stride4[3] = 4;
+ block4[3] = 2;
+
+ ret = H5Sselect_hyperslab(sid_reg_ori4, H5S_SELECT_SET, start4, stride4, count4, block4);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Build up four dimensional regular selection with H5_SELECT_OR, inside HDF5,
+ it will be treated as an irregular selection. */
+ start4[3] = 1;
+ count4[3] = 1;
+ stride4[3] = 4;
+ block4[3] = 2;
+
+ ret = H5Sselect_hyperslab(sid_reg4, H5S_SELECT_SET, start4, stride4, count4, block4);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start4[3] = 5;
+ count4[3] = 1;
+ stride4[3] = 4;
+ block4[3] = 2;
+
+ ret = H5Sselect_hyperslab(sid_reg4, H5S_SELECT_OR, start4, stride4, count4, block4);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+#if 0
+ ret = H5S__get_rebuild_status_test(sid_reg4, &rebuild_stat1, &rebuild_stat2);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ /* In this case, rebuild_stat1 and rebuild_stat2 should be
+ * H5S_DIMINFO_VALID_YES. */
+ if (rebuild_stat1 != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (rebuild_stat2 != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (ret != FAIL) {
+ /* In this case, rebuild_check should be TRUE. */
+ rebuild_check = H5Sselect_shape_same(sid_reg4, sid_reg_ori4);
+ CHECK(rebuild_check, FALSE, "H5Sselect_shape_same");
+ }
+#endif
+
+ /* Testing irregular selection */
+ sid_irreg4 = H5Screate_simple(SPACERE4_RANK, dims4, NULL);
+
+ /* Build up four dimensional irregular selection with H5_SELECT_OR */
+ start4[0] = 2;
+ count4[0] = 2;
+ stride4[0] = 3;
+ block4[0] = 2;
+ start4[1] = 1;
+ count4[1] = 3;
+ stride4[1] = 3;
+ block4[1] = 2;
+
+ start4[2] = 1;
+ count4[2] = 1;
+ stride4[2] = 4;
+ block4[2] = 2;
+
+ start4[3] = 1;
+ count4[3] = 2;
+ stride4[3] = 4;
+ block4[3] = 2; /* sub-block is one element difference */
+
+ ret = H5Sselect_hyperslab(sid_irreg4, H5S_SELECT_SET, start4, stride4, count4, block4);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start4[2] = 5;
+ count4[2] = 1;
+ stride4[2] = 4;
+ block4[2] = 2;
+
+ start4[3] = 1;
+ count4[3] = 2;
+ stride4[3] = 4;
+ block4[3] = 3; /* sub-block is one element difference */
+
+ ret = H5Sselect_hyperslab(sid_irreg4, H5S_SELECT_OR, start4, stride4, count4, block4);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ ret = H5S__get_rebuild_status_test(sid_irreg4, &rebuild_stat1, &rebuild_stat2);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ /* In this case, rebuild_stat1 should be H5S_DIMINFO_VALID_NO and
+ * rebuild_stat2 should be H5S_DIMINFO_VALID_IMPOSSIBLE. */
+ if (rebuild_stat1 != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (rebuild_stat2 != H5S_DIMINFO_VALID_IMPOSSIBLE) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ /* No need to do shape comparison */
+#endif
+
+ MESSAGE(7, ("Testing functionality to rebuild 5-D hyperslab selection\n"));
+
+ /* Create 5-D dataspace */
+ sid_reg5 = H5Screate_simple(SPACERE5_RANK, dims5, NULL);
+ sid_reg_ori5 = H5Screate_simple(SPACERE5_RANK, dims5, NULL);
+
+ /* Build up the original five dimensional regular selection */
+ start5[0] = 2;
+ count5[0] = 2;
+ stride5[0] = 3;
+ block5[0] = 2;
+
+ start5[1] = 1;
+ count5[1] = 3;
+ stride5[1] = 3;
+ block5[1] = 2;
+
+ start5[2] = 1;
+ count5[2] = 2;
+ stride5[2] = 4;
+ block5[2] = 2;
+
+ start5[3] = 1;
+ count5[3] = 2;
+ stride5[3] = 4;
+ block5[3] = 2;
+
+ start5[4] = 1;
+ count5[4] = 2;
+ stride5[4] = 4;
+ block5[4] = 2;
+
+ ret = H5Sselect_hyperslab(sid_reg_ori5, H5S_SELECT_SET, start5, stride5, count5, block5);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Build up five dimensional regular selection with H5_SELECT_OR, inside HDF5,
+ it will be treated as an irregular selection. */
+ start5[4] = 1;
+ count5[4] = 1;
+ stride5[4] = 4;
+ block5[4] = 2;
+
+ ret = H5Sselect_hyperslab(sid_reg5, H5S_SELECT_SET, start5, stride5, count5, block5);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start5[4] = 5;
+ count5[4] = 1;
+ stride5[4] = 4;
+ block5[4] = 2;
+
+ ret = H5Sselect_hyperslab(sid_reg5, H5S_SELECT_OR, start5, stride5, count5, block5);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+#if 0
+ ret = H5S__get_rebuild_status_test(sid_reg5, &rebuild_stat1, &rebuild_stat2);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ /* In this case, rebuild_stat1 and rebuild_stat2 should be
+ * H5S_DIMINFO_VALID_YES. */
+ if (rebuild_stat1 != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (rebuild_stat2 != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (ret != FAIL) {
+ /* In this case, rebuild_check should be TRUE. */
+ rebuild_check = H5Sselect_shape_same(sid_reg5, sid_reg_ori5);
+ CHECK(rebuild_check, FALSE, "H5Sselect_shape_same");
+ }
+#endif
+
+ sid_irreg5 = H5Screate_simple(SPACERE5_RANK, dims5, NULL);
+
+ /* Build up five dimensional irregular selection with H5_SELECT_OR */
+ start5[0] = 2;
+ count5[0] = 2;
+ stride5[0] = 3;
+ block5[0] = 2;
+
+ start5[1] = 1;
+ count5[1] = 3;
+ stride5[1] = 3;
+ block5[1] = 2;
+
+ start5[2] = 1;
+ count5[2] = 2;
+ stride5[2] = 4;
+ block5[2] = 2;
+
+ start5[3] = 1;
+ count5[3] = 1;
+ stride5[3] = 4;
+ block5[3] = 2;
+
+ start5[4] = 2; /* One element difference */
+ count5[4] = 1;
+ stride5[4] = 4;
+ block5[4] = 2;
+
+ ret = H5Sselect_hyperslab(sid_irreg5, H5S_SELECT_SET, start5, stride5, count5, block5);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start5[3] = 5;
+ count5[3] = 1;
+ stride5[3] = 4;
+ block5[3] = 2;
+
+ start5[4] = 1; /* One element difference */
+ count5[4] = 2;
+ stride5[4] = 4;
+ block5[4] = 2;
+
+ ret = H5Sselect_hyperslab(sid_irreg5, H5S_SELECT_OR, start5, stride5, count5, block5);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ ret = H5S__get_rebuild_status_test(sid_irreg5, &rebuild_stat1, &rebuild_stat2);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ /* In this case, rebuild_stat1 should be H5S_DIMINFO_VALID_NO and
+ * rebuild_stat2 should be H5S_DIMINFO_VALID_IMPOSSIBLE. */
+ if (rebuild_stat1 != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (rebuild_stat2 != H5S_DIMINFO_VALID_IMPOSSIBLE) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ /* No need to do shape comparison */
+#endif
+
+ /* We use 5-D to test a special case with
+ rebuilding routine TRUE, FALSE and TRUE */
+ sid_spec = H5Screate_simple(SPACERE5_RANK, dims5, NULL);
+
+ /* Build up the original five dimensional regular selection */
+ start5[0] = 2;
+ count5[0] = 2;
+ stride5[0] = 3;
+ block5[0] = 2;
+
+ start5[1] = 1;
+ count5[1] = 3;
+ stride5[1] = 3;
+ block5[1] = 2;
+
+ start5[2] = 1;
+ count5[2] = 2;
+ stride5[2] = 4;
+ block5[2] = 2;
+
+ start5[3] = 1;
+ count5[3] = 2;
+ stride5[3] = 4;
+ block5[3] = 2;
+
+ start5[4] = 1;
+ count5[4] = 1;
+ stride5[4] = 4;
+ block5[4] = 2;
+
+ ret = H5Sselect_hyperslab(sid_spec, H5S_SELECT_SET, start5, stride5, count5, block5);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ ret = H5S__get_rebuild_status_test(sid_spec, &rebuild_stat1, &rebuild_stat2);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ /* In this case, rebuild_stat1 and rebuild_stat2 should both be
+ * H5S_DIMINFO_VALID_YES. */
+ if (rebuild_stat1 != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (rebuild_stat2 != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ /* No need to do shape comparison */
+#endif
+
+ /* Adding some selections to make it real irregular */
+ start5[3] = 1;
+ count5[3] = 1;
+ stride5[3] = 4;
+ block5[3] = 2;
+
+ start5[4] = 5;
+ count5[4] = 1;
+ stride5[4] = 4;
+ block5[4] = 2;
+
+ ret = H5Sselect_hyperslab(sid_spec, H5S_SELECT_OR, start5, stride5, count5, block5);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ ret = H5S__get_rebuild_status_test(sid_spec, &rebuild_stat1, &rebuild_stat2);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ /* In this case, rebuild_stat1 should be H5S_DIMINFO_VALID_NO and
+ * rebuild_stat2 should be H5S_DIMINFO_VALID_IMPOSSIBLE. */
+ if (rebuild_stat1 != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (rebuild_stat2 != H5S_DIMINFO_VALID_IMPOSSIBLE) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ /* No need to do shape comparison */
+#endif
+
+ /* Add more selections to make it regular again */
+ start5[3] = 5;
+ count5[3] = 1;
+ stride5[3] = 4;
+ block5[3] = 2;
+
+ start5[4] = 5;
+ count5[4] = 1;
+ stride5[4] = 4;
+ block5[4] = 2;
+
+ ret = H5Sselect_hyperslab(sid_spec, H5S_SELECT_OR, start5, stride5, count5, block5);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ ret = H5S__get_rebuild_status_test(sid_spec, &rebuild_stat1, &rebuild_stat2);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ /* In this case, rebuild_stat1 should be H5S_DIMINFO_VALID_NO and
+ * rebuild_stat2 should be H5S_DIMINFO_VALID_YES. */
+ if (rebuild_stat1 != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ if (rebuild_stat2 != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ }
+ /* No need to do shape comparison */
+#endif
+
+ H5Sclose(sid_reg1);
+ CHECK(ret, FAIL, "H5Sclose");
+ H5Sclose(sid_irreg1);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ H5Sclose(sid_reg2);
+ CHECK(ret, FAIL, "H5Sclose");
+ H5Sclose(sid_irreg2);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ H5Sclose(sid_reg3);
+ CHECK(ret, FAIL, "H5Sclose");
+ H5Sclose(sid_irreg3);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ H5Sclose(sid_reg4);
+ CHECK(ret, FAIL, "H5Sclose");
+ H5Sclose(sid_irreg4);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ H5Sclose(sid_reg5);
+ CHECK(ret, FAIL, "H5Sclose");
+ H5Sclose(sid_irreg5);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ H5Sclose(sid_spec);
+ CHECK(ret, FAIL, "H5Sclose");
+}
+
+/****************************************************************
+**
+** test_space_update_diminfo(): Tests selection diminfo update
+** routine. We will test whether regular selections can be
+** quickly updated when the selection is modified.
+**
+**
+****************************************************************/
+static void
+test_space_update_diminfo(void)
+{
+ hid_t space_id; /* Dataspace id */
+#if 0
+ H5S_diminfo_valid_t diminfo_valid; /* Diminfo status */
+ H5S_diminfo_valid_t rebuild_status; /* Diminfo status after rebuild */
+#endif
+ H5S_sel_type sel_type; /* Selection type */
+ herr_t ret; /* Return value */
+
+ /* dimensions of the rank 1 and rank 3 dataspaces used below */
+ hsize_t dims1[] = {SPACEUD1_DIM0};
+ hsize_t dims3[] = {SPACEUD3_DIM0, SPACEUD3_DIM1, SPACEUD3_DIM2};
+
+ /* The start of the hyperslab */
+ hsize_t start1[1], start3[3];
+
+ /* The stride of the hyperslab */
+ hsize_t stride1[1], stride3[3];
+
+ /* The number of blocks for the hyperslab */
+ hsize_t count1[1], count3[3];
+
+ /* The size of each block for the hyperslab */
+ hsize_t block1[1], block3[3];
+
+ /* Output message about test being performed */
+ MESSAGE(6, ("Testing functionality to update hyperslab dimension info\n"));
+
+ MESSAGE(7, ("Testing functionality to update 1-D hyperslab dimension info\n"));
+
+ /*
+ * Test adding regularly spaced distinct blocks
+ */
+
+ /* Create 1-D dataspace */
+ space_id = H5Screate_simple(1, dims1, NULL);
+
+ /* Create single block */
+ start1[0] = 3;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add block after first, with OR */
+ start1[0] = 6;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add block before first, this time with XOR */
+ start1[0] = 0;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_XOR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add two blocks after current block */
+ start1[0] = 9;
+ stride1[0] = 3;
+ count1[0] = 2;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add two blocks overlapping current block, with OR */
+ start1[0] = 9;
+ stride1[0] = 3;
+ count1[0] = 2;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add two blocks partially overlapping current block, with OR */
+ start1[0] = 12;
+ stride1[0] = 3;
+ count1[0] = 2;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add two blocks partially overlapping current block, with XOR */
+ start1[0] = 15;
+ stride1[0] = 3;
+ count1[0] = 2;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_XOR, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO, after rebuild it should be IMPOSSIBLE */
+ ret = H5S__get_rebuild_status_test(space_id, &diminfo_valid, &rebuild_status);
+ /* NOTE(review): the CHECK message below (and in later copies) names
+ * H5S__get_diminfo_status_test, but the call above is
+ * H5S__get_rebuild_status_test -- message string is misleading */
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+ if (rebuild_status != H5S_DIMINFO_VALID_IMPOSSIBLE) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ } /* end if */
+#endif
+
+ /* Fill in missing block */
+ start1[0] = 15;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_XOR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO, after rebuild it should be YES */
+ ret = H5S__get_rebuild_status_test(space_id, &diminfo_valid, &rebuild_status);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+ if (rebuild_status != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ } /* end if */
+#endif
+ /*
+ * Test adding contiguous blocks
+ */
+
+ /* Create single block */
+ start1[0] = 3;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add block immediately after first, with OR */
+ start1[0] = 5;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add block immediately before first, with OR */
+ start1[0] = 1;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add differently sized block immediately after current, with OR */
+ start1[0] = 7;
+ count1[0] = 1;
+ block1[0] = 7;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /*
+ * Test adding overlapping blocks
+ */
+
+ /* Create single block */
+ start1[0] = 3;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add block completely overlapping first, with OR */
+ start1[0] = 3;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add block partially overlapping first, with OR */
+ start1[0] = 4;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add block completely enclosing current, with OR */
+ start1[0] = 2;
+ count1[0] = 1;
+ block1[0] = 5;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add block completely enclosed by current, with OR */
+ start1[0] = 3;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add equally sized block partially overlapping current, with XOR */
+ start1[0] = 3;
+ count1[0] = 1;
+ block1[0] = 5;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_XOR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Fill in hole in block */
+ start1[0] = 3;
+ count1[0] = 1;
+ block1[0] = 4;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO, after rebuild it should be YES */
+ ret = H5S__get_rebuild_status_test(space_id, &diminfo_valid, &rebuild_status);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+ if (rebuild_status != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ } /* end if */
+#endif
+
+ /* Add differently sized block partially overlapping current, with XOR */
+ start1[0] = 4;
+ count1[0] = 1;
+ block1[0] = 5;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_XOR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Fill in hole in block */
+ start1[0] = 4;
+ count1[0] = 1;
+ block1[0] = 4;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO, after rebuild it should be YES */
+ ret = H5S__get_rebuild_status_test(space_id, &diminfo_valid, &rebuild_status);
+ CHECK(ret, FAIL, "H5S__get_rebuild_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+ if (rebuild_status != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_rebuild");
+ } /* end if */
+#endif
+
+ /* Add block completely overlapping current, with XOR */
+ start1[0] = 2;
+ count1[0] = 1;
+ block1[0] = 7;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_XOR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* XOR of a selection with itself empties it: selection should now be NONE */
+ sel_type = H5Sget_select_type(space_id);
+ VERIFY(sel_type, H5S_SEL_NONE, "H5Sget_select_type");
+
+ /*
+ * Test various conditions that break the fast algorithm
+ */
+
+ /* Create multiple blocks */
+ start1[0] = 3;
+ stride1[0] = 3;
+ count1[0] = 2;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create single block with start out of phase */
+ start1[0] = 8;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create multiple blocks */
+ start1[0] = 3;
+ stride1[0] = 3;
+ count1[0] = 2;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create multiple blocks with start out of phase */
+ start1[0] = 8;
+ stride1[0] = 3;
+ count1[0] = 2;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create multiple blocks */
+ start1[0] = 3;
+ stride1[0] = 3;
+ count1[0] = 2;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create multiple blocks with wrong stride */
+ start1[0] = 9;
+ stride1[0] = 4;
+ count1[0] = 2;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create single block */
+ start1[0] = 3;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create single block with wrong size */
+ start1[0] = 6;
+ count1[0] = 1;
+ block1[0] = 1;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create single block */
+ start1[0] = 3;
+ count1[0] = 1;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create multiple blocks with wrong size */
+ start1[0] = 6;
+ stride1[0] = 3;
+ count1[0] = 2;
+ block1[0] = 1;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create multiple blocks */
+ start1[0] = 3;
+ stride1[0] = 3;
+ count1[0] = 2;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create single block with wrong size */
+ start1[0] = 9;
+ count1[0] = 1;
+ block1[0] = 1;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, NULL, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create multiple blocks */
+ start1[0] = 3;
+ stride1[0] = 3;
+ count1[0] = 2;
+ block1[0] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create multiple blocks with wrong size */
+ start1[0] = 9;
+ stride1[0] = 3;
+ count1[0] = 2;
+ block1[0] = 1;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start1, stride1, count1, block1);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ ret = H5Sclose(space_id);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ MESSAGE(7, ("Testing functionality to update 3-D hyperslab dimension info\n"));
+
+ /* Create 3-D dataspace */
+ space_id = H5Screate_simple(3, dims3, NULL);
+
+ /* Create multiple blocks */
+ start3[0] = 0;
+ start3[1] = 1;
+ start3[2] = 2;
+ stride3[0] = 2;
+ stride3[1] = 3;
+ stride3[2] = 4;
+ count3[0] = 4;
+ count3[1] = 3;
+ count3[2] = 2;
+ block3[0] = 1;
+ block3[1] = 2;
+ block3[2] = 3;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start3, stride3, count3, block3);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add blocks with same values in all dimensions */
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start3, stride3, count3, block3);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add blocks with same values in two dimensions */
+ start3[0] = 8;
+ stride3[0] = 1;
+ count3[0] = 1;
+ block3[0] = 1;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start3, stride3, count3, block3);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create multiple blocks */
+ start3[0] = 0;
+ start3[1] = 1;
+ start3[2] = 2;
+ stride3[0] = 2;
+ stride3[1] = 3;
+ stride3[2] = 4;
+ count3[0] = 4;
+ count3[1] = 3;
+ count3[2] = 2;
+ block3[0] = 1;
+ block3[1] = 2;
+ block3[2] = 3;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start3, stride3, count3, block3);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add blocks with same values in one dimension */
+ start3[0] = 8;
+ start3[1] = 10;
+ stride3[0] = 1;
+ stride3[1] = 1;
+ count3[0] = 1;
+ count3[1] = 1;
+ block3[0] = 1;
+ block3[1] = 2;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start3, stride3, count3, block3);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Create multiple blocks */
+ start3[0] = 0;
+ start3[1] = 1;
+ start3[2] = 2;
+ stride3[0] = 2;
+ stride3[1] = 3;
+ stride3[2] = 4;
+ count3[0] = 4;
+ count3[1] = 3;
+ count3[2] = 2;
+ block3[0] = 1;
+ block3[1] = 2;
+ block3[2] = 3;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start3, stride3, count3, block3);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be YES */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_YES) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ /* Add blocks with same values in no dimensions */
+ start3[0] = 8;
+ start3[1] = 10;
+ start3[2] = 10;
+ stride3[0] = 1;
+ stride3[1] = 1;
+ stride3[2] = 1;
+ count3[0] = 1;
+ count3[1] = 1;
+ count3[2] = 1;
+ block3[0] = 1;
+ block3[1] = 2;
+ block3[2] = 3;
+ ret = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, start3, stride3, count3, block3);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+#if 0
+ /* diminfo_valid should be NO */
+ ret = H5S__get_diminfo_status_test(space_id, &diminfo_valid);
+ CHECK(ret, FAIL, "H5S__get_diminfo_status_test");
+ if (diminfo_valid != H5S_DIMINFO_VALID_NO) {
+ ret = FAIL;
+ CHECK(ret, FAIL, "H5S_hyper_update_diminfo");
+ } /* end if */
+#endif
+ ret = H5Sclose(space_id);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* end test_space_update_diminfo() */
+
+/****************************************************************
+**
+** test_select_hyper_chunk_offset(): Tests selections on dataspace,
+** verify that offsets for hyperslab selections are working in
+** chunked datasets.
+**
+****************************************************************/
+#if 0
+/* Verify that dataspace selection offsets (H5Soffset_simple) work correctly
+ * for hyperslab selections on chunked, extendible datasets.  Each chunk is
+ * written through an offset file selection (chunks in forward order) and an
+ * offset memory selection (chunks in reverse order), then the whole dataset
+ * is read back and verified.  A second pass repeats the check with an 'OR'ed
+ * (two-piece) selection to exercise the hyperslab span-list code.
+ */
+static void
+test_select_hyper_chunk_offset(void)
+{
+    hid_t fid;  /* File ID */
+    hid_t sid;  /* Dataspace ID */
+    hid_t msid; /* Memory dataspace ID */
+    hid_t did;  /* Dataset ID */
+    const hsize_t mem_dims[1] = {SPACE10_DIM1};  /* Dataspace dimensions for memory */
+    const hsize_t dims[1] = {0};                 /* Dataspace initial dimensions */
+    const hsize_t maxdims[1] = {H5S_UNLIMITED};  /* Dataspace max dims */
+    int *wbuf;  /* Buffer for writing data */
+    int *rbuf;  /* Buffer for reading data */
+    hid_t dcpl; /* Dataset creation property list ID */
+    hsize_t chunks[1] = {SPACE10_CHUNK_SIZE}; /* Chunk size */
+    hsize_t start[1] = {0};                   /* The start of the hyperslab */
+    hsize_t count[1] = {SPACE10_CHUNK_SIZE};  /* The size of the hyperslab */
+    int i, j;   /* Local index */
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(6, ("Testing hyperslab selections using offsets in chunked datasets\n"));
+
+    /* Allocate buffers */
+    wbuf = (int *)HDmalloc(sizeof(int) * SPACE10_DIM1);
+    CHECK_PTR(wbuf, "HDmalloc");
+    rbuf = (int *)HDcalloc(sizeof(int), SPACE10_DIM1);
+    CHECK_PTR(rbuf, "HDcalloc");
+
+    /* Initialize the write buffer */
+    for (i = 0; i < SPACE10_DIM1; i++)
+        wbuf[i] = i;
+
+    /* Create file */
+    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fcreate");
+
+    /* Create a dataset creation property list */
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, FAIL, "H5Pcreate");
+
+    /* Set to chunked storage layout */
+    ret = H5Pset_layout(dcpl, H5D_CHUNKED);
+    CHECK(ret, FAIL, "H5Pset_layout");
+
+    /* Set the chunk size */
+    ret = H5Pset_chunk(dcpl, 1, chunks);
+    CHECK(ret, FAIL, "H5Pset_chunk");
+
+    /* Create dataspace for memory */
+    msid = H5Screate_simple(1, mem_dims, NULL);
+    CHECK(msid, FAIL, "H5Screate_simple");
+
+    /* Select the correct chunk in the memory dataspace */
+    ret = H5Sselect_hyperslab(msid, H5S_SELECT_SET, start, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Create dataspace for dataset */
+    sid = H5Screate_simple(1, dims, maxdims);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Create the dataset */
+    did = H5Dcreate2(fid, "fooData", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(did, FAIL, "H5Dcreate2");
+
+    /* Close the dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close the dataset creation property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Loop over writing out each chunk */
+    for (i = SPACE10_CHUNK_SIZE; i <= SPACE10_DIM1; i += SPACE10_CHUNK_SIZE) {
+        hssize_t offset[1]; /* Offset of selection */
+        hid_t fsid;         /* File dataspace ID */
+        hsize_t size[1];    /* The size to extend the dataset to */
+
+        /* Extend the dataset */
+        size[0] = (hsize_t)i; /* The size to extend the dataset to */
+        ret = H5Dset_extent(did, size);
+        CHECK(ret, FAIL, "H5Dset_extent");
+
+        /* Get the (extended) dataspace from the dataset */
+        fsid = H5Dget_space(did);
+        CHECK(fsid, FAIL, "H5Dget_space");
+
+        /* Select the correct chunk in the dataset */
+        ret = H5Sselect_hyperslab(fsid, H5S_SELECT_SET, start, NULL, count, NULL);
+        CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+        /* Set the selection offset for the file dataspace */
+        offset[0] = i - SPACE10_CHUNK_SIZE;
+        ret = H5Soffset_simple(fsid, offset);
+        CHECK(ret, FAIL, "H5Soffset_simple");
+
+        /* Set the selection offset for the memory dataspace */
+        offset[0] = SPACE10_DIM1 - i;
+        ret = H5Soffset_simple(msid, offset);
+        CHECK(ret, FAIL, "H5Soffset_simple");
+
+        /* Write the data to the chunk */
+        ret = H5Dwrite(did, H5T_NATIVE_INT, msid, fsid, H5P_DEFAULT, wbuf);
+        CHECK(ret, FAIL, "H5Dwrite");
+
+        /* Close the file dataspace copy */
+        ret = H5Sclose(fsid);
+        CHECK(ret, FAIL, "H5Sclose");
+    }
+
+    /* Read the data back in */
+    ret = H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Verify the information read in */
+    for (i = 0; i < SPACE10_DIM1; i += SPACE10_CHUNK_SIZE)
+        for (j = 0; j < SPACE10_CHUNK_SIZE; j++)
+            if (wbuf[i + j] != rbuf[((SPACE10_DIM1 - i) - SPACE10_CHUNK_SIZE) + j])
+                TestErrPrintf("Line: %d - Error! i=%d, j=%d, rbuf=%d, wbuf=%d\n", __LINE__, i, j,
+                              rbuf[((SPACE10_DIM1 - i) - SPACE10_CHUNK_SIZE) + j], wbuf[i + j]);
+
+    /* Check with 'OR'ed set of hyperslab selections, which makes certain the
+     * hyperslab spanlist code gets tested. -QAK
+     */
+
+    /* Re-initialize the write buffer */
+    for (i = 0; i < SPACE10_DIM1; i++)
+        wbuf[i] = i * 2;
+
+    /* Change the selected region in the memory dataspace */
+    start[0] = 0;
+    count[0] = SPACE10_CHUNK_SIZE / 3;
+    ret = H5Sselect_hyperslab(msid, H5S_SELECT_SET, start, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+    start[0] = (2 * SPACE10_CHUNK_SIZE) / 3;
+    ret = H5Sselect_hyperslab(msid, H5S_SELECT_OR, start, NULL, count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Loop over writing out each chunk */
+    for (i = SPACE10_CHUNK_SIZE; i <= SPACE10_DIM1; i += SPACE10_CHUNK_SIZE) {
+        hssize_t offset[1]; /* Offset of selection */
+        hid_t fsid;         /* File dataspace ID */
+        hsize_t size[1];    /* The size to extend the dataset to */
+
+        /* Extend the dataset */
+        size[0] = (hsize_t)i; /* The size to extend the dataset to */
+        ret = H5Dset_extent(did, size);
+        CHECK(ret, FAIL, "H5Dset_extent");
+
+        /* Get the (extended) dataspace from the dataset */
+        fsid = H5Dget_space(did);
+        CHECK(fsid, FAIL, "H5Dget_space");
+
+        /* Select the correct region in the dataset */
+        start[0] = 0;
+        ret = H5Sselect_hyperslab(fsid, H5S_SELECT_SET, start, NULL, count, NULL);
+        CHECK(ret, FAIL, "H5Sselect_hyperslab");
+        start[0] = (2 * SPACE10_CHUNK_SIZE) / 3;
+        ret = H5Sselect_hyperslab(fsid, H5S_SELECT_OR, start, NULL, count, NULL);
+        CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+        /* Set the selection offset for the file dataspace */
+        offset[0] = i - SPACE10_CHUNK_SIZE;
+        ret = H5Soffset_simple(fsid, offset);
+        CHECK(ret, FAIL, "H5Soffset_simple");
+
+        /* Set the selection offset for the memory dataspace */
+        offset[0] = SPACE10_DIM1 - i;
+        ret = H5Soffset_simple(msid, offset);
+        CHECK(ret, FAIL, "H5Soffset_simple");
+
+        /* Write the data to the chunk */
+        ret = H5Dwrite(did, H5T_NATIVE_INT, msid, fsid, H5P_DEFAULT, wbuf);
+        CHECK(ret, FAIL, "H5Dwrite");
+
+        /* Close the file dataspace copy */
+        ret = H5Sclose(fsid);
+        CHECK(ret, FAIL, "H5Sclose");
+    }
+
+    /* Read the data back in */
+    ret = H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Verify the information read in */
+    for (i = 0; i < SPACE10_DIM1; i += SPACE10_CHUNK_SIZE)
+        for (j = 0; j < SPACE10_CHUNK_SIZE; j++)
+            /* We're not writing out the "middle" of each chunk, so don't check that */
+            if (j < (SPACE10_CHUNK_SIZE / 3) || j >= ((2 * SPACE10_CHUNK_SIZE) / 3))
+                if (wbuf[i + j] != rbuf[((SPACE10_DIM1 - i) - SPACE10_CHUNK_SIZE) + j])
+                    TestErrPrintf("Line: %d - Error! i=%d, j=%d, rbuf=%d, wbuf=%d\n", __LINE__, i, j,
+                                  rbuf[((SPACE10_DIM1 - i) - SPACE10_CHUNK_SIZE) + j], wbuf[i + j]);
+
+    /* Close the memory dataspace */
+    ret = H5Sclose(msid);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close the dataset */
+    ret = H5Dclose(did);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close the file */
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Free the buffers */
+    HDfree(wbuf);
+    HDfree(rbuf);
+} /* test_select_hyper_chunk_offset() */
+#endif
+/****************************************************************
+**
+** test_select_hyper_chunk_offset2(): Tests selections on dataspace,
+** another test to verify that offsets for hyperslab selections are
+** working in chunked datasets.
+**
+****************************************************************/
+#if 0
+/* Verify hyperslab selection offsets on chunked datasets a second way: write
+ * the whole dataset in one shot, then read it back one chunk at a time
+ * through an offset file selection and compare against the original data.
+ */
+static void
+test_select_hyper_chunk_offset2(void)
+{
+    hid_t file, dataset; /* handles */
+    hid_t dataspace;
+    hid_t memspace;
+    hid_t dcpl; /* Dataset creation property list */
+    herr_t status;
+    unsigned data_out[SPACE12_DIM0];       /* output buffer */
+    unsigned data_in[SPACE12_CHUNK_DIM0];  /* input buffer */
+    hsize_t dims[SPACE12_RANK] = {SPACE12_DIM0};             /* Dimension size */
+    hsize_t chunk_dims[SPACE12_RANK] = {SPACE12_CHUNK_DIM0}; /* Chunk size */
+    hsize_t start[SPACE12_RANK];  /* Start of hyperslab */
+    hsize_t count[SPACE12_RANK];  /* Size of hyperslab */
+    hssize_t offset[SPACE12_RANK]; /* hyperslab offset in the file */
+    unsigned u, v; /* Local index variables */
+
+    /* Output message about test being performed */
+    MESSAGE(6, ("Testing more hyperslab selections using offsets in chunked datasets\n"));
+
+    /* Initialize data to write out */
+    for (u = 0; u < SPACE12_DIM0; u++)
+        data_out[u] = u;
+
+    /* Create the file */
+    file = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file, FAIL, "H5Fcreate");
+
+    /* Create dataspace */
+    dataspace = H5Screate_simple(SPACE12_RANK, dims, NULL);
+    CHECK(dataspace, FAIL, "H5Screate_simple");
+
+    /* Create dataset creation property list */
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, FAIL, "H5Pcreate");
+
+    /* Set chunk sizes */
+    status = H5Pset_chunk(dcpl, SPACE12_RANK, chunk_dims);
+    CHECK(status, FAIL, "H5Pset_chunk");
+
+    /* Create dataset */
+    dataset = H5Dcreate2(file, DATASETNAME, H5T_NATIVE_UINT, dataspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Close DCPL */
+    status = H5Pclose(dcpl);
+    CHECK(status, FAIL, "H5Pclose");
+
+    /* Write out entire dataset */
+    status = H5Dwrite(dataset, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data_out);
+    CHECK(status, FAIL, "H5Dwrite");
+
+    /* Create memory dataspace (same size as a chunk) */
+    memspace = H5Screate_simple(SPACE12_RANK, chunk_dims, NULL);
+    CHECK(memspace, FAIL, "H5Screate_simple");
+
+    /*
+     * Define hyperslab in the file dataspace.
+     */
+    start[0] = 0;
+    count[0] = SPACE12_CHUNK_DIM0;
+    status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, NULL, count, NULL);
+    CHECK(status, FAIL, "H5Sselect_hyperslab");
+
+    /* Loop through retrieving data from file, checking it against data written */
+    for (u = 0; u < SPACE12_DIM0; u += SPACE12_CHUNK_DIM0) {
+        /* Set the offset of the file selection */
+        offset[0] = u;
+        status = H5Soffset_simple(dataspace, offset);
+        CHECK(status, FAIL, "H5Soffset_simple");
+
+        /* Read in buffer of data */
+        status = H5Dread(dataset, H5T_NATIVE_UINT, memspace, dataspace, H5P_DEFAULT, data_in);
+        CHECK(status, FAIL, "H5Dread");
+
+        /* Check data read in */
+        for (v = 0; v < SPACE12_CHUNK_DIM0; v++)
+            if (data_out[u + v] != data_in[v])
+                TestErrPrintf("Error! data_out[%u]=%u, data_in[%u]=%u\n", (unsigned)(u + v), data_out[u + v],
+                              v, data_in[v]);
+    } /* end for */
+
+    status = H5Dclose(dataset);
+    CHECK(status, FAIL, "H5Dclose");
+
+    status = H5Sclose(dataspace);
+    CHECK(status, FAIL, "H5Sclose");
+
+    status = H5Sclose(memspace);
+    CHECK(status, FAIL, "H5Sclose");
+
+    status = H5Fclose(file);
+    CHECK(status, FAIL, "H5Fclose");
+} /* test_select_hyper_chunk_offset2() */
+#endif
+/****************************************************************
+**
+** test_select_bounds(): Tests selection bounds on dataspaces,
+** both with and without offsets.
+**
+****************************************************************/
+static void
+test_select_bounds(void)
+{
+    hid_t sid; /* Dataspace ID */
+    const hsize_t dims[SPACE11_RANK] = {SPACE11_DIM1, SPACE11_DIM2}; /* Dataspace dimensions */
+    hsize_t coord[SPACE11_NPOINTS][SPACE11_RANK]; /* Coordinates for point selection */
+    hsize_t start[SPACE11_RANK];  /* The start of the hyperslab */
+    hsize_t stride[SPACE11_RANK]; /* The stride between block starts for the hyperslab */
+    hsize_t count[SPACE11_RANK];  /* The number of blocks for the hyperslab */
+    hsize_t block[SPACE11_RANK];  /* The size of each block for the hyperslab */
+    hssize_t offset[SPACE11_RANK]; /* Offset amount for selection */
+    hsize_t low_bounds[SPACE11_RANK];  /* The low bounds for the selection */
+    hsize_t high_bounds[SPACE11_RANK]; /* The high bounds for the selection */
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(6, ("Testing selection bounds\n"));
+
+    /* Create dataspace */
+    sid = H5Screate_simple(SPACE11_RANK, dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Get bounds for 'all' selection (covers the entire extent) */
+    ret = H5Sget_select_bounds(sid, low_bounds, high_bounds);
+    CHECK(ret, FAIL, "H5Sget_select_bounds");
+    VERIFY(low_bounds[0], 0, "H5Sget_select_bounds");
+    VERIFY(low_bounds[1], 0, "H5Sget_select_bounds");
+    VERIFY(high_bounds[0], SPACE11_DIM1 - 1, "H5Sget_select_bounds");
+    VERIFY(high_bounds[1], SPACE11_DIM2 - 1, "H5Sget_select_bounds");
+
+    /* Set offset for selection */
+    offset[0] = 1;
+    offset[1] = 1;
+    ret = H5Soffset_simple(sid, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+
+    /* Get bounds for 'all' selection with offset (which should be ignored) */
+    ret = H5Sget_select_bounds(sid, low_bounds, high_bounds);
+    CHECK(ret, FAIL, "H5Sget_select_bounds");
+    VERIFY(low_bounds[0], 0, "H5Sget_select_bounds");
+    VERIFY(low_bounds[1], 0, "H5Sget_select_bounds");
+    VERIFY(high_bounds[0], SPACE11_DIM1 - 1, "H5Sget_select_bounds");
+    VERIFY(high_bounds[1], SPACE11_DIM2 - 1, "H5Sget_select_bounds");
+
+    /* Reset offset for selection */
+    offset[0] = 0;
+    offset[1] = 0;
+    ret = H5Soffset_simple(sid, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+
+    /* Set 'none' selection */
+    ret = H5Sselect_none(sid);
+    CHECK(ret, FAIL, "H5Sselect_none");
+
+    /* Get bounds for 'none' selection (no elements selected, so should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Sget_select_bounds(sid, low_bounds, high_bounds);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Sget_select_bounds");
+
+    /* Set point selection (four corner-ish points at 3 and 96 in each dim) */
+    coord[0][0] = 3;
+    coord[0][1] = 3;
+    coord[1][0] = 3;
+    coord[1][1] = 96;
+    coord[2][0] = 96;
+    coord[2][1] = 3;
+    coord[3][0] = 96;
+    coord[3][1] = 96;
+    ret = H5Sselect_elements(sid, H5S_SELECT_SET, (size_t)SPACE11_NPOINTS, (const hsize_t *)coord);
+    CHECK(ret, FAIL, "H5Sselect_elements");
+
+    /* Get bounds for point selection */
+    ret = H5Sget_select_bounds(sid, low_bounds, high_bounds);
+    CHECK(ret, FAIL, "H5Sget_select_bounds");
+    VERIFY(low_bounds[0], 3, "H5Sget_select_bounds");
+    VERIFY(low_bounds[1], 3, "H5Sget_select_bounds");
+    VERIFY(high_bounds[0], SPACE11_DIM1 - 4, "H5Sget_select_bounds");
+    VERIFY(high_bounds[1], SPACE11_DIM2 - 4, "H5Sget_select_bounds");
+
+    /* Set bad offset for selection (offset[1] = -5 would shift the low bound
+     * of 3 below zero; H5Soffset_simple itself succeeds, the query fails) */
+    offset[0] = 5;
+    offset[1] = -5;
+    ret = H5Soffset_simple(sid, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+
+    /* Get bounds for point selection with negative offset (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Sget_select_bounds(sid, low_bounds, high_bounds);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Sget_select_bounds");
+
+    /* Set valid offset for selection */
+    offset[0] = 2;
+    offset[1] = -2;
+    ret = H5Soffset_simple(sid, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+
+    /* Get bounds for point selection with offset */
+    ret = H5Sget_select_bounds(sid, low_bounds, high_bounds);
+    CHECK(ret, FAIL, "H5Sget_select_bounds");
+    VERIFY(low_bounds[0], 5, "H5Sget_select_bounds");
+    VERIFY(low_bounds[1], 1, "H5Sget_select_bounds");
+    VERIFY(high_bounds[0], SPACE11_DIM1 - 2, "H5Sget_select_bounds");
+    VERIFY(high_bounds[1], SPACE11_DIM2 - 6, "H5Sget_select_bounds");
+
+    /* Reset offset for selection */
+    offset[0] = 0;
+    offset[1] = 0;
+    ret = H5Soffset_simple(sid, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+
+    /* Set "regular" hyperslab selection */
+    start[0] = 2;
+    start[1] = 2;
+    stride[0] = 10;
+    stride[1] = 10;
+    count[0] = 4;
+    count[1] = 4;
+    block[0] = 5;
+    block[1] = 5;
+    ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Get bounds for hyperslab selection
+     * (last block starts at 2 + 3*10 = 32 and ends at 32 + 5 - 1 = 36) */
+    ret = H5Sget_select_bounds(sid, low_bounds, high_bounds);
+    CHECK(ret, FAIL, "H5Sget_select_bounds");
+    VERIFY(low_bounds[0], 2, "H5Sget_select_bounds");
+    VERIFY(low_bounds[1], 2, "H5Sget_select_bounds");
+    VERIFY(high_bounds[0], 36, "H5Sget_select_bounds");
+    VERIFY(high_bounds[1], 36, "H5Sget_select_bounds");
+
+    /* Set bad offset for selection (would shift low bound of 2 below zero) */
+    offset[0] = 5;
+    offset[1] = -5;
+    ret = H5Soffset_simple(sid, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+
+    /* Get bounds for hyperslab selection with negative offset (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Sget_select_bounds(sid, low_bounds, high_bounds);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Sget_select_bounds");
+
+    /* Set valid offset for selection */
+    offset[0] = 5;
+    offset[1] = -2;
+    ret = H5Soffset_simple(sid, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+
+    /* Get bounds for hyperslab selection with offset */
+    ret = H5Sget_select_bounds(sid, low_bounds, high_bounds);
+    CHECK(ret, FAIL, "H5Sget_select_bounds");
+    VERIFY(low_bounds[0], 7, "H5Sget_select_bounds");
+    VERIFY(low_bounds[1], 0, "H5Sget_select_bounds");
+    VERIFY(high_bounds[0], 41, "H5Sget_select_bounds");
+    VERIFY(high_bounds[1], 34, "H5Sget_select_bounds");
+
+    /* Reset offset for selection */
+    offset[0] = 0;
+    offset[1] = 0;
+    ret = H5Soffset_simple(sid, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+
+    /* Make "irregular" hyperslab selection by 'OR'ing in more blocks */
+    start[0] = 20;
+    start[1] = 20;
+    stride[0] = 20;
+    stride[1] = 20;
+    count[0] = 2;
+    count[1] = 2;
+    block[0] = 10;
+    block[1] = 10;
+    ret = H5Sselect_hyperslab(sid, H5S_SELECT_OR, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Get bounds for hyperslab selection
+     * ('OR'ed blocks extend the high bound to 40 + 10 - 1 = 49) */
+    ret = H5Sget_select_bounds(sid, low_bounds, high_bounds);
+    CHECK(ret, FAIL, "H5Sget_select_bounds");
+    VERIFY(low_bounds[0], 2, "H5Sget_select_bounds");
+    VERIFY(low_bounds[1], 2, "H5Sget_select_bounds");
+    VERIFY(high_bounds[0], 49, "H5Sget_select_bounds");
+    VERIFY(high_bounds[1], 49, "H5Sget_select_bounds");
+
+    /* Set bad offset for selection (would shift low bound of 2 below zero) */
+    offset[0] = 5;
+    offset[1] = -5;
+    ret = H5Soffset_simple(sid, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+
+    /* Get bounds for hyperslab selection with negative offset (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Sget_select_bounds(sid, low_bounds, high_bounds);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Sget_select_bounds");
+
+    /* Set valid offset for selection */
+    offset[0] = 5;
+    offset[1] = -2;
+    ret = H5Soffset_simple(sid, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+
+    /* Get bounds for hyperslab selection with offset */
+    ret = H5Sget_select_bounds(sid, low_bounds, high_bounds);
+    CHECK(ret, FAIL, "H5Sget_select_bounds");
+    VERIFY(low_bounds[0], 7, "H5Sget_select_bounds");
+    VERIFY(low_bounds[1], 0, "H5Sget_select_bounds");
+    VERIFY(high_bounds[0], 54, "H5Sget_select_bounds");
+    VERIFY(high_bounds[1], 47, "H5Sget_select_bounds");
+
+    /* Reset offset for selection */
+    offset[0] = 0;
+    offset[1] = 0;
+    ret = H5Soffset_simple(sid, offset);
+    CHECK(ret, FAIL, "H5Soffset_simple");
+
+    /* Close the dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_select_bounds() */
+
+/****************************************************************
+**
+** test_hyper_regular(): Tests query operations on regular hyperslabs
+**
+****************************************************************/
+static void
+test_hyper_regular(void)
+{
+    hid_t sid; /* Dataspace ID */
+    const hsize_t dims[SPACE13_RANK] = {SPACE13_DIM1, SPACE13_DIM2, SPACE13_DIM3}; /* Dataspace dimensions */
+    hsize_t coord[SPACE13_NPOINTS][SPACE13_RANK]; /* Coordinates for point selection */
+    hsize_t start[SPACE13_RANK];  /* The start of the hyperslab */
+    hsize_t stride[SPACE13_RANK]; /* The stride between block starts for the hyperslab */
+    hsize_t count[SPACE13_RANK];  /* The number of blocks for the hyperslab */
+    hsize_t block[SPACE13_RANK];  /* The size of each block for the hyperslab */
+    hsize_t t_start[SPACE13_RANK]; /* Temporary start of the hyperslab */
+    hsize_t t_count[SPACE13_RANK]; /* Temporary number of blocks for the hyperslab */
+    hsize_t q_start[SPACE13_RANK];  /* The queried start of the hyperslab */
+    hsize_t q_stride[SPACE13_RANK]; /* The queried stride between block starts for the hyperslab */
+    hsize_t q_count[SPACE13_RANK];  /* The queried number of blocks for the hyperslab */
+    hsize_t q_block[SPACE13_RANK];  /* The queried size of each block for the hyperslab */
+    htri_t is_regular; /* Whether a hyperslab selection is regular */
+    unsigned u;        /* Local index variable */
+    herr_t ret;        /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(6, ("Testing queries on regular hyperslabs\n"));
+
+    /* Create dataspace */
+    sid = H5Screate_simple(SPACE13_RANK, dims, NULL);
+    CHECK(sid, FAIL, "H5Screate_simple");
+
+    /* Query if 'all' selection is regular hyperslab (should fail,
+     * since the selection is not a hyperslab at all) */
+    H5E_BEGIN_TRY
+    {
+        is_regular = H5Sis_regular_hyperslab(sid);
+    }
+    H5E_END_TRY;
+    VERIFY(is_regular, FAIL, "H5Sis_regular_hyperslab");
+
+    /* Query regular hyperslab selection info (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Sget_regular_hyperslab(sid, q_start, q_stride, q_count, q_block);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Sget_regular_hyperslab");
+
+    /* Set 'none' selection */
+    ret = H5Sselect_none(sid);
+    CHECK(ret, FAIL, "H5Sselect_none");
+
+    /* Query if 'none' selection is regular hyperslab (should fail) */
+    H5E_BEGIN_TRY
+    {
+        is_regular = H5Sis_regular_hyperslab(sid);
+    }
+    H5E_END_TRY;
+    VERIFY(is_regular, FAIL, "H5Sis_regular_hyperslab");
+
+    /* Query regular hyperslab selection info (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Sget_regular_hyperslab(sid, q_start, q_stride, q_count, q_block);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Sget_regular_hyperslab");
+
+    /* Set point selection */
+    coord[0][0] = 3;
+    coord[0][1] = 3;
+    coord[0][2] = 3;
+    coord[1][0] = 3;
+    coord[1][1] = 48;
+    coord[1][2] = 48;
+    coord[2][0] = 48;
+    coord[2][1] = 3;
+    coord[2][2] = 3;
+    coord[3][0] = 48;
+    coord[3][1] = 48;
+    coord[3][2] = 48;
+    ret = H5Sselect_elements(sid, H5S_SELECT_SET, (size_t)SPACE13_NPOINTS, (const hsize_t *)coord);
+    CHECK(ret, FAIL, "H5Sselect_elements");
+
+    /* Query if 'point' selection is regular hyperslab (should fail) */
+    H5E_BEGIN_TRY
+    {
+        is_regular = H5Sis_regular_hyperslab(sid);
+    }
+    H5E_END_TRY;
+    VERIFY(is_regular, FAIL, "H5Sis_regular_hyperslab");
+
+    /* Query regular hyperslab selection info (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Sget_regular_hyperslab(sid, q_start, q_stride, q_count, q_block);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Sget_regular_hyperslab");
+
+    /* Set "regular" hyperslab selection (same start/stride/count/block in
+     * every dimension, expressible as a single hyperslab description) */
+    start[0] = 2;
+    start[1] = 2;
+    start[2] = 2;
+    stride[0] = 5;
+    stride[1] = 5;
+    stride[2] = 5;
+    count[0] = 3;
+    count[1] = 3;
+    count[2] = 3;
+    block[0] = 4;
+    block[1] = 4;
+    block[2] = 4;
+    ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Query if 'hyperslab' selection is regular hyperslab (should be TRUE) */
+    is_regular = H5Sis_regular_hyperslab(sid);
+    VERIFY(is_regular, TRUE, "H5Sis_regular_hyperslab");
+
+    /* Retrieve the hyperslab parameters */
+    ret = H5Sget_regular_hyperslab(sid, q_start, q_stride, q_count, q_block);
+    CHECK(ret, FAIL, "H5Sget_regular_hyperslab");
+
+    /* Verify the hyperslab parameters match what was set */
+    for (u = 0; u < SPACE13_RANK; u++) {
+        if (start[u] != q_start[u])
+            ERROR("H5Sget_regular_hyperslab, start");
+        if (stride[u] != q_stride[u])
+            ERROR("H5Sget_regular_hyperslab, stride");
+        if (count[u] != q_count[u])
+            ERROR("H5Sget_regular_hyperslab, count");
+        if (block[u] != q_block[u])
+            ERROR("H5Sget_regular_hyperslab, block");
+    } /* end for */
+
+    /* 'OR' in another point, making the selection irregular */
+    t_start[0] = 0;
+    t_start[1] = 0;
+    t_start[2] = 0;
+    t_count[0] = 1;
+    t_count[1] = 1;
+    t_count[2] = 1;
+    ret = H5Sselect_hyperslab(sid, H5S_SELECT_OR, t_start, NULL, t_count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Query if 'hyperslab' selection is regular hyperslab (should be FALSE) */
+    is_regular = H5Sis_regular_hyperslab(sid);
+    VERIFY(is_regular, FALSE, "H5Sis_regular_hyperslab");
+
+    /* Query regular hyperslab selection info (should fail) */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Sget_regular_hyperslab(sid, q_start, q_stride, q_count, q_block);
+    }
+    H5E_END_TRY;
+    VERIFY(ret, FAIL, "H5Sget_regular_hyperslab");
+
+    /* 'XOR' in the point again, to remove it, which should make it regular again */
+    t_start[0] = 0;
+    t_start[1] = 0;
+    t_start[2] = 0;
+    t_count[0] = 1;
+    t_count[1] = 1;
+    t_count[2] = 1;
+    ret = H5Sselect_hyperslab(sid, H5S_SELECT_XOR, t_start, NULL, t_count, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Query if 'hyperslab' selection is regular hyperslab (should be TRUE) */
+    is_regular = H5Sis_regular_hyperslab(sid);
+    VERIFY(is_regular, TRUE, "H5Sis_regular_hyperslab");
+
+    /* Retrieve the hyperslab parameters */
+    ret = H5Sget_regular_hyperslab(sid, q_start, q_stride, q_count, q_block);
+    CHECK(ret, FAIL, "H5Sget_regular_hyperslab");
+
+    /* Verify the hyperslab parameters match the original selection again */
+    for (u = 0; u < SPACE13_RANK; u++) {
+        if (start[u] != q_start[u])
+            ERROR("H5Sget_regular_hyperslab, start");
+        if (stride[u] != q_stride[u])
+            ERROR("H5Sget_regular_hyperslab, stride");
+        if (count[u] != q_count[u])
+            ERROR("H5Sget_regular_hyperslab, count");
+        if (block[u] != q_block[u])
+            ERROR("H5Sget_regular_hyperslab, block");
+    } /* end for */
+
+    /* Close the dataspace */
+    ret = H5Sclose(sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* test_hyper_regular() */
+
+/****************************************************************
+**
+** test_hyper_unlim(): Tests unlimited hyperslab selections
+**
+****************************************************************/
+/* Helper for test_hyper_unlim(): clip the (possibly unlimited) selection in
+ * 'sid' to the extent 'dims' (by 'AND'ing it with a hyperslab of that size)
+ * and verify the result against expectations:
+ *   endpoints  - expected number of selected elements
+ *   enblocks   - expected number of hyperslab blocks (at most 2)
+ *   eblock1/2  - expected block descriptions (start coords + end coords, 6
+ *                hsize_t values each); the two blocks may appear in either
+ *                order in the blocklist
+ * The caller's 'sid' is not modified; the clipping happens on a copy.
+ */
+static void
+test_hyper_unlim_check(hid_t sid, hsize_t *dims, hssize_t endpoints, hssize_t enblocks, hsize_t *eblock1,
+                       hsize_t *eblock2)
+{
+    hid_t lim_sid;          /* Copy of sid, clipped to 'dims' */
+    hsize_t start[3];
+    H5S_sel_type sel_type;
+    hssize_t npoints;
+    hssize_t nblocks;
+    hsize_t blocklist[12];  /* Room for 2 blocks x 6 values */
+    herr_t ret;
+
+    HDassert(enblocks <= 2);
+
+    /* Copy sid to lim_sid */
+    lim_sid = H5Scopy(sid);
+    CHECK(lim_sid, FAIL, "H5Scopy");
+
+    /* "And" lim_sid with dims to create limited selection */
+    HDmemset(start, 0, sizeof(start));
+    ret = H5Sselect_hyperslab(lim_sid, H5S_SELECT_AND, start, NULL, dims, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Check number of elements */
+    npoints = H5Sget_select_npoints(lim_sid);
+    CHECK(npoints, FAIL, "H5Sget_select_npoints");
+    VERIFY(npoints, endpoints, "H5Sget_select_npoints");
+
+    /* Get selection type */
+    sel_type = H5Sget_select_type(lim_sid);
+    CHECK(sel_type, H5S_SEL_ERROR, "H5Sget_select_type");
+
+    /* Only examine blocks for hyperslab selection
+     * (clipping may reduce the selection to 'none') */
+    if (sel_type == H5S_SEL_HYPERSLABS) {
+        /* Get number of blocks */
+        nblocks = H5Sget_select_hyper_nblocks(lim_sid);
+        CHECK(nblocks, FAIL, "H5Sget_select_hyper_nblocks");
+        VERIFY(nblocks, enblocks, "H5Sget_select_hyper_nblocks");
+
+        if (nblocks > 0) {
+            /* Get blocklist */
+            ret = H5Sget_select_hyper_blocklist(lim_sid, (hsize_t)0, (hsize_t)nblocks, blocklist);
+            CHECK(ret, FAIL, "H5Sget_select_hyper_blocklist");
+
+            /* Verify blocklist; when there are two blocks, accept them in
+             * either order (eblock1/eblock2 or eblock2/eblock1) */
+            if (nblocks == (hssize_t)1) {
+                if (HDmemcmp(blocklist, eblock1, 6 * sizeof(eblock1[0])) != 0)
+                    ERROR("H5Sget_select_hyper_blocklist");
+            } /* end if */
+            else {
+                HDassert(nblocks == (hssize_t)2);
+                if (HDmemcmp(blocklist, eblock1, 6 * sizeof(eblock1[0])) != 0) {
+                    if (HDmemcmp(blocklist, eblock2, 6 * sizeof(eblock2[0])) != 0)
+                        ERROR("H5Sget_select_hyper_blocklist");
+                    if (HDmemcmp(&blocklist[6], eblock1, 6 * sizeof(eblock1[0])) != 0)
+                        ERROR("H5Sget_select_hyper_blocklist");
+                } /* end if */
+                else if (HDmemcmp(&blocklist[6], eblock2, 6 * sizeof(eblock2[0])) != 0)
+                    ERROR("H5Sget_select_hyper_blocklist");
+            } /* end else */
+        }     /* end if */
+    }         /* end if */
+    else if (sel_type != H5S_SEL_NONE)
+        ERROR("H5Sget_select_type");
+
+    /* Close the limited dataspace */
+    ret = H5Sclose(lim_sid);
+    CHECK(ret, FAIL, "H5Sclose");
+} /* end test_hyper_unlim_check() */
+
+static void
+test_hyper_unlim(void)
+{
+ hid_t sid;
+ hsize_t dims[3] = {4, 4, 7};
+ hsize_t mdims[3] = {4, H5S_UNLIMITED, 7};
+ hsize_t start[3] = {1, 2, 1};
+ hsize_t stride[3] = {1, 1, 3};
+ hsize_t count[3] = {1, 1, 2};
+ hsize_t block[3] = {2, H5S_UNLIMITED, 2};
+ hsize_t start2[3];
+ hsize_t count2[3];
+ hsize_t eblock1[6] = {1, 2, 1, 2, 3, 2};
+ hsize_t eblock2[6] = {1, 2, 4, 2, 3, 5};
+ hssize_t offset[3] = {0, -1, 0};
+ hssize_t ssize_out;
+ herr_t ret;
+
+ /* Output message about test being performed */
+ MESSAGE(6, ("Testing unlimited hyperslab selections\n"));
+
+ /* Create dataspace */
+ sid = H5Screate_simple(3, dims, mdims);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ /* Select unlimited hyperslab */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Check with unlimited dimension clipped to 4 */
+ test_hyper_unlim_check(sid, dims, (hssize_t)16, (hssize_t)2, eblock1, eblock2);
+
+ /* Check with unlimited dimension clipped to 3 */
+ dims[1] = 3;
+ eblock1[4] = 2;
+ eblock2[4] = 2;
+ test_hyper_unlim_check(sid, dims, (hssize_t)8, (hssize_t)2, eblock1, eblock2);
+
+ /* Check with unlimited dimension clipped to 2 */
+ dims[1] = 2;
+ test_hyper_unlim_check(sid, dims, (hssize_t)0, (hssize_t)0, eblock1, eblock2);
+
+ /* Check with unlimited dimension clipped to 1 */
+ dims[1] = 1;
+ test_hyper_unlim_check(sid, dims, (hssize_t)0, (hssize_t)0, eblock1, eblock2);
+
+ /* Check with unlimited dimension clipped to 7 */
+ dims[1] = 7;
+ eblock1[4] = 6;
+ eblock2[4] = 6;
+ test_hyper_unlim_check(sid, dims, (hssize_t)40, (hssize_t)2, eblock1, eblock2);
+
+ /* Set offset of selection */
+ ret = H5Soffset_simple(sid, offset);
+ CHECK(ret, FAIL, "H5Soffset_simple");
+
+ /* Check with adjusted offset (should not affect result) */
+ test_hyper_unlim_check(sid, dims, (hssize_t)40, (hssize_t)2, eblock1, eblock2);
+
+ /* Reset offset of selection */
+ offset[1] = (hssize_t)0;
+ ret = H5Soffset_simple(sid, offset);
+ CHECK(ret, FAIL, "H5Soffset_simple");
+
+ /*
+ * Now try with multiple blocks in unlimited dimension
+ */
+ stride[1] = 3;
+ stride[2] = 1;
+ count[1] = H5S_UNLIMITED;
+ count[2] = 1;
+ block[1] = 2;
+
+ /* Select unlimited hyperslab */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Check with new selection */
+ eblock1[1] = 2;
+ eblock1[4] = 3;
+ eblock2[1] = 5;
+ eblock2[2] = 1;
+ eblock2[4] = 6;
+ eblock2[5] = 2;
+ test_hyper_unlim_check(sid, dims, (hssize_t)16, (hssize_t)2, eblock1, eblock2);
+
+ /* Check with unlimited dimension clipped to 3 */
+ dims[1] = 3;
+ eblock1[4] = 2;
+ test_hyper_unlim_check(sid, dims, (hssize_t)4, (hssize_t)1, eblock1, eblock2);
+
+ /* Check with unlimited dimension clipped to 4 */
+ dims[1] = 4;
+ eblock1[4] = 3;
+ test_hyper_unlim_check(sid, dims, (hssize_t)8, (hssize_t)1, eblock1, eblock2);
+
+ /* Check with unlimited dimension clipped to 5 */
+ dims[1] = 5;
+ eblock1[4] = 3;
+ test_hyper_unlim_check(sid, dims, (hssize_t)8, (hssize_t)1, eblock1, eblock2);
+
+ /* Check with unlimited dimension clipped to 6 */
+ dims[1] = 6;
+ eblock1[4] = 3;
+ eblock2[4] = 5;
+ test_hyper_unlim_check(sid, dims, (hssize_t)12, (hssize_t)2, eblock1, eblock2);
+
+ /* Set offset of selection */
+ offset[1] = (hssize_t)-1;
+ ret = H5Soffset_simple(sid, offset);
+ CHECK(ret, FAIL, "H5Soffset_simple");
+
+ /* Check with adjusted offset (should not affect result) */
+ test_hyper_unlim_check(sid, dims, (hssize_t)12, (hssize_t)2, eblock1, eblock2);
+
+ /* Set offset of selection */
+ offset[1] = (hssize_t)3;
+ ret = H5Soffset_simple(sid, offset);
+ CHECK(ret, FAIL, "H5Soffset_simple");
+
+ /* Check with adjusted offset (should not affect result) */
+ test_hyper_unlim_check(sid, dims, (hssize_t)12, (hssize_t)2, eblock1, eblock2);
+
+ /* Reset offset of selection */
+ offset[1] = (hssize_t)0;
+ ret = H5Soffset_simple(sid, offset);
+ CHECK(ret, FAIL, "H5Soffset_simple");
+
+ /*
+ * Now try invalid operations
+ */
+ H5E_BEGIN_TRY
+ {
+ /* Try multiple unlimited dimensions */
+ start[0] = 1;
+ start[1] = 2;
+ start[2] = 1;
+ stride[0] = 1;
+ stride[1] = 3;
+ stride[2] = 3;
+ count[0] = 1;
+ count[1] = H5S_UNLIMITED;
+ count[2] = H5S_UNLIMITED;
+ block[0] = 2;
+ block[1] = 2;
+ block[2] = 2;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Try unlimited count and block */
+ count[2] = 2;
+ block[1] = H5S_UNLIMITED;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+ }
+ H5E_END_TRY
+
+ /* Try operations with two unlimited selections */
+ block[1] = 2;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_OR, start, NULL, count, NULL);
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_AND, start, NULL, count, NULL);
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_XOR, start, NULL, count, NULL);
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_NOTB, start, NULL, count, NULL);
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_NOTA, start, NULL, count, NULL);
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+ }
+ H5E_END_TRY
+
+ /* Try invalid combination operations */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_OR, start, NULL, block, NULL);
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_XOR, start, NULL, block, NULL);
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_NOTB, start, NULL, block, NULL);
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+ }
+ H5E_END_TRY
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL, block, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_OR, start, stride, count, block);
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_XOR, start, stride, count, block);
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_NOTA, start, stride, count, block);
+ VERIFY(ret, FAIL, "H5Sselect_hyperslab");
+ }
+ H5E_END_TRY
+
+ /*
+ * Now test valid combination operations
+ */
+ /* unlim AND non-unlim */
+ count[0] = 1;
+ count[1] = H5S_UNLIMITED;
+ count[2] = 2;
+ block[0] = 2;
+ block[1] = 2;
+ block[2] = 2;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ start2[0] = 2;
+ start2[1] = 2;
+ start2[2] = 0;
+ count2[0] = 5;
+ count2[1] = 4;
+ count2[2] = 2;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_AND, start2, NULL, count2, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ eblock1[0] = 2;
+ eblock1[3] = 2;
+ eblock1[1] = 2;
+ eblock1[4] = 3;
+ eblock1[2] = 1;
+ eblock1[5] = 1;
+ eblock2[0] = 2;
+ eblock2[3] = 2;
+ eblock2[1] = 5;
+ eblock2[4] = 5;
+ eblock2[2] = 1;
+ eblock2[5] = 1;
+ dims[0] = 50;
+ dims[1] = 50;
+ dims[2] = 50;
+ test_hyper_unlim_check(sid, dims, (hssize_t)3, (hssize_t)2, eblock1, eblock2);
+
+ /* unlim NOTA non-unlim */
+ count[0] = 1;
+ count[1] = H5S_UNLIMITED;
+ count[2] = 2;
+ block[0] = 2;
+ block[1] = 2;
+ block[2] = 2;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ start2[0] = 1;
+ start2[1] = 5;
+ start2[2] = 2;
+ count2[0] = 2;
+ count2[1] = 2;
+ count2[2] = 6;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_NOTA, start2, NULL, count2, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ eblock1[0] = 1;
+ eblock1[3] = 2;
+ eblock1[1] = 5;
+ eblock1[4] = 6;
+ eblock1[2] = 3;
+ eblock1[5] = 3;
+ eblock2[0] = 1;
+ eblock2[3] = 2;
+ eblock2[1] = 5;
+ eblock2[4] = 6;
+ eblock2[2] = 6;
+ eblock2[5] = 7;
+ dims[0] = 50;
+ dims[1] = 50;
+ dims[2] = 50;
+ test_hyper_unlim_check(sid, dims, (hssize_t)12, (hssize_t)2, eblock1, eblock2);
+
+ /* non-unlim AND unlim */
+ start2[0] = 2;
+ start2[1] = 2;
+ start2[2] = 0;
+ count2[0] = 5;
+ count2[1] = 4;
+ count2[2] = 2;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start2, NULL, count2, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ count[0] = 1;
+ count[1] = H5S_UNLIMITED;
+ count[2] = 2;
+ block[0] = 2;
+ block[1] = 2;
+ block[2] = 2;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_AND, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ eblock1[0] = 2;
+ eblock1[3] = 2;
+ eblock1[1] = 2;
+ eblock1[4] = 3;
+ eblock1[2] = 1;
+ eblock1[5] = 1;
+ eblock2[0] = 2;
+ eblock2[3] = 2;
+ eblock2[1] = 5;
+ eblock2[4] = 5;
+ eblock2[2] = 1;
+ eblock2[5] = 1;
+ dims[0] = 50;
+ dims[1] = 50;
+ dims[2] = 50;
+ test_hyper_unlim_check(sid, dims, (hssize_t)3, (hssize_t)2, eblock1, eblock2);
+
+ /* non-unlim NOTB unlim */
+ start2[0] = 1;
+ start2[1] = 5;
+ start2[2] = 2;
+ count2[0] = 2;
+ count2[1] = 2;
+ count2[2] = 6;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start2, NULL, count2, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ count[0] = 1;
+ count[1] = H5S_UNLIMITED;
+ count[2] = 2;
+ block[0] = 2;
+ block[1] = 2;
+ block[2] = 2;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_NOTB, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ eblock1[0] = 1;
+ eblock1[3] = 2;
+ eblock1[1] = 5;
+ eblock1[4] = 6;
+ eblock1[2] = 3;
+ eblock1[5] = 3;
+ eblock2[0] = 1;
+ eblock2[3] = 2;
+ eblock2[1] = 5;
+ eblock2[4] = 6;
+ eblock2[2] = 6;
+ eblock2[5] = 7;
+ dims[0] = 50;
+ dims[1] = 50;
+ dims[2] = 50;
+ test_hyper_unlim_check(sid, dims, (hssize_t)12, (hssize_t)2, eblock1, eblock2);
+
+ /* Test H5Sget_select_npoints() */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ ssize_out = H5Sget_select_npoints(sid);
+ VERIFY(ssize_out, (hssize_t)H5S_UNLIMITED, "H5Sget_select_npoints");
+
+ /* Test H5Sget_select_hyper_nblocks() */
+ H5E_BEGIN_TRY
+ {
+ ssize_out = H5Sget_select_hyper_nblocks(sid);
+ }
+ H5E_END_TRY;
+ VERIFY(ssize_out, (hssize_t)H5S_UNLIMITED, "H5Sget_select_hyper_nblocks");
+
+ /* Test H5Sget_select_bounds() */
+ ret = H5Sget_select_bounds(sid, start2, count2);
+ CHECK(ret, FAIL, "H5Sget_select_bounds");
+ VERIFY(start2[0], start[0], "H5Sget_select_bounds");
+ VERIFY(start2[1], start[1], "H5Sget_select_bounds");
+ VERIFY(start2[2], start[2], "H5Sget_select_bounds");
+ VERIFY(count2[0], (long)(start[0] + (stride[0] * (count[0] - 1)) + block[0] - 1), "H5Sget_select_bounds");
+ VERIFY(count2[1], H5S_UNLIMITED, "H5Sget_select_bounds");
+ VERIFY(count2[2], (long)(start[2] + (stride[2] * (count[2] - 1)) + block[2] - 1), "H5Sget_select_bounds");
+
+ /* Close the dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* end test_hyper_unlim() */
+
+/****************************************************************
+**
+** test_internal_consistency(): Tests selections on a dataspace,
+** then verifies that the internal states of the selections' data
+** structures are consistent.
+**
+****************************************************************/
+static void
+test_internal_consistency(void)
+{
+ hid_t all_sid; /* Dataspace ID with "all" selection */
+ hid_t none_sid; /* Dataspace ID with "none" selection */
+ hid_t single_pt_sid; /* Dataspace ID with single point selection */
+ hid_t mult_pt_sid; /* Dataspace ID with multiple point selection */
+ hid_t single_hyper_sid; /* Dataspace ID with single block hyperslab selection */
+ hid_t single_hyper_all_sid; /* Dataspace ID with single block hyperslab
+ * selection that is the entire dataspace
+ */
+ hid_t single_hyper_pt_sid; /* Dataspace ID with single block hyperslab
+ * selection that is the same as the single
+ * point selection
+ */
+ hid_t regular_hyper_sid; /* Dataspace ID with regular hyperslab selection */
+ hid_t irreg_hyper_sid; /* Dataspace ID with irregular hyperslab selection */
+ hid_t none_hyper_sid; /* Dataspace ID with "no hyperslabs" selection */
+ hid_t scalar_all_sid; /* ID for scalar dataspace with "all" selection */
+ hid_t scalar_none_sid; /* ID for scalar dataspace with "none" selection */
+ hid_t tmp_sid; /* Temporary dataspace ID */
+ hsize_t dims[] = {SPACE9_DIM1, SPACE9_DIM2};
+ hsize_t coord1[1][SPACE2_RANK]; /* Coordinates for single point selection
+ * NOTE(review): dimensioned with SPACE2_RANK while the
+ * rest of this function uses SPACE9_RANK -- confirm the
+ * two rank constants agree */
+ hsize_t coord2[SPACE9_DIM2][SPACE9_RANK]; /* Coordinates for multiple point selection */
+ hsize_t start[SPACE9_RANK]; /* Hyperslab start */
+ hsize_t stride[SPACE9_RANK]; /* Hyperslab stride */
+ hsize_t count[SPACE9_RANK]; /* Hyperslab block count */
+ hsize_t block[SPACE9_RANK]; /* Hyperslab block size */
+#if 0
+ htri_t check; /* Shape comparison return value */
+#endif
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(6, ("Testing Consistency of Internal States\n"));
+ HDassert(SPACE9_DIM2 >= POINT1_NPOINTS);
+
+ /* Create dataspace for "all" selection */
+ all_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(all_sid, FAIL, "H5Screate_simple");
+
+ /* Select entire extent for dataspace */
+ ret = H5Sselect_all(all_sid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ /* Create dataspace for "none" selection */
+ none_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(none_sid, FAIL, "H5Screate_simple");
+
+ /* Un-Select entire extent for dataspace */
+ ret = H5Sselect_none(none_sid);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ /* Create dataspace for single point selection */
+ single_pt_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(single_pt_sid, FAIL, "H5Screate_simple");
+
+ /* Select a single point at (2,2) for the single point selection */
+ coord1[0][0] = 2;
+ coord1[0][1] = 2;
+ ret = H5Sselect_elements(single_pt_sid, H5S_SELECT_SET, (size_t)1, (const hsize_t *)coord1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Create dataspace for multiple point selection */
+ mult_pt_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(mult_pt_sid, FAIL, "H5Screate_simple");
+
+ /* Select sequence of ten points for multiple point selection */
+ coord2[0][0] = 2;
+ coord2[0][1] = 2;
+ coord2[1][0] = 7;
+ coord2[1][1] = 2;
+ coord2[2][0] = 1;
+ coord2[2][1] = 4;
+ coord2[3][0] = 2;
+ coord2[3][1] = 6;
+ coord2[4][0] = 0;
+ coord2[4][1] = 8;
+ coord2[5][0] = 3;
+ coord2[5][1] = 2;
+ coord2[6][0] = 4;
+ coord2[6][1] = 4;
+ coord2[7][0] = 1;
+ coord2[7][1] = 0;
+ coord2[8][0] = 5;
+ coord2[8][1] = 1;
+ coord2[9][0] = 9;
+ coord2[9][1] = 3;
+ ret = H5Sselect_elements(mult_pt_sid, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord2);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Create dataspace for single hyperslab selection */
+ single_hyper_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(single_hyper_sid, FAIL, "H5Screate_simple");
+
+ /* Select 10x10 hyperslab for single hyperslab selection */
+ start[0] = 1;
+ start[1] = 1;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = (SPACE9_DIM1 - 2);
+ block[1] = (SPACE9_DIM2 - 2);
+ ret = H5Sselect_hyperslab(single_hyper_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create dataspace for single hyperslab selection with entire extent selected */
+ single_hyper_all_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(single_hyper_all_sid, FAIL, "H5Screate_simple");
+
+ /* Select entire extent for hyperslab selection */
+ start[0] = 0;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = SPACE9_DIM1;
+ block[1] = SPACE9_DIM2;
+ ret = H5Sselect_hyperslab(single_hyper_all_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create dataspace for single hyperslab selection with single point selected */
+ single_hyper_pt_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(single_hyper_pt_sid, FAIL, "H5Screate_simple");
+
+ /* Select a single point (1x1 block at (2,2)) via a hyperslab selection,
+ * matching the single point selection above */
+ start[0] = 2;
+ start[1] = 2;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(single_hyper_pt_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create dataspace for regular hyperslab selection */
+ regular_hyper_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(regular_hyper_sid, FAIL, "H5Screate_simple");
+
+ /* Select regular, strided hyperslab selection */
+ start[0] = 2;
+ start[1] = 2;
+ stride[0] = 2;
+ stride[1] = 2;
+ count[0] = 5;
+ count[1] = 2;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(regular_hyper_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create dataspace for irregular hyperslab selection */
+ irreg_hyper_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(irreg_hyper_sid, FAIL, "H5Screate_simple");
+
+ /* Create irregular hyperslab selection by OR'ing two blocks together */
+ start[0] = 2;
+ start[1] = 2;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(irreg_hyper_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 4;
+ start[1] = 4;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = 3;
+ block[1] = 3;
+ ret = H5Sselect_hyperslab(irreg_hyper_sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create dataspace for "no" hyperslab selection */
+ none_hyper_sid = H5Screate_simple(SPACE9_RANK, dims, NULL);
+ CHECK(none_hyper_sid, FAIL, "H5Screate_simple");
+
+ /* Create "no" hyperslab selection by XOR'ing same blocks together */
+ start[0] = 2;
+ start[1] = 2;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 1;
+ count[1] = 1;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(none_hyper_sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* XOR with the identical block cancels it, leaving an empty selection */
+ ret = H5Sselect_hyperslab(none_hyper_sid, H5S_SELECT_XOR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create scalar dataspace for "all" selection */
+ scalar_all_sid = H5Screate(H5S_SCALAR);
+ CHECK(scalar_all_sid, FAIL, "H5Screate");
+
+ /* Create scalar dataspace for "none" selection */
+ scalar_none_sid = H5Screate(H5S_SCALAR);
+ CHECK(scalar_none_sid, FAIL, "H5Screate");
+
+ /* Un-Select entire extent for dataspace */
+ ret = H5Sselect_none(scalar_none_sid);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ /* Test all the selections created */
+
+ /* Test the copy of itself */
+ tmp_sid = H5Scopy(all_sid);
+ CHECK(tmp_sid, FAIL, "H5Scopy");
+#if 0
+ /* Internal consistency checks are disabled in this build; presumably the
+ * internal H5S__internal_consistency_test() routine is unavailable to
+ * these API tests -- TODO confirm */
+ check = H5S__internal_consistency_test(tmp_sid);
+ VERIFY(check, TRUE, "H5S__internal_consistency_test");
+#endif
+ ret = H5Sclose(tmp_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+#if 0
+ /* Test "none" selection */
+ check = H5S__internal_consistency_test(none_sid);
+ VERIFY(check, TRUE, "H5S__internal_consistency_test");
+
+ /* Test single point selection */
+ check = H5S__internal_consistency_test(single_pt_sid);
+ VERIFY(check, TRUE, "H5S__internal_consistency_test");
+
+ /* Test multiple point selection */
+ check = H5S__internal_consistency_test(mult_pt_sid);
+ VERIFY(check, TRUE, "H5S__internal_consistency_test");
+
+ /* Test "plain" single hyperslab selection */
+ check = H5S__internal_consistency_test(single_hyper_sid);
+ VERIFY(check, TRUE, "H5S__internal_consistency_test");
+
+ /* Test "all" single hyperslab selection */
+ check = H5S__internal_consistency_test(single_hyper_all_sid);
+ VERIFY(check, TRUE, "H5S__internal_consistency_test");
+
+ /* Test "single point" single hyperslab selection */
+ check = H5S__internal_consistency_test(single_hyper_pt_sid);
+ VERIFY(check, TRUE, "H5S__internal_consistency_test");
+
+ /* Test regular, strided hyperslab selection */
+ check = H5S__internal_consistency_test(regular_hyper_sid);
+ VERIFY(check, TRUE, "H5S__internal_consistency_test");
+
+ /* Test irregular hyperslab selection */
+ check = H5S__internal_consistency_test(irreg_hyper_sid);
+ VERIFY(check, TRUE, "H5S__internal_consistency_test");
+
+ /* Test "no" hyperslab selection */
+ check = H5S__internal_consistency_test(none_hyper_sid);
+ VERIFY(check, TRUE, "H5S__internal_consistency_test");
+
+ /* Test scalar "all" hyperslab selection */
+ check = H5S__internal_consistency_test(scalar_all_sid);
+ VERIFY(check, TRUE, "H5S__internal_consistency_test");
+
+ /* Test scalar "none" hyperslab selection */
+ check = H5S__internal_consistency_test(scalar_none_sid);
+ VERIFY(check, TRUE, "H5S__internal_consistency_test");
+#endif
+
+ /* Close dataspaces */
+ ret = H5Sclose(all_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(none_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(single_pt_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(mult_pt_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(single_hyper_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(single_hyper_all_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(single_hyper_pt_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(regular_hyper_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(irreg_hyper_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(none_hyper_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(scalar_all_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(scalar_none_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* test_internal_consistency() */
+
+/****************************************************************
+**
+** test_irreg_io(): Tests unusual selections on datasets, to stress the
+** new hyperslab code.
+**
+****************************************************************/
+static void
+test_irreg_io(void)
+{
+ hid_t fid; /* File ID */
+ hid_t did; /* Dataset ID */
+ hid_t dcpl_id; /* Dataset creation property list ID */
+ hid_t sid; /* File dataspace ID */
+ hid_t mem_sid; /* Memory dataspace ID */
+ hsize_t dims[] = {6, 12}; /* Dataspace dimensions (6 x 12 = 72 elements) */
+ hsize_t chunk_dims[] = {2, 2}; /* Chunk dimensions */
+ hsize_t mem_dims[] = {32}; /* Memory dataspace dimensions (matches # of selected elements) */
+ hsize_t start[2]; /* Hyperslab start */
+ hsize_t stride[2]; /* Hyperslab stride */
+ hsize_t count[2]; /* Hyperslab block count */
+ hsize_t block[2]; /* Hyperslab block size */
+ unsigned char wbuf[72]; /* Write buffer (one byte per dataset element) */
+ unsigned char rbuf[32]; /* Read buffer */
+ unsigned u; /* Local index variable */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(6, ("Testing Irregular Hyperslab I/O\n"));
+
+ /* Create file */
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, FAIL, "H5Fcreate");
+
+ /* Create dataspace for dataset */
+ sid = H5Screate_simple(2, dims, NULL);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ /* Set chunk dimensions for dataset */
+ dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl_id, FAIL, "H5Pcreate");
+ ret = H5Pset_chunk(dcpl_id, 2, chunk_dims);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+
+ /* Create a dataset */
+ did = H5Dcreate2(fid, SPACE1_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, dcpl_id, H5P_DEFAULT);
+ CHECK(did, FAIL, "H5Dcreate2");
+
+ /* Initialize the write buffer */
+ for (u = 0; u < 72; u++)
+ wbuf[u] = (unsigned char)u;
+
+ /* Write entire dataset to disk */
+ ret = H5Dwrite(did, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Close the DCPL */
+ ret = H5Pclose(dcpl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Create dataspace for memory selection */
+ mem_sid = H5Screate_simple(1, mem_dims, NULL);
+ CHECK(mem_sid, FAIL, "H5Screate_simple");
+
+ /* Select 'L'-shaped region within dataset:
+ * first leg is rows 0-3, columns 10-11 (4 x 2 = 8 elements) */
+ start[0] = 0;
+ start[1] = 10;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 4;
+ count[1] = 2;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* OR in the second leg: rows 4-5, all 12 columns (2 x 12 = 24 elements),
+ * for a total of 32 elements -- matching the 1-D memory dataspace */
+ start[0] = 4;
+ start[1] = 0;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 2;
+ count[1] = 12;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Reset the buffer */
+ HDmemset(rbuf, 0, sizeof(rbuf));
+
+ /* Read selection from disk
+ * NOTE(review): rbuf contents are not verified after the read; this test
+ * only exercises the selection I/O path -- consider checking the values */
+ ret = H5Dread(did, H5T_NATIVE_UCHAR, mem_sid, sid, H5P_DEFAULT, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Close everything */
+ ret = H5Sclose(mem_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+} /* test_irreg_io() */
+
+/****************************************************************
+**
+** test_sel_iter(): Test selection iterator API routines.
+**
+****************************************************************/
+static void
+test_sel_iter(void)
+{
+ hid_t sid; /* Dataspace ID */
+ hid_t iter_id; /* Dataspace selection iterator ID */
+ hsize_t dims1[] = {6, 12}; /* 2-D Dataspace dimensions */
+ hsize_t coord1[POINT1_NPOINTS][2]; /* Coordinates for point selection */
+ hsize_t start[2]; /* Hyperslab start */
+ hsize_t stride[2]; /* Hyperslab stride */
+ hsize_t count[2]; /* Hyperslab block count */
+ hsize_t block[2]; /* Hyperslab block size */
+ size_t nseq; /* # of sequences retrieved */
+ size_t nbytes; /* # of bytes retrieved */
+ hsize_t off[SEL_ITER_MAX_SEQ]; /* Offsets for retrieved sequences */
+ size_t len[SEL_ITER_MAX_SEQ]; /* Lengths for retrieved sequences */
+ H5S_sel_type sel_type; /* Selection type */
+ unsigned sel_share; /* Whether to share selection with dataspace */
+ unsigned sel_iter_flags; /* Flags for selection iterator creation */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(6, ("Testing Dataspace Selection Iterators\n"));
+
+ /* Create dataspace */
+ sid = H5Screate_simple(2, dims1, NULL);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ /* Try creating selection iterator object with bad parameters */
+ H5E_BEGIN_TRY
+ { /* Bad dataspace ID */
+ iter_id = H5Ssel_iter_create(H5I_INVALID_HID, (size_t)1, (unsigned)0);
+ }
+ H5E_END_TRY;
+ VERIFY(iter_id, FAIL, "H5Ssel_iter_create");
+ H5E_BEGIN_TRY
+ { /* Bad element size */
+ iter_id = H5Ssel_iter_create(sid, (size_t)0, (unsigned)0);
+ }
+ H5E_END_TRY;
+ VERIFY(iter_id, FAIL, "H5Ssel_iter_create");
+ H5E_BEGIN_TRY
+ { /* Bad flag(s) */
+ iter_id = H5Ssel_iter_create(sid, (size_t)1, (unsigned)0xffff);
+ }
+ H5E_END_TRY;
+ VERIFY(iter_id, FAIL, "H5Ssel_iter_create");
+
+ /* Try closing selection iterator, with bad parameters */
+ H5E_BEGIN_TRY
+ { /* Invalid ID */
+ ret = H5Ssel_iter_close(H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Ssel_iter_close");
+ H5E_BEGIN_TRY
+ { /* Not a selection iterator ID */
+ ret = H5Ssel_iter_close(sid);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Ssel_iter_close");
+
+ /* Try with no selection sharing, and with sharing */
+ for (sel_share = 0; sel_share < 2; sel_share++) {
+ /* Set selection iterator sharing flags */
+ if (sel_share)
+ sel_iter_flags = H5S_SEL_ITER_SHARE_WITH_DATASPACE;
+ else
+ sel_iter_flags = 0;
+
+ /* Create selection iterator object */
+ iter_id = H5Ssel_iter_create(sid, (size_t)1, (unsigned)sel_iter_flags);
+ CHECK(iter_id, FAIL, "H5Ssel_iter_create");
+
+ /* Close selection iterator */
+ ret = H5Ssel_iter_close(iter_id);
+ CHECK(ret, FAIL, "H5Ssel_iter_close");
+
+ /* Try closing selection iterator twice */
+ H5E_BEGIN_TRY
+ { /* Invalid ID */
+ ret = H5Ssel_iter_close(iter_id);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Ssel_iter_close");
+
+ /* Create selection iterator object */
+ iter_id = H5Ssel_iter_create(sid, (size_t)1, (unsigned)sel_iter_flags);
+ CHECK(iter_id, FAIL, "H5Ssel_iter_create");
+
+ /* Try resetting selection iterator with bad parameters */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Ssel_iter_reset(H5I_INVALID_HID, sid);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Ssel_iter_reset");
+ H5E_BEGIN_TRY
+ {
+ ret = H5Ssel_iter_reset(iter_id, H5I_INVALID_HID);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Ssel_iter_reset");
+
+ /* Try retrieving sequences, with bad parameters */
+ H5E_BEGIN_TRY
+ { /* Invalid ID */
+ ret = H5Ssel_iter_get_seq_list(H5I_INVALID_HID, (size_t)1, (size_t)1, &nseq, &nbytes, off, len);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Ssel_iter_get_seq_list");
+ H5E_BEGIN_TRY
+ { /* Invalid nseq pointer */
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)1, (size_t)1, NULL, &nbytes, off, len);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Ssel_iter_get_seq_list");
+ H5E_BEGIN_TRY
+ { /* Invalid nbytes pointer */
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)1, (size_t)1, &nseq, NULL, off, len);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Ssel_iter_get_seq_list");
+ H5E_BEGIN_TRY
+ { /* Invalid offset array */
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)1, (size_t)1, &nseq, &nbytes, NULL, len);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Ssel_iter_get_seq_list");
+ H5E_BEGIN_TRY
+ { /* Invalid length array */
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)1, (size_t)1, &nseq, &nbytes, off, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(ret, FAIL, "H5Ssel_iter_get_seq_list");
+
+ /* Close selection iterator */
+ ret = H5Ssel_iter_close(iter_id);
+ CHECK(ret, FAIL, "H5Ssel_iter_close");
+
+ /* Test iterators on various basic selection types */
+ for (sel_type = H5S_SEL_NONE; sel_type <= H5S_SEL_ALL; sel_type = (H5S_sel_type)(sel_type + 1)) {
+ switch (sel_type) {
+ case H5S_SEL_NONE: /* "None" selection */
+ ret = H5Sselect_none(sid);
+ CHECK(ret, FAIL, "H5Sselect_none");
+ break;
+
+ case H5S_SEL_POINTS: /* Point selection */
+ /* Select sequence of ten points */
+ coord1[0][0] = 0;
+ coord1[0][1] = 9;
+ coord1[1][0] = 1;
+ coord1[1][1] = 2;
+ coord1[2][0] = 2;
+ coord1[2][1] = 4;
+ coord1[3][0] = 0;
+ coord1[3][1] = 6;
+ coord1[4][0] = 1;
+ coord1[4][1] = 8;
+ coord1[5][0] = 2;
+ coord1[5][1] = 10;
+ coord1[6][0] = 0;
+ coord1[6][1] = 11;
+ coord1[7][0] = 1;
+ coord1[7][1] = 4;
+ coord1[8][0] = 2;
+ coord1[8][1] = 1;
+ coord1[9][0] = 0;
+ coord1[9][1] = 3;
+ ret = H5Sselect_elements(sid, H5S_SELECT_SET, (size_t)POINT1_NPOINTS,
+ (const hsize_t *)coord1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+ break;
+
+ case H5S_SEL_HYPERSLABS: /* Hyperslab selection */
+ /* Select regular hyperslab */
+ start[0] = 3;
+ start[1] = 0;
+ stride[0] = 2;
+ stride[1] = 2;
+ count[0] = 2;
+ count[1] = 5;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ break;
+
+ case H5S_SEL_ALL: /* "All" selection */
+ ret = H5Sselect_all(sid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+ break;
+
+ case H5S_SEL_ERROR:
+ case H5S_SEL_N:
+ default:
+ HDassert(0 && "Can't occur");
+ break;
+ } /* end switch */
+
+ /* Create selection iterator object */
+ iter_id = H5Ssel_iter_create(sid, (size_t)1, (unsigned)sel_iter_flags);
+ CHECK(iter_id, FAIL, "H5Ssel_iter_create");
+
+ /* Try retrieving no sequences, with 0 for maxseq & maxbytes */
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)0, (size_t)1, &nseq, &nbytes, off, len);
+ CHECK(ret, FAIL, "H5Ssel_iter_get_seq_list");
+ VERIFY(nseq, 0, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 0, "H5Ssel_iter_get_seq_list");
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)1, (size_t)0, &nseq, &nbytes, off, len);
+ CHECK(ret, FAIL, "H5Ssel_iter_get_seq_list");
+ VERIFY(nseq, 0, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 0, "H5Ssel_iter_get_seq_list");
+
+ /* Try retrieving all sequences */
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)SEL_ITER_MAX_SEQ, (size_t)(1024 * 1024), &nseq,
+ &nbytes, off, len);
+ CHECK(ret, FAIL, "H5Ssel_iter_get_seq_list");
+
+ /* Check results from retrieving sequence list */
+ switch (sel_type) {
+ case H5S_SEL_NONE: /* "None" selection */
+ VERIFY(nseq, 0, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 0, "H5Ssel_iter_get_seq_list");
+ break;
+
+ case H5S_SEL_POINTS: /* Point selection */
+ VERIFY(nseq, 10, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 10, "H5Ssel_iter_get_seq_list");
+ break;
+
+ case H5S_SEL_HYPERSLABS: /* Hyperslab selection */
+ VERIFY(nseq, 10, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 10, "H5Ssel_iter_get_seq_list");
+ break;
+
+ case H5S_SEL_ALL: /* "All" selection */
+ VERIFY(nseq, 1, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 72, "H5Ssel_iter_get_seq_list");
+ break;
+
+ case H5S_SEL_ERROR:
+ case H5S_SEL_N:
+ default:
+ HDassert(0 && "Can't occur");
+ break;
+ } /* end switch */
+
+ /* Close selection iterator */
+ ret = H5Ssel_iter_close(iter_id);
+ CHECK(ret, FAIL, "H5Ssel_iter_close");
+ } /* end for */
+
+ /* Create selection iterator object */
+ iter_id = H5Ssel_iter_create(sid, (size_t)1, (unsigned)sel_iter_flags);
+ CHECK(iter_id, FAIL, "H5Ssel_iter_create");
+
+ /* Test iterators on various basic selection types using
+ * H5Ssel_iter_reset instead of creating multiple iterators */
+ for (sel_type = H5S_SEL_NONE; sel_type <= H5S_SEL_ALL; sel_type = (H5S_sel_type)(sel_type + 1)) {
+ switch (sel_type) {
+ case H5S_SEL_NONE: /* "None" selection */
+ ret = H5Sselect_none(sid);
+ CHECK(ret, FAIL, "H5Sselect_none");
+ break;
+
+ case H5S_SEL_POINTS: /* Point selection */
+ /* Select sequence of ten points */
+ coord1[0][0] = 0;
+ coord1[0][1] = 9;
+ coord1[1][0] = 1;
+ coord1[1][1] = 2;
+ coord1[2][0] = 2;
+ coord1[2][1] = 4;
+ coord1[3][0] = 0;
+ coord1[3][1] = 6;
+ coord1[4][0] = 1;
+ coord1[4][1] = 8;
+ coord1[5][0] = 2;
+ coord1[5][1] = 10;
+ coord1[6][0] = 0;
+ coord1[6][1] = 11;
+ coord1[7][0] = 1;
+ coord1[7][1] = 4;
+ coord1[8][0] = 2;
+ coord1[8][1] = 1;
+ coord1[9][0] = 0;
+ coord1[9][1] = 3;
+ ret = H5Sselect_elements(sid, H5S_SELECT_SET, (size_t)POINT1_NPOINTS,
+ (const hsize_t *)coord1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+ break;
+
+ case H5S_SEL_HYPERSLABS: /* Hyperslab selection */
+ /* Select regular hyperslab */
+ start[0] = 3;
+ start[1] = 0;
+ stride[0] = 2;
+ stride[1] = 2;
+ count[0] = 2;
+ count[1] = 5;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+ break;
+
+ case H5S_SEL_ALL: /* "All" selection */
+ ret = H5Sselect_all(sid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+ break;
+
+ case H5S_SEL_ERROR:
+ case H5S_SEL_N:
+ default:
+ HDassert(0 && "Can't occur");
+ break;
+ } /* end switch */
+
+ /* Try retrieving no sequences, with 0 for maxseq & maxbytes */
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)0, (size_t)1, &nseq, &nbytes, off, len);
+ CHECK(ret, FAIL, "H5Ssel_iter_get_seq_list");
+ VERIFY(nseq, 0, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 0, "H5Ssel_iter_get_seq_list");
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)1, (size_t)0, &nseq, &nbytes, off, len);
+ CHECK(ret, FAIL, "H5Ssel_iter_get_seq_list");
+ VERIFY(nseq, 0, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 0, "H5Ssel_iter_get_seq_list");
+
+ /* Reset iterator */
+ ret = H5Ssel_iter_reset(iter_id, sid);
+ CHECK(ret, FAIL, "H5Ssel_iter_reset");
+
+ /* Try retrieving all sequences */
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)SEL_ITER_MAX_SEQ, (size_t)(1024 * 1024), &nseq,
+ &nbytes, off, len);
+ CHECK(ret, FAIL, "H5Ssel_iter_get_seq_list");
+
+ /* Check results from retrieving sequence list */
+ switch (sel_type) {
+ case H5S_SEL_NONE: /* "None" selection */
+ VERIFY(nseq, 0, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 0, "H5Ssel_iter_get_seq_list");
+ break;
+
+ case H5S_SEL_POINTS: /* Point selection */
+ VERIFY(nseq, 10, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 10, "H5Ssel_iter_get_seq_list");
+ break;
+
+ case H5S_SEL_HYPERSLABS: /* Hyperslab selection */
+ VERIFY(nseq, 10, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 10, "H5Ssel_iter_get_seq_list");
+ break;
+
+ case H5S_SEL_ALL: /* "All" selection */
+ VERIFY(nseq, 1, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 72, "H5Ssel_iter_get_seq_list");
+ break;
+
+ case H5S_SEL_ERROR:
+ case H5S_SEL_N:
+ default:
+ HDassert(0 && "Can't occur");
+ break;
+ } /* end switch */
+
+ /* Reset iterator */
+ ret = H5Ssel_iter_reset(iter_id, sid);
+ CHECK(ret, FAIL, "H5Ssel_iter_reset");
+
+ /* Try retrieving all sequences again */
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)SEL_ITER_MAX_SEQ, (size_t)(1024 * 1024), &nseq,
+ &nbytes, off, len);
+ CHECK(ret, FAIL, "H5Ssel_iter_get_seq_list");
+
+ /* Check results from retrieving sequence list */
+ switch (sel_type) {
+ case H5S_SEL_NONE: /* "None" selection */
+ VERIFY(nseq, 0, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 0, "H5Ssel_iter_get_seq_list");
+ break;
+
+ case H5S_SEL_POINTS: /* Point selection */
+ VERIFY(nseq, 10, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 10, "H5Ssel_iter_get_seq_list");
+ break;
+
+ case H5S_SEL_HYPERSLABS: /* Hyperslab selection */
+ VERIFY(nseq, 10, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 10, "H5Ssel_iter_get_seq_list");
+ break;
+
+ case H5S_SEL_ALL: /* "All" selection */
+ VERIFY(nseq, 1, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 72, "H5Ssel_iter_get_seq_list");
+ break;
+
+ case H5S_SEL_ERROR:
+ case H5S_SEL_N:
+ default:
+ HDassert(0 && "Can't occur");
+ break;
+ } /* end switch */
+
+ /* Reset iterator */
+ ret = H5Ssel_iter_reset(iter_id, sid);
+ CHECK(ret, FAIL, "H5Ssel_iter_reset");
+ } /* end for */
+
+ /* Close selection iterator */
+ ret = H5Ssel_iter_close(iter_id);
+ CHECK(ret, FAIL, "H5Ssel_iter_close");
+
+ /* Point selection which will merge into smaller # of sequences */
+ coord1[0][0] = 0;
+ coord1[0][1] = 9;
+ coord1[1][0] = 0;
+ coord1[1][1] = 10;
+ coord1[2][0] = 0;
+ coord1[2][1] = 11;
+ coord1[3][0] = 0;
+ coord1[3][1] = 6;
+ coord1[4][0] = 1;
+ coord1[4][1] = 8;
+ coord1[5][0] = 2;
+ coord1[5][1] = 10;
+ coord1[6][0] = 0;
+ coord1[6][1] = 11;
+ coord1[7][0] = 1;
+ coord1[7][1] = 4;
+ coord1[8][0] = 1;
+ coord1[8][1] = 5;
+ coord1[9][0] = 1;
+ coord1[9][1] = 6;
+ ret = H5Sselect_elements(sid, H5S_SELECT_SET, (size_t)POINT1_NPOINTS, (const hsize_t *)coord1);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Create selection iterator object */
+ iter_id = H5Ssel_iter_create(sid, (size_t)1, (unsigned)sel_iter_flags);
+ CHECK(iter_id, FAIL, "H5Ssel_iter_create");
+
+ /* Try retrieving all sequences */
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)SEL_ITER_MAX_SEQ, (size_t)(1024 * 1024), &nseq,
+ &nbytes, off, len);
+ CHECK(ret, FAIL, "H5Ssel_iter_get_seq_list");
+ VERIFY(nseq, 6, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 10, "H5Ssel_iter_get_seq_list");
+
+ /* Reset iterator */
+ ret = H5Ssel_iter_reset(iter_id, sid);
+ CHECK(ret, FAIL, "H5Ssel_iter_reset");
+
+ /* Try retrieving all sequences again */
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)SEL_ITER_MAX_SEQ, (size_t)(1024 * 1024), &nseq,
+ &nbytes, off, len);
+ CHECK(ret, FAIL, "H5Ssel_iter_get_seq_list");
+ VERIFY(nseq, 6, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 10, "H5Ssel_iter_get_seq_list");
+
+ /* Close selection iterator */
+ ret = H5Ssel_iter_close(iter_id);
+ CHECK(ret, FAIL, "H5Ssel_iter_close");
+
+ /* Select irregular hyperslab, which will merge into smaller # of sequences */
+ start[0] = 3;
+ start[1] = 0;
+ stride[0] = 2;
+ stride[1] = 2;
+ count[0] = 2;
+ count[1] = 5;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ start[0] = 3;
+ start[1] = 3;
+ stride[0] = 2;
+ stride[1] = 2;
+ count[0] = 2;
+ count[1] = 5;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create selection iterator object */
+ iter_id = H5Ssel_iter_create(sid, (size_t)1, (unsigned)sel_iter_flags);
+ CHECK(iter_id, FAIL, "H5Ssel_iter_create");
+
+ /* Try retrieving all sequences */
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)SEL_ITER_MAX_SEQ, (size_t)(1024 * 1024), &nseq,
+ &nbytes, off, len);
+ CHECK(ret, FAIL, "H5Ssel_iter_get_seq_list");
+ VERIFY(nseq, 6, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 20, "H5Ssel_iter_get_seq_list");
+
+ /* Reset iterator */
+ ret = H5Ssel_iter_reset(iter_id, sid);
+ CHECK(ret, FAIL, "H5Ssel_iter_reset");
+
+ /* Try retrieving all sequences again */
+ ret = H5Ssel_iter_get_seq_list(iter_id, (size_t)SEL_ITER_MAX_SEQ, (size_t)(1024 * 1024), &nseq,
+ &nbytes, off, len);
+ CHECK(ret, FAIL, "H5Ssel_iter_get_seq_list");
+ VERIFY(nseq, 6, "H5Ssel_iter_get_seq_list");
+ VERIFY(nbytes, 20, "H5Ssel_iter_get_seq_list");
+
+ /* Close selection iterator */
+ ret = H5Ssel_iter_close(iter_id);
+ CHECK(ret, FAIL, "H5Ssel_iter_close");
+
+ } /* end for */
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* test_sel_iter() */
+
+/****************************************************************
+**
+** test_select_intersect_block(): Test selections on dataspace,
+** verify that "intersect block" routine is working correctly.
+**
+****************************************************************/
+static void
+test_select_intersect_block(void)
+{
+ hid_t sid; /* Dataspace ID */
+ hsize_t dims1[] = {6, 12}; /* 2-D Dataspace dimensions */
+ hsize_t block_start[] = {1, 3}; /* Start offset for block */
+ hsize_t block_end[] = {2, 5}; /* End offset for block */
+ hsize_t block_end2[] = {0, 5}; /* Bad end offset for block */
+ hsize_t block_end3[] = {2, 2}; /* Another bad end offset for block */
+ hsize_t block_end4[] = {1, 3}; /* End offset that makes a single element block */
+ hsize_t coord[10][2]; /* Coordinates for point selection */
+ hsize_t start[2]; /* Starting location of hyperslab */
+ hsize_t stride[2]; /* Stride of hyperslab */
+ hsize_t count[2]; /* Element count of hyperslab */
+ hsize_t block[2]; /* Block size of hyperslab */
+ htri_t status; /* Intersection status */
+ herr_t ret; /* Generic return value */
+
+ /* Output message about test being performed */
+ MESSAGE(6, ("Testing Dataspace Selection Block Intersection\n"));
+
+ /* Create dataspace */
+ sid = H5Screate_simple(2, dims1, NULL);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ /* Try intersection calls with bad parameters */
+ H5E_BEGIN_TRY
+ { /* Bad dataspace ID */
+ status = H5Sselect_intersect_block(H5I_INVALID_HID, block_start, block_end);
+ }
+ H5E_END_TRY;
+ VERIFY(status, FAIL, "H5Sselect_intersect_block");
+ H5E_BEGIN_TRY
+ { /* Bad start pointer */
+ status = H5Sselect_intersect_block(sid, NULL, block_end);
+ }
+ H5E_END_TRY;
+ VERIFY(status, FAIL, "H5Sselect_intersect_block");
+ H5E_BEGIN_TRY
+ { /* Bad end pointer */
+ status = H5Sselect_intersect_block(sid, block_start, NULL);
+ }
+ H5E_END_TRY;
+ VERIFY(status, FAIL, "H5Sselect_intersect_block");
+ H5E_BEGIN_TRY
+ { /* Invalid block */
+ status = H5Sselect_intersect_block(sid, block_start, block_end2);
+ }
+ H5E_END_TRY;
+ VERIFY(status, FAIL, "H5Sselect_intersect_block");
+ H5E_BEGIN_TRY
+ { /* Another invalid block */
+ status = H5Sselect_intersect_block(sid, block_start, block_end3);
+ }
+ H5E_END_TRY;
+ VERIFY(status, FAIL, "H5Sselect_intersect_block");
+
+ /* Set selection to 'none' */
+ ret = H5Sselect_none(sid);
+ CHECK(ret, FAIL, "H5Sselect_none");
+
+ /* Test block intersection with 'none' selection (always false) */
+ status = H5Sselect_intersect_block(sid, block_start, block_end);
+ VERIFY(status, FALSE, "H5Sselect_intersect_block");
+
+ /* Set selection to 'all' */
+ ret = H5Sselect_all(sid);
+ CHECK(ret, FAIL, "H5Sselect_all");
+
+ /* Test block intersection with 'all' selection (always true) */
+ status = H5Sselect_intersect_block(sid, block_start, block_end);
+ VERIFY(status, TRUE, "H5Sselect_intersect_block");
+
+ /* Select sequence of ten points */
+ coord[0][0] = 0;
+ coord[0][1] = 10;
+ coord[1][0] = 1;
+ coord[1][1] = 2;
+ coord[2][0] = 2;
+ coord[2][1] = 4;
+ coord[3][0] = 0;
+ coord[3][1] = 6;
+ coord[4][0] = 1;
+ coord[4][1] = 8;
+ coord[5][0] = 2;
+ coord[5][1] = 11;
+ coord[6][0] = 0;
+ coord[6][1] = 4;
+ coord[7][0] = 1;
+ coord[7][1] = 0;
+ coord[8][0] = 2;
+ coord[8][1] = 1;
+ coord[9][0] = 0;
+ coord[9][1] = 3;
+ ret = H5Sselect_elements(sid, H5S_SELECT_SET, (size_t)10, (const hsize_t *)coord);
+ CHECK(ret, FAIL, "H5Sselect_elements");
+
+ /* Test block intersection with 'point' selection */
+ status = H5Sselect_intersect_block(sid, block_start, block_end);
+ VERIFY(status, TRUE, "H5Sselect_intersect_block");
+ status = H5Sselect_intersect_block(sid, block_start, block_end4);
+ VERIFY(status, FALSE, "H5Sselect_intersect_block");
+
+ /* Select single 4x6 hyperslab block at (2,1) */
+ start[0] = 2;
+ start[1] = 1;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 4;
+ count[1] = 6;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Test block intersection with single 'hyperslab' selection */
+ status = H5Sselect_intersect_block(sid, block_start, block_end);
+ VERIFY(status, TRUE, "H5Sselect_intersect_block");
+ status = H5Sselect_intersect_block(sid, block_start, block_end4);
+ VERIFY(status, FALSE, "H5Sselect_intersect_block");
+
+ /* 'OR' another hyperslab block in, making an irregular hyperslab selection */
+ start[0] = 3;
+ start[1] = 2;
+ stride[0] = 1;
+ stride[1] = 1;
+ count[0] = 4;
+ count[1] = 6;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_OR, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Test block intersection with 'hyperslab' selection */
+ status = H5Sselect_intersect_block(sid, block_start, block_end);
+ VERIFY(status, TRUE, "H5Sselect_intersect_block");
+ status = H5Sselect_intersect_block(sid, block_start, block_end4);
+ VERIFY(status, FALSE, "H5Sselect_intersect_block");
+
+ /* Select regular, strided hyperslab selection */
+ start[0] = 2;
+ start[1] = 1;
+ stride[0] = 2;
+ stride[1] = 2;
+ count[0] = 2;
+ count[1] = 4;
+ block[0] = 1;
+ block[1] = 1;
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Test block intersection with single 'hyperslab' selection */
+ status = H5Sselect_intersect_block(sid, block_start, block_end);
+ VERIFY(status, TRUE, "H5Sselect_intersect_block");
+ status = H5Sselect_intersect_block(sid, block_start, block_end4);
+ VERIFY(status, FALSE, "H5Sselect_intersect_block");
+
+ /* Close dataspace */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* test_select_intersect_block() */
+
+/****************************************************************
+**
+** test_hyper_io_1d():
+** Test to verify all the selected 10th element in the 1-d file
+** dataspace is read correctly into the 1-d contiguous memory space.
+** This is modeled after the test scenario described in HDFFV-10585
+** that demonstrated the hyperslab slowness. A fix to speed up
+** performance is in place to handle the special case for 1-d disjoint
+** file dataspace into 1-d single block contiguous memory space.
+**
+****************************************************************/
+static void
+test_hyper_io_1d(void)
+{
+ hid_t fid; /* File ID */
+ hid_t did; /* Dataset ID */
+ hid_t sid, mid; /* Dataspace IDs */
+ hid_t dcpl; /* Dataset creation property list ID */
+ hsize_t dims[1], maxdims[1], dimsm[1]; /* Dataset dimension sizes */
+ hsize_t chunk_dims[1]; /* Chunk dimension size */
+ hsize_t offset[1]; /* Starting offset for hyperslab */
+ hsize_t stride[1]; /* Distance between blocks in the hyperslab selection */
+ hsize_t count[1]; /* # of blocks in the the hyperslab selection */
+ hsize_t block[1]; /* Size of block in the hyperslab selection */
+ unsigned int wdata[CHUNKSZ]; /* Data to be written */
+ unsigned int rdata[NUM_ELEMENTS / 10]; /* Data to be read */
+ herr_t ret; /* Generic return value */
+ unsigned i; /* Local index variable */
+
+ /* Output message about test being performed */
+ MESSAGE(6, ("Testing Hyperslab I/O for 1-d single block memory space\n"));
+
+ for (i = 0; i < CHUNKSZ; i++)
+ wdata[i] = i;
+
+ /* Create the file file */
+ fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(fid, H5I_INVALID_HID, "H5Fcreate");
+
+ /* Create file dataspace */
+ dims[0] = CHUNKSZ;
+ maxdims[0] = H5S_UNLIMITED;
+ sid = H5Screate_simple(RANK, dims, maxdims);
+ CHECK(sid, H5I_INVALID_HID, "H5Pcreate");
+
+ /* Create memory dataspace */
+ dimsm[0] = CHUNKSZ;
+ mid = H5Screate_simple(RANK, dimsm, NULL);
+ CHECK(mid, H5I_INVALID_HID, "H5Pcreate");
+
+ /* Set up to create a chunked dataset */
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl, H5I_INVALID_HID, "H5Pcreate");
+
+ chunk_dims[0] = CHUNKSZ;
+ ret = H5Pset_chunk(dcpl, RANK, chunk_dims);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+
+ /* Create a chunked dataset */
+ did = H5Dcreate2(fid, DNAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ CHECK(did, H5I_INVALID_HID, "H5Dcreate2");
+
+ /* Set up hyperslab selection for file dataspace */
+ offset[0] = 0;
+ stride[0] = 1;
+ count[0] = 1;
+ block[0] = CHUNKSZ;
+
+ /* Write to each chunk in the dataset */
+ for (i = 0; i < NUMCHUNKS; i++) {
+ /* Set the hyperslab selection */
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, offset, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Write to the dataset */
+ ret = H5Dwrite(did, H5T_NATIVE_INT, mid, sid, H5P_DEFAULT, wdata);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ /* Extend the dataset's dataspace */
+ if (i < (NUMCHUNKS - 1)) {
+ offset[0] = offset[0] + CHUNKSZ;
+ dims[0] = dims[0] + CHUNKSZ;
+ ret = H5Dset_extent(did, dims);
+ CHECK(ret, FAIL, "H5Dset_extent");
+
+ /* Get the dataset's current dataspace */
+ sid = H5Dget_space(did);
+ CHECK(sid, H5I_INVALID_HID, "H5Dget_space");
+ }
+ }
+
+ /* Closing */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(mid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Pclose(dcpl);
+ CHECK(ret, FAIL, "H5Pclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Open the file */
+ fid = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(fid, H5I_INVALID_HID, "H5Fopen");
+
+ /* Open the dataset */
+ did = H5Dopen2(fid, DNAME, H5P_DEFAULT);
+ CHECK(did, H5I_INVALID_HID, "H5Dopen");
+
+ /* Set up to read every 10th element in file dataspace */
+ offset[0] = 1;
+ stride[0] = 10;
+ count[0] = NUM_ELEMENTS / 10;
+ block[0] = 1;
+
+ /* Get the dataset's dataspace */
+ sid = H5Dget_space(did);
+ CHECK(sid, H5I_INVALID_HID, "H5Dget_space");
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, offset, stride, count, block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Set up contiguous memory dataspace for the selected elements */
+ dimsm[0] = count[0];
+ mid = H5Screate_simple(RANK, dimsm, NULL);
+ CHECK(mid, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Read all the selected 10th elements in the dataset into "rdata" */
+ ret = H5Dread(did, H5T_NATIVE_INT, mid, sid, H5P_DEFAULT, rdata);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Verify data read is correct */
+ for (i = 0; i < 6; i += 2) {
+ VERIFY(rdata[i], 1, "H5Dread\n");
+ VERIFY(rdata[i + 1], 11, "H5Dread\n");
+ }
+
+ /* Closing */
+ ret = H5Sclose(mid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Dclose(did);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Fclose(fid);
+ CHECK(ret, FAIL, "H5Fclose");
+
+} /* test_hyper_io_1d() */
+
+/****************************************************************
+**
+** test_h5s_set_extent_none:
+** Test to verify the behavior of dataspace code when passed
+** a dataspace modified by H5Sset_extent_none().
+**
+****************************************************************/
+static void
+test_h5s_set_extent_none(void)
+{
+ hid_t sid = H5I_INVALID_HID;
+ hid_t dst_sid = H5I_INVALID_HID;
+ hid_t null_sid = H5I_INVALID_HID;
+ int rank = 1;
+ hsize_t current_dims = 123;
+ H5S_class_t cls;
+ int out_rank;
+ hsize_t out_dims;
+ hsize_t out_maxdims;
+ hssize_t out_points;
+ htri_t equal;
+ herr_t ret;
+
+ /* Specific values here don't matter as we're just going to reset */
+ sid = H5Screate_simple(rank, &current_dims, NULL);
+ CHECK(sid, H5I_INVALID_HID, "H5Screate_simple");
+
+ /* Dataspace class will be H5S_NULL after this.
+ * In versions prior to 1.10.7 / 1.12.1 this would produce a
+ * dataspace with the internal H5S_NO_CLASS class.
+ */
+ ret = H5Sset_extent_none(sid);
+ CHECK(ret, FAIL, "H5Sset_extent_none");
+ cls = H5Sget_simple_extent_type(sid);
+ VERIFY(cls, H5S_NULL, "H5Sget_simple_extent_type");
+
+ /* Extent getters should generate normal results and not segfault.
+ */
+ out_rank = H5Sget_simple_extent_dims(sid, &out_dims, &out_maxdims);
+ VERIFY(out_rank, 0, "H5Sget_simple_extent_dims");
+ out_rank = H5Sget_simple_extent_ndims(sid);
+ VERIFY(out_rank, 0, "H5Sget_simple_extent_ndims");
+ out_points = H5Sget_simple_extent_npoints(sid);
+ VERIFY(out_points, 0, "H5Sget_simple_extent_npoints");
+
+ /* Check that copying the new (non-)extent works.
+ */
+ dst_sid = H5Screate_simple(rank, &current_dims, NULL);
+ CHECK(dst_sid, H5I_INVALID_HID, "H5Screate_simple");
+ ret = H5Sextent_copy(dst_sid, sid);
+ CHECK(ret, FAIL, "H5Sextent_copy");
+
+ /* Check that H5Sset_extent_none() produces the same extent as
+ * H5Screate(H5S_NULL).
+ */
+ null_sid = H5Screate(H5S_NULL);
+ CHECK(null_sid, H5I_INVALID_HID, "H5Screate");
+ equal = H5Sextent_equal(sid, null_sid);
+ VERIFY(equal, TRUE, "H5Sextent_equal");
+
+ /* Close */
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(dst_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Sclose(null_sid);
+ CHECK(ret, FAIL, "H5Sclose");
+
+} /* test_h5s_set_extent_none() */
+
+/****************************************************************
+**
+** test_select(): Main H5S selection testing routine.
+**
+****************************************************************/
void
test_select(void)
{
    hid_t       plist_id;                     /* Property list for reading random hyperslabs */
    hid_t       fapl;                         /* Property list accessing the file */
    int         mdc_nelmts;                   /* Metadata number of elements */
    size_t      rdcc_nelmts;                  /* Raw data number of elements */
    size_t      rdcc_nbytes;                  /* Raw data number of bytes */
    double      rdcc_w0;                      /* Raw data write percentage */
    hssize_t    offset[SPACE7_RANK] = {1, 1}; /* Offset for testing selection offsets */
    const char *env_h5_drvr;                  /* File Driver value from environment */
    herr_t      ret;                          /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Selections\n"));

    /* NOTE(review): several sub-tests below share the same temporary file
     * (see the "use the same file" comments); confirm there is no order
     * dependence before reordering any of these calls. */

    /* Get the VFD to use */
    env_h5_drvr = HDgetenv(HDF5_DRIVER);
    if (env_h5_drvr == NULL)
        env_h5_drvr = "nomatch";

    /* Create a dataset transfer property list */
    plist_id = H5Pcreate(H5P_DATASET_XFER);
    CHECK(plist_id, FAIL, "H5Pcreate");

    /* test I/O with a very small buffer for reads */
    ret = H5Pset_buffer(plist_id, (size_t)59, NULL, NULL);
    CHECK(ret, FAIL, "H5Pset_buffer");

    /* These next tests use the same file */
    test_select_hyper(H5P_DEFAULT); /* Test basic H5S hyperslab selection code */
    test_select_hyper(plist_id);    /* Test basic H5S hyperslab selection code */
    test_select_point(H5P_DEFAULT); /* Test basic H5S element selection code, also tests appending to existing
                                       element selections */
    test_select_point(plist_id);    /* Test basic H5S element selection code, also tests appending to existing
                                       element selections */
    test_select_all(H5P_DEFAULT);   /* Test basic all & none selection code */
    test_select_all(plist_id);      /* Test basic all & none selection code */
    test_select_all_hyper(H5P_DEFAULT); /* Test basic all & none selection code */
    test_select_all_hyper(plist_id);    /* Test basic all & none selection code */

    /* These next tests use the same file */
    test_select_combo();                   /* Test combined hyperslab & element selection code */
    test_select_hyper_stride(H5P_DEFAULT); /* Test strided hyperslab selection code */
    test_select_hyper_stride(plist_id);    /* Test strided hyperslab selection code */
    test_select_hyper_contig(H5T_STD_U16LE, H5P_DEFAULT); /* Test contiguous hyperslab selection code */
    test_select_hyper_contig(H5T_STD_U16LE, plist_id);    /* Test contiguous hyperslab selection code */
    test_select_hyper_contig(H5T_STD_U16BE, H5P_DEFAULT); /* Test contiguous hyperslab selection code */
    test_select_hyper_contig(H5T_STD_U16BE, plist_id);    /* Test contiguous hyperslab selection code */
    test_select_hyper_contig2(H5T_STD_U16LE,
                              H5P_DEFAULT); /* Test more contiguous hyperslab selection cases */
    test_select_hyper_contig2(H5T_STD_U16LE, plist_id); /* Test more contiguous hyperslab selection cases */
    test_select_hyper_contig2(H5T_STD_U16BE,
                              H5P_DEFAULT); /* Test more contiguous hyperslab selection cases */
    test_select_hyper_contig2(H5T_STD_U16BE, plist_id); /* Test more contiguous hyperslab selection cases */
    test_select_hyper_contig3(H5T_STD_U16LE,
                              H5P_DEFAULT); /* Test yet more contiguous hyperslab selection cases */
    test_select_hyper_contig3(H5T_STD_U16LE,
                              plist_id); /* Test yet more contiguous hyperslab selection cases */
    test_select_hyper_contig3(H5T_STD_U16BE,
                              H5P_DEFAULT); /* Test yet more contiguous hyperslab selection cases */
    test_select_hyper_contig3(H5T_STD_U16BE,
                              plist_id); /* Test yet more contiguous hyperslab selection cases */
#if 0
    test_select_hyper_contig_dr(H5T_STD_U16LE, H5P_DEFAULT);
    test_select_hyper_contig_dr(H5T_STD_U16LE, plist_id);
    test_select_hyper_contig_dr(H5T_STD_U16BE, H5P_DEFAULT);
    test_select_hyper_contig_dr(H5T_STD_U16BE, plist_id);
#else
    HDprintf("** SKIPPED a test due to file creation issues\n");
#endif
#if 0
    test_select_hyper_checker_board_dr(H5T_STD_U16LE, H5P_DEFAULT);
    test_select_hyper_checker_board_dr(H5T_STD_U16LE, plist_id);
    test_select_hyper_checker_board_dr(H5T_STD_U16BE, H5P_DEFAULT);
    test_select_hyper_checker_board_dr(H5T_STD_U16BE, plist_id);
#else
    HDprintf("** SKIPPED a test due to assertion in HDF5\n");
#endif
    test_select_hyper_copy();    /* Test hyperslab selection copying code */
    test_select_point_copy();    /* Test point selection copying code */
    test_select_hyper_offset();  /* Test selection offset code with hyperslabs */
    test_select_hyper_offset2(); /* Test more selection offset code with hyperslabs */
    test_select_point_offset();  /* Test selection offset code with elements */
    test_select_hyper_union();   /* Test hyperslab union code */

    /* Fancy hyperslab API tests */
    test_select_hyper_union_stagger();     /* Test hyperslab union code for staggered slabs */
    test_select_hyper_union_3d();          /* Test hyperslab union code for 3-D dataset */
    test_select_hyper_valid_combination(); /* Test different input combinations */

    /* The following tests are currently broken with the Direct VFD */
    if (HDstrcmp(env_h5_drvr, "direct") != 0) {
        test_select_hyper_and_2d();  /* Test hyperslab intersection (AND) code for 2-D dataset */
        test_select_hyper_xor_2d();  /* Test hyperslab XOR code for 2-D dataset */
        test_select_hyper_notb_2d(); /* Test hyperslab NOTB code for 2-D dataset */
        test_select_hyper_nota_2d(); /* Test hyperslab NOTA code for 2-D dataset */
    }

    /* test the random hyperslab I/O with the default property list for reading */
    test_select_hyper_union_random_5d(H5P_DEFAULT); /* Test hyperslab union code for random 5-D hyperslabs */

    /* test random hyperslab I/O with a small buffer for reads */
    test_select_hyper_union_random_5d(plist_id); /* Test hyperslab union code for random 5-D hyperslabs */

    /* Create a dataset transfer property list */
    fapl = H5Pcreate(H5P_FILE_ACCESS);
    CHECK(fapl, FAIL, "H5Pcreate");

    /* Get the default file access properties for caching */
    ret = H5Pget_cache(fapl, &mdc_nelmts, &rdcc_nelmts, &rdcc_nbytes, &rdcc_w0);
    CHECK(ret, FAIL, "H5Pget_cache");

    /* Increase the size of the raw data cache (for the large-hyperslab chunked tests below) */
    rdcc_nbytes = 10 * 1024 * 1024;

    /* Set the file access properties for caching */
    ret = H5Pset_cache(fapl, mdc_nelmts, rdcc_nelmts, rdcc_nbytes, rdcc_w0);
    CHECK(ret, FAIL, "H5Pset_cache");

    /* Test reading in a large hyperslab with a chunked dataset */
    test_select_hyper_chunk(fapl, H5P_DEFAULT);

    /* Test reading in a large hyperslab with a chunked dataset a small amount at a time */
    test_select_hyper_chunk(fapl, plist_id);

    /* Close file access property list */
    ret = H5Pclose(fapl);
    CHECK(ret, FAIL, "H5Pclose");

    /* Close dataset transfer property list */
    ret = H5Pclose(plist_id);
    CHECK(ret, FAIL, "H5Pclose");

    /* More tests for checking validity of selections */
    test_select_valid();

    /* Tests for combining "all" and "none" selections with hyperslabs */
    test_select_combine();

    /* Test filling selections */
    /* (Also tests iterating through each selection */
    test_select_fill_all();
    test_select_fill_point(NULL);
    test_select_fill_point(offset);
    test_select_fill_hyper_simple(NULL);
    test_select_fill_hyper_simple(offset);
    test_select_fill_hyper_regular(NULL);
    test_select_fill_hyper_regular(offset);
    test_select_fill_hyper_irregular(NULL);
    test_select_fill_hyper_irregular(offset);

    /* Test 0-sized selections */
    test_select_none();

    /* Test selections on scalar dataspaces */
    test_scalar_select();
    test_scalar_select2();
    test_scalar_select3();

    /* Test "same shape" routine */
    test_shape_same();

    /* Test "same shape" routine for selections of different rank */
    test_shape_same_dr();

    /* Test "re-build" routine */
    test_space_rebuild();

    /* Test "update diminfo" routine */
    test_space_update_diminfo();

    /* Test point selections in chunked datasets */
    test_select_point_chunk();

    /* Test scalar dataspaces in chunked datasets */
    test_select_scalar_chunk();
#if 0
    /* Test using selection offset on hyperslab in chunked dataset */
    test_select_hyper_chunk_offset();
    test_select_hyper_chunk_offset2();
#else
    HDprintf("** SKIPPED a test due to assertion in HDF5\n");
#endif

    /* Test selection bounds with & without offsets */
    test_select_bounds();

    /* Test 'regular' hyperslab query routines */
    test_hyper_regular();

    /* Test unlimited hyperslab selections */
    test_hyper_unlim();

    /* Test the consistency of internal data structures of selection */
    test_internal_consistency();

    /* Test irregular selection I/O */
    test_irreg_io();

    /* Test selection iterators */
    test_sel_iter();

    /* Test selection intersection with block */
    test_select_intersect_block();

    /* Test reading of 1-d disjoint file space to 1-d single block memory space */
    test_hyper_io_1d();

    /* Test H5Sset_extent_none() functionality after we updated it to set
     * the class to H5S_NULL instead of H5S_NO_CLASS.
     */
    test_h5s_set_extent_none();

} /* test_select() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_select
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Albert Cheng
+ * July 2, 1998
+ *
+ *-------------------------------------------------------------------------
+ */
void
cleanup_select(void)
{
    /* Remove the temporary data file created by the selection tests */
    H5Fdelete(FILENAME, H5P_DEFAULT);
}
diff --git a/test/API/ttime.c b/test/API/ttime.c
new file mode 100644
index 0000000..74128fd
--- /dev/null
+++ b/test/API/ttime.c
@@ -0,0 +1,231 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: ttime
+ *
+ * Test the Time Datatype functionality
+ *
+ *************************************************************/
+
+#include "testhdf5.h"
+
+#define DATAFILE "ttime.h5"
+#ifdef NOT_YET
+#define DATASETNAME "Dataset"
+#endif /* NOT_YET */
+
+/****************************************************************
+**
+** test_time_commit(): Test committing time datatypes to a file
+**
+****************************************************************/
+static void
+test_time_commit(void)
+{
+ hid_t file_id, tid; /* identifiers */
+ herr_t status;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Committing Time Datatypes\n"));
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(file_id, FAIL, "H5Fcreate");
+
+ tid = H5Tcopy(H5T_UNIX_D32LE);
+ CHECK(tid, FAIL, "H5Tcopy");
+ status = H5Tcommit2(file_id, "Committed D32LE type", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(status, FAIL, "H5Tcommit2");
+ status = H5Tclose(tid);
+ CHECK(status, FAIL, "H5Tclose");
+
+ tid = H5Tcopy(H5T_UNIX_D32BE);
+ CHECK(tid, FAIL, "H5Tcopy");
+ status = H5Tcommit2(file_id, "Committed D32BE type", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(status, FAIL, "H5Tcommit2");
+ status = H5Tclose(tid);
+ CHECK(status, FAIL, "H5Tclose");
+
+ tid = H5Tcopy(H5T_UNIX_D64LE);
+ CHECK(tid, FAIL, "H5Tcopy");
+ status = H5Tcommit2(file_id, "Committed D64LE type", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(status, FAIL, "H5Tcommit2");
+ status = H5Tclose(tid);
+ CHECK(status, FAIL, "H5Tclose");
+
+ tid = H5Tcopy(H5T_UNIX_D64BE);
+ CHECK(tid, FAIL, "H5Tcopy");
+ status = H5Tcommit2(file_id, "Committed D64BE type", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(status, FAIL, "H5Tcommit2");
+ status = H5Tclose(tid);
+ CHECK(status, FAIL, "H5Tclose");
+
+ /* Close the file. */
+ status = H5Fclose(file_id);
+ CHECK(status, FAIL, "H5Fclose");
+
+ file_id = H5Fopen(DATAFILE, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(file_id, FAIL, "H5Fopen");
+
+ tid = H5Topen2(file_id, "Committed D32LE type", H5P_DEFAULT);
+ CHECK(tid, FAIL, "H5Topen2");
+
+ if (!H5Tequal(tid, H5T_UNIX_D32LE))
+ TestErrPrintf("H5T_UNIX_D32LE datatype not found\n");
+
+ status = H5Tclose(tid);
+ CHECK(status, FAIL, "H5Tclose");
+
+ tid = H5Topen2(file_id, "Committed D32BE type", H5P_DEFAULT);
+ CHECK(tid, FAIL, "H5Topen2");
+
+ if (!H5Tequal(tid, H5T_UNIX_D32BE))
+ TestErrPrintf("H5T_UNIX_D32BE datatype not found\n");
+
+ status = H5Tclose(tid);
+ CHECK(status, FAIL, "H5Tclose");
+
+ tid = H5Topen2(file_id, "Committed D64LE type", H5P_DEFAULT);
+ CHECK(tid, FAIL, "H5Topen2");
+
+ if (!H5Tequal(tid, H5T_UNIX_D64LE))
+ TestErrPrintf("H5T_UNIX_D64LE datatype not found");
+
+ status = H5Tclose(tid);
+ CHECK(status, FAIL, "H5Tclose");
+
+ tid = H5Topen2(file_id, "Committed D64BE type", H5P_DEFAULT);
+ CHECK(tid, FAIL, "H5Topen2");
+
+ if (!H5Tequal(tid, H5T_UNIX_D64BE))
+ TestErrPrintf("H5T_UNIX_D64BE datatype not found");
+
+ status = H5Tclose(tid);
+ CHECK(status, FAIL, "H5Tclose");
+
+ status = H5Fclose(file_id);
+ CHECK(status, FAIL, "H5Fclose");
+}
+
+#ifdef NOT_YET
+/****************************************************************
+**
+** test_time_io(): Test writing time data to a dataset
+**
+****************************************************************/
static void
test_time_io(void)
{
    hid_t  fid;               /* File identifier */
    hid_t  dsid;              /* Dataset identifier */
    hid_t  tid;               /* Datatype identifier */
    hid_t  sid;               /* Dataspace identifier */
    time_t timenow, timethen; /* Times */
    herr_t status;

    /* Output message about test being performed */
    /* NOTE(review): this message text appears copy-pasted from
     * test_time_commit(); this routine writes/reads time data rather than
     * committing types.  Harmless while the function is compiled out
     * (NOT_YET), but worth fixing if it is ever enabled. */
    MESSAGE(5, ("Testing Committing Time Datatypes\n"));

    /* Create a new file using default properties. */
    fid = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(fid, FAIL, "H5Fcreate");

    /* Create a scalar dataspace */
    sid = H5Screate(H5S_SCALAR);
    CHECK(sid, FAIL, "H5Screate");

    /* Create a dataset with a time datatype */
    dsid = H5Dcreate2(fid, DATASETNAME, H5T_UNIX_D32LE, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(dsid, FAIL, "H5Dcreate2");

    /* Initialize time data value */
    timenow = HDtime(NULL);

    /* Write time to dataset */
    status = H5Dwrite(dsid, H5T_UNIX_D32LE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &timenow);
    CHECK(status, FAIL, "H5Dwrite");

    /* Close objects */
    status = H5Dclose(dsid);
    CHECK(status, FAIL, "H5Dclose");

    status = H5Sclose(sid);
    CHECK(status, FAIL, "H5Sclose");

    status = H5Fclose(fid);
    CHECK(status, FAIL, "H5Fclose");

    /* Open file and dataset, read time back and print it in calendar format */
    fid = H5Fopen(DATAFILE, H5F_ACC_RDWR, H5P_DEFAULT);
    CHECK(fid, FAIL, "H5Fopen");

    dsid = H5Dopen2(fid, DATASETNAME, H5P_DEFAULT);
    CHECK(dsid, FAIL, "H5Dopen2");

    /* Confirm the stored datatype belongs to the H5T_TIME class */
    tid = H5Dget_type(dsid);
    CHECK(tid, FAIL, "H5Dget_type");
    if (H5Tget_class(tid) == H5T_TIME)
        HDfprintf(stderr, "datatype class is H5T_TIME\n");
    status = H5Tclose(tid);
    CHECK(status, FAIL, "H5Tclose");

    /* Read the time value back and print it for manual inspection */
    status = H5Dread(dsid, H5T_UNIX_D32LE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &timethen);
    CHECK(status, FAIL, "H5Dread");
    HDfprintf(stderr, "time written was: %s\n", HDctime(&timethen));

    status = H5Dclose(dsid);
    CHECK(status, FAIL, "H5Dclose");

    status = H5Fclose(fid);
    CHECK(status, FAIL, "H5Fclose");
}
+#endif /* NOT_YET */
+
+/****************************************************************
+**
+** test_time(): Main time datatype testing routine.
+**
+****************************************************************/
void
test_time(void)
{
    /* Output message about test being performed */
    MESSAGE(5, ("Testing Time Datatypes\n"));

    test_time_commit(); /* Test committing time datatypes to a file */
    /* test_time_io() is compiled out until time-data I/O is supported */
#ifdef NOT_YET
    test_time_io(); /* Test writing time data to a dataset */
#endif /* NOT_YET */

} /* test_time() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_time
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Quincey Koziol
+ * October 19, 2000
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
void
cleanup_time(void)
{
    /* Remove the temporary data file created by the time datatype tests */
    H5Fdelete(DATAFILE, H5P_DEFAULT);
}
diff --git a/test/API/tunicode.c b/test/API/tunicode.c
new file mode 100644
index 0000000..fa59456
--- /dev/null
+++ b/test/API/tunicode.c
@@ -0,0 +1,867 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/* Unicode test */
+#include "testhdf5.h"
+
+#define NUM_CHARS 16
+#define MAX_STRING_LENGTH ((NUM_CHARS * 4) + 1) /* Max length in bytes */
+#define MAX_PATH_LENGTH (MAX_STRING_LENGTH + 20) /* Max length in bytes */
+#define MAX_CODE_POINT 0x200000
+#define FILENAME "unicode.h5"
+/* A buffer to hold two copies of the UTF-8 string */
+#define LONG_BUF_SIZE (2 * MAX_STRING_LENGTH + 4)
+
+#define DSET1_NAME "fl_string_dataset"
+#define DSET3_NAME "dataset3"
+#define DSET4_NAME "dataset4"
+#define VL_DSET1_NAME "vl_dset_1"
+#define GROUP1_NAME "group1"
+#define GROUP2_NAME "group2"
+#define GROUP3_NAME "group3"
+#define GROUP4_NAME "group4"
+
+#define RANK 1
+#define COMP_INT_VAL 7
+#define COMP_FLOAT_VAL (-42.0F)
+#define COMP_DOUBLE_VAL 42.0
+
+/* Test function prototypes */
+void test_fl_string(hid_t fid, const char *string);
+void test_strpad(hid_t fid, const char *string);
+void test_vl_string(hid_t fid, const char *string);
+void test_objnames(hid_t fid, const char *string);
+void test_attrname(hid_t fid, const char *string);
+void test_compound(hid_t fid, const char *string);
+void test_enum(hid_t fid, const char *string);
+void test_opaque(hid_t fid, const char *string);
+
+/* Utility function prototypes */
+static hid_t mkstr(size_t len, H5T_str_t strpad);
+unsigned int write_char(unsigned int c, char *test_string, unsigned int cur_pos);
+void dump_string(const char *string);
+
/*
 * test_fl_string
 * Tests that UTF-8 can be used for fixed-length string data.
 * Writes the string to a dataset and reads it back again.
 *
 * Parameters:
 *   fid    - open file to create the dataset in
 *   string - NUL-terminated UTF-8 string, at most MAX_STRING_LENGTH
 *            bytes including the terminator (read_buf is that size)
 */
void
test_fl_string(hid_t fid, const char *string)
{
    hid_t      dtype_id, space_id, dset_id;
    hsize_t    dims = 1;
    char       read_buf[MAX_STRING_LENGTH];
    H5T_cset_t cset;
    herr_t     ret;

    /* Create the datatype, ensure that the character set behaves
     * correctly (it should default to ASCII and can be set to UTF8)
     */
    dtype_id = H5Tcopy(H5T_C_S1);
    CHECK(dtype_id, FAIL, "H5Tcopy");
    ret = H5Tset_size(dtype_id, (size_t)MAX_STRING_LENGTH);
    CHECK(ret, FAIL, "H5Tset_size");
    /* A fresh copy of H5T_C_S1 must report ASCII as its character set */
    cset = H5Tget_cset(dtype_id);
    VERIFY(cset, H5T_CSET_ASCII, "H5Tget_cset");
    ret = H5Tset_cset(dtype_id, H5T_CSET_UTF8);
    CHECK(ret, FAIL, "H5Tset_cset");
    cset = H5Tget_cset(dtype_id);
    VERIFY(cset, H5T_CSET_UTF8, "H5Tget_cset");

    /* Create dataspace for a dataset (single element) */
    space_id = H5Screate_simple(RANK, &dims, NULL);
    CHECK(space_id, FAIL, "H5Screate_simple");

    /* Create a dataset */
    dset_id = H5Dcreate2(fid, DSET1_NAME, dtype_id, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(dset_id, FAIL, "H5Dcreate2");

    /* Write UTF-8 string to dataset */
    ret = H5Dwrite(dset_id, dtype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, string);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Read string back and make sure it is unchanged */
    ret = H5Dread(dset_id, dtype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_buf);
    CHECK(ret, FAIL, "H5Dread");

    VERIFY(HDstrcmp(string, read_buf), 0, "strcmp");

    /* Close all */
    ret = H5Dclose(dset_id);
    CHECK(ret, FAIL, "H5Dclose");

    ret = H5Tclose(dtype_id);
    CHECK(ret, FAIL, "H5Tclose");
    ret = H5Sclose(space_id);
    CHECK(ret, FAIL, "H5Sclose");
}
+
+/*
+ * test_strpad
+ * Tests string padding for a UTF-8 string.
+ * Converts strings to shorter and then longer strings.
+ * Borrows heavily from dtypes.c, but is more complicated because
+ * the string is randomly generated.
+ */
+void
+test_strpad(hid_t H5_ATTR_UNUSED fid, const char *string)
+{
+ /* buf is used to hold the data that H5Tconvert operates on. */
+ char buf[LONG_BUF_SIZE];
+
+ /* cmpbuf holds the output that H5Tconvert should produce,
+ * to compare against the actual output. */
+ char cmpbuf[LONG_BUF_SIZE];
+
+ /* new_string is a slightly modified version of the UTF-8
+ * string to make the tests run more smoothly. */
+ char new_string[MAX_STRING_LENGTH + 2];
+
+ size_t length; /* Length of new_string in bytes */
+ size_t small_len; /* Size of the small datatype */
+ size_t big_len; /* Size of the larger datatype */
+ hid_t src_type, dst_type;
+ herr_t ret;
+
+ /* The following tests are simpler if the UTF-8 string contains
+ * the right number of bytes (even or odd, depending on the test).
+ * We create a 'new_string' whose length is convenient by prepending
+ * an 'x' to 'string' when necessary. */
+ length = HDstrlen(string);
+ if (length % 2 != 1) {
+ HDstrcpy(new_string, "x");
+ HDstrcat(new_string, string);
+ length++;
+ }
+ else {
+ HDstrcpy(new_string, string);
+ }
+
+ /* Convert a null-terminated string to a shorter and longer null
+ * terminated string. */
+
+ /* Create a src_type that holds the UTF-8 string and its final NULL */
+ big_len = length + 1; /* +1 byte for final NULL */
+ HDassert((2 * big_len) <= sizeof(cmpbuf));
+ src_type = mkstr(big_len, H5T_STR_NULLTERM);
+ CHECK(src_type, FAIL, "mkstr");
+ /* Create a dst_type that holds half of the UTF-8 string and a final
+ * NULL */
+ small_len = (length + 1) / 2;
+ dst_type = mkstr(small_len, H5T_STR_NULLTERM);
+ CHECK(dst_type, FAIL, "mkstr");
+
+ /* Fill the buffer with two copies of the UTF-8 string, each with a
+ * terminating NULL. It will look like "abcdefg\0abcdefg\0". */
+ HDstrncpy(buf, new_string, big_len);
+ HDstrncpy(&buf[big_len], new_string, big_len);
+
+ ret = H5Tconvert(src_type, dst_type, (size_t)2, buf, NULL, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tconvert");
+
+ /* After conversion, the buffer should look like
+ * "abc\0abc\0abcdefg\0". Note that this is just what the bytes look
+ * like; UTF-8 characters may well have been truncated.
+ * To check that the conversion worked properly, we'll build this
+ * string manually. */
+ HDstrncpy(cmpbuf, new_string, small_len - 1);
+ cmpbuf[small_len - 1] = '\0';
+ HDstrncpy(&cmpbuf[small_len], new_string, small_len - 1);
+ cmpbuf[2 * small_len - 1] = '\0';
+ HDstrcpy(&cmpbuf[2 * small_len], new_string);
+
+ VERIFY(HDmemcmp(buf, cmpbuf, 2 * big_len), 0, "HDmemcmp");
+
+ /* Now convert from smaller datatype to bigger datatype. This should
+ * leave our buffer looking like: "abc\0\0\0\0\0abc\0\0\0\0\0" */
+ ret = H5Tconvert(dst_type, src_type, (size_t)2, buf, NULL, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tconvert");
+
+ /* First fill the buffer with NULLs */
+ HDmemset(cmpbuf, '\0', (size_t)LONG_BUF_SIZE);
+ /* Copy in the characters */
+ HDstrncpy(cmpbuf, new_string, small_len - 1);
+ HDstrncpy(&cmpbuf[big_len], new_string, small_len - 1);
+
+ VERIFY(HDmemcmp(buf, cmpbuf, 2 * big_len), 0, "HDmemcmp");
+
+ ret = H5Tclose(src_type);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Tclose(dst_type);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Now test null padding. Null-padded strings do *not* need
+ * terminating NULLs, so the sizes of the datatypes are slightly
+ * different and we want a string with an even number of characters. */
+ length = HDstrlen(string);
+ if (length % 2 != 0) {
+ HDstrcpy(new_string, "x");
+ HDstrcat(new_string, string);
+ length++;
+ }
+ else {
+ HDstrcpy(new_string, string);
+ }
+
+ /* Create a src_type that holds the UTF-8 string */
+ big_len = length;
+ HDassert((2 * big_len) <= sizeof(cmpbuf));
+ src_type = mkstr(big_len, H5T_STR_NULLPAD);
+ CHECK(src_type, FAIL, "mkstr");
+ /* Create a dst_type that holds half of the UTF-8 string */
+ small_len = length / 2;
+ dst_type = mkstr(small_len, H5T_STR_NULLPAD);
+ CHECK(dst_type, FAIL, "mkstr");
+
+ /* Fill the buffer with two copies of the UTF-8 string.
+ * It will look like "abcdefghabcdefgh". */
+ HDstrncpy(buf, new_string, big_len);
+ HDstrncpy(&buf[big_len], new_string, big_len);
+
+ ret = H5Tconvert(src_type, dst_type, (size_t)2, buf, NULL, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tconvert");
+
+ /* After conversion, the buffer should look like
+ * "abcdabcdabcdefgh". Note that this is just what the bytes look
+ * like; UTF-8 characters may well have been truncated.
+ * To check that the conversion worked properly, we'll build this
+ * string manually. */
+ HDstrncpy(cmpbuf, new_string, small_len);
+ HDstrncpy(&cmpbuf[small_len], new_string, small_len);
+ HDstrncpy(&cmpbuf[2 * small_len], new_string, big_len);
+
+ VERIFY(HDmemcmp(buf, cmpbuf, 2 * big_len), 0, "HDmemcmp");
+
+ /* Now convert from smaller datatype to bigger datatype. This should
+ * leave our buffer looking like: "abcd\0\0\0\0abcd\0\0\0\0" */
+ ret = H5Tconvert(dst_type, src_type, (size_t)2, buf, NULL, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tconvert");
+
+ /* First fill the buffer with NULLs */
+ HDmemset(cmpbuf, '\0', (size_t)LONG_BUF_SIZE);
+ /* Copy in the characters */
+ HDstrncpy(cmpbuf, new_string, small_len);
+ HDstrncpy(&cmpbuf[big_len], new_string, small_len);
+
+ VERIFY(HDmemcmp(buf, cmpbuf, 2 * big_len), 0, "HDmemcmp");
+
+ ret = H5Tclose(src_type);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Tclose(dst_type);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Test space padding. This is very similar to null-padding; we can
+ use the same values of length, small_len, and big_len. */
+
+ src_type = mkstr(big_len, H5T_STR_SPACEPAD);
+ CHECK(src_type, FAIL, "mkstr");
+ dst_type = mkstr(small_len, H5T_STR_SPACEPAD);
+ CHECK(src_type, FAIL, "mkstr");
+
+ /* Fill the buffer with two copies of the UTF-8 string.
+ * It will look like "abcdefghabcdefgh". */
+ HDstrcpy(buf, new_string);
+ HDstrcpy(&buf[big_len], new_string);
+
+ ret = H5Tconvert(src_type, dst_type, (size_t)2, buf, NULL, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tconvert");
+
+ /* After conversion, the buffer should look like
+ * "abcdabcdabcdefgh". Note that this is just what the bytes look
+ * like; UTF-8 characters may have been truncated.
+ * To check that the conversion worked properly, we'll build this
+ * string manually. */
+ HDstrncpy(cmpbuf, new_string, small_len);
+ HDstrncpy(&cmpbuf[small_len], new_string, small_len);
+ HDstrncpy(&cmpbuf[2 * small_len], new_string, big_len);
+
+ VERIFY(HDmemcmp(buf, cmpbuf, 2 * big_len), 0, "HDmemcmp");
+
+ /* Now convert from smaller datatype to bigger datatype. This should
+ * leave our buffer looking like: "abcd abcd " */
+ ret = H5Tconvert(dst_type, src_type, (size_t)2, buf, NULL, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Tconvert");
+
+ /* First fill the buffer with spaces */
+ HDmemset(cmpbuf, ' ', (size_t)LONG_BUF_SIZE);
+ /* Copy in the characters */
+ HDstrncpy(cmpbuf, new_string, small_len);
+ HDstrncpy(&cmpbuf[big_len], new_string, small_len);
+
+ VERIFY(HDmemcmp(buf, cmpbuf, 2 * big_len), 0, "HDmemcmp");
+
+ ret = H5Tclose(src_type);
+ CHECK(ret, FAIL, "H5Tclose");
+ ret = H5Tclose(dst_type);
+ CHECK(ret, FAIL, "H5Tclose");
+}
+
/*
 * test_vl_string
 * Tests variable-length string datatype with UTF-8 strings.
 *
 * Writes the string as a single VL-string element, verifies the buffer
 * size the library reports for it, reads it back, and reclaims the
 * library-allocated VL memory.
 */
void
test_vl_string(hid_t fid, const char *string)
{
    hid_t   type_id, space_id, dset_id;
    hsize_t dims = 1;
    hsize_t size; /* Number of bytes used */
    char   *read_buf[1];
    herr_t  ret;

    /* Create dataspace for datasets */
    space_id = H5Screate_simple(RANK, &dims, NULL);
    CHECK(space_id, FAIL, "H5Screate_simple");

    /* Create a datatype to refer to */
    type_id = H5Tcopy(H5T_C_S1);
    CHECK(type_id, FAIL, "H5Tcopy");
    ret = H5Tset_size(type_id, H5T_VARIABLE);
    CHECK(ret, FAIL, "H5Tset_size");

    /* Create a dataset */
    dset_id = H5Dcreate2(fid, VL_DSET1_NAME, type_id, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(dset_id, FAIL, "H5Dcreate2");

    /* Write dataset to disk.  Each VL-string element is a char *, so the
     * write buffer is the address of the string pointer. */
    ret = H5Dwrite(dset_id, type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, &string);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Make certain the correct amount of memory will be used */
    ret = H5Dvlen_get_buf_size(dset_id, type_id, space_id, &size);
    CHECK(ret, FAIL, "H5Dvlen_get_buf_size");
    VERIFY(size, (hsize_t)HDstrlen(string) + 1, "H5Dvlen_get_buf_size");

    /* Read dataset from disk; the library allocates read_buf[0] */
    ret = H5Dread(dset_id, type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_buf);
    CHECK(ret, FAIL, "H5Dread");

    /* Compare data read in */
    VERIFY(HDstrcmp(string, read_buf[0]), 0, "strcmp");

    /* Reclaim the read VL data */
    ret = H5Treclaim(type_id, space_id, H5P_DEFAULT, read_buf);
    CHECK(ret, FAIL, "H5Treclaim");

    /* Close all */
    ret = H5Dclose(dset_id);
    CHECK(ret, FAIL, "H5Dclose");
    ret = H5Tclose(type_id);
    CHECK(ret, FAIL, "H5Tclose");
    ret = H5Sclose(space_id);
    CHECK(ret, FAIL, "H5Sclose");
}
+
+/*
+ * test_objnames
+ * Tests that UTF-8 can be used for object names in the file.
+ * Tests groups, datasets, named datatypes, and soft links.
+ * Note that this test doesn't actually mark the names as being
+ * in UTF-8. At the time this test was written, that feature
+ * didn't exist in HDF5, and when the character encoding property
+ * was added to links it didn't change how they were stored in the file,
+ * -JML 2/2/2006
+ */
+void
+test_objnames(hid_t fid, const char *string)
+{
+ hid_t grp_id, grp1_id, grp2_id, grp3_id;
+ hid_t type_id, dset_id, space_id;
+#if 0
+ char read_buf[MAX_STRING_LENGTH];
+#endif
+ char path_buf[MAX_PATH_LENGTH];
+ hsize_t dims = 1;
+#if 0
+ hobj_ref_t obj_ref;
+ ssize_t size;
+#endif
+ herr_t ret;
+
+ /* Create a group with a UTF-8 name */
+ grp_id = H5Gcreate2(fid, string, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(grp_id, FAIL, "H5Gcreate2");
+#if 0
+ /* Set a comment on the group to test that we can access the group
+ * Also test that UTF-8 comments can be read.
+ */
+ ret = H5Oset_comment_by_name(fid, string, string, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Oset_comment_by_name");
+ size = H5Oget_comment_by_name(fid, string, read_buf, (size_t)MAX_STRING_LENGTH, H5P_DEFAULT);
+ CHECK(size, FAIL, "H5Oget_comment_by_name");
+#endif
+ ret = H5Gclose(grp_id);
+ CHECK(ret, FAIL, "H5Gclose");
+#if 0
+ VERIFY(HDstrcmp(string, read_buf), 0, "strcmp");
+#endif
+ /* Create a new dataset with a UTF-8 name */
+ grp1_id = H5Gcreate2(fid, GROUP1_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(grp1_id, FAIL, "H5Gcreate2");
+
+ space_id = H5Screate_simple(RANK, &dims, NULL);
+ CHECK(space_id, FAIL, "H5Screate_simple");
+ dset_id = H5Dcreate2(grp1_id, string, H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dcreate2");
+
+ /* Make sure that dataset can be opened again */
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Sclose(space_id);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ dset_id = H5Dopen2(grp1_id, string, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Dopen2");
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Gclose(grp1_id);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Do the same for a named datatype */
+ grp2_id = H5Gcreate2(fid, GROUP2_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(grp2_id, FAIL, "H5Gcreate2");
+
+ type_id = H5Tcreate(H5T_OPAQUE, (size_t)1);
+ CHECK(type_id, FAIL, "H5Tcreate");
+ ret = H5Tcommit2(grp2_id, string, type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(type_id, FAIL, "H5Tcommit2");
+ ret = H5Tclose(type_id);
+ CHECK(type_id, FAIL, "H5Tclose");
+
+ type_id = H5Topen2(grp2_id, string, H5P_DEFAULT);
+ CHECK(type_id, FAIL, "H5Topen2");
+ ret = H5Tclose(type_id);
+ CHECK(type_id, FAIL, "H5Tclose");
+
+ /* Don't close the group -- use it to test that object references
+ * can refer to objects named in UTF-8 */
+#if 0
+ space_id = H5Screate_simple(RANK, &dims, NULL);
+ CHECK(space_id, FAIL, "H5Screate_simple");
+ dset_id =
+ H5Dcreate2(grp2_id, DSET3_NAME, H5T_STD_REF_OBJ, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Dcreate2");
+
+ /* Create reference to named datatype */
+ ret = H5Rcreate(&obj_ref, grp2_id, string, H5R_OBJECT, (hid_t)-1);
+ CHECK(ret, FAIL, "H5Rcreate");
+ /* Write selection and read it back*/
+ ret = H5Dwrite(dset_id, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, &obj_ref);
+ CHECK(ret, FAIL, "H5Dwrite");
+ ret = H5Dread(dset_id, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, &obj_ref);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Ensure that we can open named datatype using object reference */
+ type_id = H5Rdereference2(dset_id, H5P_DEFAULT, H5R_OBJECT, &obj_ref);
+ CHECK(type_id, FAIL, "H5Rdereference2");
+ ret = H5Tcommitted(type_id);
+ VERIFY(ret, 1, "H5Tcommitted");
+
+ ret = H5Tclose(type_id);
+ CHECK(type_id, FAIL, "H5Tclose");
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+ ret = H5Sclose(space_id);
+ CHECK(ret, FAIL, "H5Sclose");
+#endif
+ ret = H5Gclose(grp2_id);
+ CHECK(ret, FAIL, "H5Gclose");
+
+ /* Create "group3". Build a hard link from group3 to group2, which has
+ * a datatype with the UTF-8 name. Create a soft link in group3
+ * pointing through the hard link to the datatype. Give the soft
+ * link a name in UTF-8. Ensure that the soft link works. */
+
+ grp3_id = H5Gcreate2(fid, GROUP3_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(grp3_id, FAIL, "H5Gcreate2");
+
+ ret = H5Lcreate_hard(fid, GROUP2_NAME, grp3_id, GROUP2_NAME, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lcreate_hard");
+ HDstrcpy(path_buf, GROUP2_NAME);
+ HDstrcat(path_buf, "/");
+ HDstrcat(path_buf, string);
+ ret = H5Lcreate_hard(grp3_id, path_buf, H5L_SAME_LOC, string, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(ret, FAIL, "H5Lcreate_hard");
+
+ /* Open named datatype using soft link */
+ type_id = H5Topen2(grp3_id, string, H5P_DEFAULT);
+ CHECK(type_id, FAIL, "H5Topen2");
+
+ ret = H5Tclose(type_id);
+ CHECK(type_id, FAIL, "H5Tclose");
+ ret = H5Gclose(grp3_id);
+ CHECK(ret, FAIL, "H5Gclose");
+}
+
/*
 * test_attrname
 * Test that attributes can deal with UTF-8 strings
 *
 * Creates an attribute whose name AND value are the UTF-8 string,
 * then verifies both round-trip unchanged.
 */
void
test_attrname(hid_t fid, const char *string)
{
    hid_t   group_id, attr_id;
    hid_t   dtype_id, space_id;
    hsize_t dims = 1;
    char    read_buf[MAX_STRING_LENGTH];
    ssize_t size;
    herr_t  ret;

    /* Create a new group and give it an attribute whose
     * name and value are UTF-8 strings.
     */
    group_id = H5Gcreate2(fid, GROUP4_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(group_id, FAIL, "H5Gcreate2");

    space_id = H5Screate_simple(RANK, &dims, NULL);
    CHECK(space_id, FAIL, "H5Screate_simple");
    dtype_id = H5Tcopy(H5T_C_S1);
    CHECK(dtype_id, FAIL, "H5Tcopy");
    ret = H5Tset_size(dtype_id, (size_t)MAX_STRING_LENGTH);
    CHECK(ret, FAIL, "H5Tset_size");

    /* Create the attribute and check that its name is correct */
    attr_id = H5Acreate2(group_id, string, dtype_id, space_id, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(attr_id, FAIL, "H5Acreate2");
    size = H5Aget_name(attr_id, (size_t)MAX_STRING_LENGTH, read_buf);
    CHECK(size, FAIL, "H5Aget_name");
    ret = HDstrcmp(read_buf, string);
    VERIFY(ret, 0, "strcmp");
    /* Clobber the buffer so the read below can't pass by accident */
    read_buf[0] = '\0';

    /* Try writing and reading from the attribute */
    ret = H5Awrite(attr_id, dtype_id, string);
    CHECK(ret, FAIL, "H5Awrite");
    ret = H5Aread(attr_id, dtype_id, read_buf);
    CHECK(ret, FAIL, "H5Aread");
    ret = HDstrcmp(read_buf, string);
    VERIFY(ret, 0, "strcmp");

    /* Clean up */
    ret = H5Aclose(attr_id);
    CHECK(ret, FAIL, "H5Aclose");
    ret = H5Tclose(dtype_id);
    CHECK(ret, FAIL, "H5Tclose");
    ret = H5Sclose(space_id);
    CHECK(ret, FAIL, "H5Sclose");
    ret = H5Gclose(group_id);
    CHECK(ret, FAIL, "H5Gclose");
}
+
/*
 * test_compound
 * Test that compound datatypes can have UTF-8 field names.
 *
 * Inserts the UTF-8 string as a member name in both the full (s1_t)
 * and partial (s2_t) compound types, writes an s1_t element, and reads
 * it back by field name into an s2_t.
 */
void
test_compound(hid_t fid, const char *string)
{
    /* Define two compound structures, s1_t and s2_t.
     * s2_t is a subset of s1_t, with two out of three
     * fields.
     * This is stolen from the h5_compound example.
     */
    typedef struct s1_t {
        int    a;
        double c;
        float  b;
    } s1_t;
    typedef struct s2_t {
        double c;
        int    a;
    } s2_t;
    /* Actual variable declarations */
    s1_t    s1;
    s2_t    s2;
    hid_t   s1_tid, s2_tid;
    hid_t   space_id, dset_id;
    hsize_t dim = 1;
    char   *readbuf;
    herr_t  ret;

    /* Initialize compound data */
    HDmemset(&s1, 0, sizeof(s1_t)); /* To make purify happy */
    s1.a = COMP_INT_VAL;
    s1.c = COMP_DOUBLE_VAL;
    s1.b = COMP_FLOAT_VAL;

    /* Create compound datatypes using UTF-8 field name */
    s1_tid = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
    CHECK(s1_tid, FAIL, "H5Tcreate");
    ret = H5Tinsert(s1_tid, string, HOFFSET(s1_t, a), H5T_NATIVE_INT);
    CHECK(ret, FAIL, "H5Tinsert");

    /* Check that the field name was stored correctly */
    readbuf = H5Tget_member_name(s1_tid, 0);
    ret     = HDstrcmp(readbuf, string);
    VERIFY(ret, 0, "strcmp");
    /* H5Tget_member_name allocates; release with H5free_memory */
    H5free_memory(readbuf);

    /* Add the other fields to the datatype */
    ret = H5Tinsert(s1_tid, "c_name", HOFFSET(s1_t, c), H5T_NATIVE_DOUBLE);
    CHECK(ret, FAIL, "H5Tinsert");
    ret = H5Tinsert(s1_tid, "b_name", HOFFSET(s1_t, b), H5T_NATIVE_FLOAT);
    CHECK(ret, FAIL, "H5Tinsert");

    /* Create second datatype, with only two fields. */
    s2_tid = H5Tcreate(H5T_COMPOUND, sizeof(s2_t));
    CHECK(s2_tid, FAIL, "H5Tcreate");
    ret = H5Tinsert(s2_tid, "c_name", HOFFSET(s2_t, c), H5T_NATIVE_DOUBLE);
    CHECK(ret, FAIL, "H5Tinsert");
    ret = H5Tinsert(s2_tid, string, HOFFSET(s2_t, a), H5T_NATIVE_INT);
    CHECK(ret, FAIL, "H5Tinsert");

    /* Create the dataspace and dataset. */
    space_id = H5Screate_simple(1, &dim, NULL);
    CHECK(space_id, FAIL, "H5Screate_simple");
    dset_id = H5Dcreate2(fid, DSET4_NAME, s1_tid, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(dset_id, FAIL, "H5Dcreate2");

    /* Write data to the dataset. */
    ret = H5Dwrite(dset_id, s1_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, &s1);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Ensure that data can be read back by field name into s2 struct */
    ret = H5Dread(dset_id, s2_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, &s2);
    CHECK(ret, FAIL, "H5Dread");

    VERIFY(s2.a, COMP_INT_VAL, "H5Dread");
    VERIFY(s2.c, COMP_DOUBLE_VAL, "H5Dread");

    /* Clean up */
    ret = H5Tclose(s1_tid);
    CHECK(ret, FAIL, "H5Tclose");
    ret = H5Tclose(s2_tid);
    CHECK(ret, FAIL, "H5Tclose");
    ret = H5Sclose(space_id);
    CHECK(ret, FAIL, "H5Sclose");
    ret = H5Dclose(dset_id);
    CHECK(ret, FAIL, "H5Dclose");
}
+
/*
 * test_enum
 * Test that enumerated datatypes can have UTF-8 member names.
 *
 * Inserts the UTF-8 string as the name of the E1_WHITE member and
 * verifies both name->value (H5Tenum_valueof) and value->name
 * (H5Tenum_nameof) lookups.
 */
void
test_enum(hid_t H5_ATTR_UNUSED fid, const char *string)
{
    /* Define an enumerated type */
    typedef enum { E1_RED, E1_GREEN, E1_BLUE, E1_WHITE } c_e1;
    /* Variable declarations */
    c_e1   val;
    herr_t ret;
    hid_t  type_id;
    char   readbuf[MAX_STRING_LENGTH];

    /* Create an enumerated datatype in HDF5 with a UTF-8 member name*/
    type_id = H5Tcreate(H5T_ENUM, sizeof(c_e1));
    CHECK(type_id, FAIL, "H5Tcreate");
    val = E1_RED;
    ret = H5Tenum_insert(type_id, "RED", &val);
    CHECK(ret, FAIL, "H5Tenum_insert");
    val = E1_GREEN;
    ret = H5Tenum_insert(type_id, "GREEN", &val);
    CHECK(ret, FAIL, "H5Tenum_insert");
    val = E1_BLUE;
    ret = H5Tenum_insert(type_id, "BLUE", &val);
    CHECK(ret, FAIL, "H5Tenum_insert");
    /* The last member's name is the UTF-8 string under test */
    val = E1_WHITE;
    ret = H5Tenum_insert(type_id, string, &val);
    CHECK(ret, FAIL, "H5Tenum_insert");

    /* Ensure that UTF-8 member name gives the right value and vice versa. */
    ret = H5Tenum_valueof(type_id, string, &val);
    CHECK(ret, FAIL, "H5Tenum_valueof");
    VERIFY(val, E1_WHITE, "H5Tenum_valueof");
    ret = H5Tenum_nameof(type_id, &val, readbuf, (size_t)MAX_STRING_LENGTH);
    CHECK(ret, FAIL, "H5Tenum_nameof");
    ret = HDstrcmp(readbuf, string);
    VERIFY(ret, 0, "strcmp");

    /* Close the datatype */
    ret = H5Tclose(type_id);
    CHECK(ret, FAIL, "H5Tclose");
}
+
/*
 * test_opaque
 * Test comments on opaque datatypes
 *
 * Sets the UTF-8 string as an opaque type's tag and verifies it
 * round-trips through H5Tget_tag.
 */
void
test_opaque(hid_t H5_ATTR_UNUSED fid, const char *string)
{
    hid_t  type_id;
    char  *read_buf;
    herr_t ret;

    /* Create an opaque type and give it a UTF-8 tag */
    type_id = H5Tcreate(H5T_OPAQUE, (size_t)4);
    CHECK(type_id, FAIL, "H5Tcreate");
    ret = H5Tset_tag(type_id, string);
    CHECK(ret, FAIL, "H5Tset_tag");

    /* Read the tag back. */
    read_buf = H5Tget_tag(type_id);
    ret      = HDstrcmp(read_buf, string);
    VERIFY(ret, 0, "H5Tget_tag");
    /* H5Tget_tag allocates; release with H5free_memory */
    H5free_memory(read_buf);

    ret = H5Tclose(type_id);
    CHECK(ret, FAIL, "H5Tclose");
}
+
+/*********************/
+/* Utility functions */
+/*********************/
+
+/* mkstr
+ * Borrwed from dtypes.c.
+ * Creates a new string data type. Used in string padding tests */
+static hid_t
+mkstr(size_t len, H5T_str_t strpad)
+{
+ hid_t t;
+ if ((t = H5Tcopy(H5T_C_S1)) < 0)
+ return -1;
+ if (H5Tset_size(t, len) < 0)
+ return -1;
+ if (H5Tset_strpad(t, strpad) < 0)
+ return -1;
+ return t;
+}
+
/* write_char
 * Append a unicode code point c to test_string in UTF-8 encoding.
 * Code points below 0x80 are written as a single byte; larger values
 * get a lead byte (0xC0/0xE0/0xF0 pattern) followed by continuation
 * bytes of the form 10xxxxxx.  Code points >= 0x200000 are silently
 * ignored.  No NUL terminator is written.
 * Return the new end of the string.
 */
unsigned int
write_char(unsigned int c, char *test_string, unsigned int cur_pos)
{
    unsigned int nbytes;    /* Total bytes in the UTF-8 sequence */
    unsigned int lead_mask; /* High bits of the lead byte */
    unsigned int i;

    /* ASCII fast path: one byte, no marker bits */
    if (c < 0x80) {
        test_string[cur_pos] = (char)c;
        return cur_pos + 1;
    }

    /* Pick sequence length and lead-byte marker from the code point range */
    if (c < 0x800) {
        nbytes    = 2;
        lead_mask = 0xC0;
    }
    else if (c < 0x10000) {
        nbytes    = 3;
        lead_mask = 0xE0;
    }
    else if (c < 0x200000) {
        nbytes    = 4;
        lead_mask = 0xF0;
    }
    else
        return cur_pos; /* Out of encodable range: append nothing */

    /* Lead byte carries the top bits of c */
    test_string[cur_pos] = (char)(lead_mask | (c >> (6 * (nbytes - 1))));
    /* Continuation bytes each carry 6 bits, high-to-low */
    for (i = 1; i < nbytes; i++)
        test_string[cur_pos + i] = (char)(0x80 | ((c >> (6 * (nbytes - 1 - i))) & 0x3F));

    return cur_pos + nbytes;
}
+
/* dump_string
 * Print a string both as text (which will look like garbage) and as hex.
 * The text display is not guaranteed to be accurate--certain characters
 * could confuse printf (e.g., '\n'). */
void
dump_string(const char *string)
{
    size_t length;
    size_t x;

    HDprintf("The string was:\n %s", string);
    HDprintf("Or in hex:\n");

    length = HDstrlen(string);

    for (x = 0; x < length; x++)
        /* Mask to one byte so sign-extended chars don't print as ffffffXX */
        HDprintf("%x ", string[x] & (0x000000FF));

    HDprintf("\n");
}
+
/* Main test.
 * Create a string of random Unicode characters, then run each test with
 * that string.
 */
void
test_unicode(void)
{
    char         test_string[MAX_STRING_LENGTH];
    unsigned int cur_pos = 0;   /* Current position in test_string */
    unsigned int unicode_point; /* Unicode code point for a single character */
    hid_t        fid;           /* ID of file */
    int          x;             /* Temporary variable */
    herr_t       ret;           /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing UTF-8 Encoding\n"));

    /* Create a random string with length NUM_CHARS */
    HDsrandom((unsigned)HDtime(NULL));

    HDmemset(test_string, 0, sizeof(test_string));
    for (x = 0; x < NUM_CHARS; x++) {
        /* We need to avoid unprintable characters (codes 0-31) and the
         * . and / characters, since they aren't allowed in path names.
         */
        unicode_point = (unsigned)(HDrandom() % (MAX_CODE_POINT - 32)) + 32;
        /* 46 == '.', 47 == '/'; skipped characters simply aren't appended,
         * so the string may end up shorter than NUM_CHARS characters */
        if (unicode_point != 46 && unicode_point != 47)
            cur_pos = write_char(unicode_point, test_string, cur_pos);
    }

    /* Avoid unlikely case of the null string */
    if (cur_pos == 0) {
        test_string[cur_pos] = 'Q';
        cur_pos++;
    }
    /* write_char() does not NUL-terminate; do it here */
    test_string[cur_pos] = '\0';

    /* Create file */
    fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(fid, FAIL, "H5Fcreate");

    test_fl_string(fid, test_string);
    /* Run the padding test once with a known string, once with the random one */
    test_strpad(fid, "abcdefgh");
    test_strpad(fid, test_string);
    test_vl_string(fid, test_string);
    test_objnames(fid, test_string);
    test_attrname(fid, test_string);
    test_compound(fid, test_string);
    test_enum(fid, test_string);
    test_opaque(fid, test_string);

    /* Close file */
    ret = H5Fclose(fid);
    CHECK(ret, FAIL, "H5Fclose");

    /* This function could be useful in debugging if certain strings
     * create errors.
     */
#ifdef DEBUG
    dump_string(test_string);
#endif /* DEBUG */
}
+
/* cleanup_unicode(void)
 * Delete the file this test created.
 */
void
cleanup_unicode(void)
{
    /* Remove the file created by test_unicode() */
    H5Fdelete(FILENAME, H5P_DEFAULT);
}
diff --git a/test/API/tvlstr.c b/test/API/tvlstr.c
new file mode 100644
index 0000000..b05ff66
--- /dev/null
+++ b/test/API/tvlstr.c
@@ -0,0 +1,1013 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: tvlstr
+ *
+ * Test the Variable-Length String functionality
+ *
+ *************************************************************/
+
+#include "testhdf5.h"
+
+#define DATAFILE "tvlstr.h5"
+#define DATAFILE2 "tvlstr2.h5"
+#define DATAFILE3 "sel2el.h5"
+
+#define DATASET "1Darray"
+
+/* 1-D dataset with fixed dimensions */
+#define SPACE1_RANK 1
+#define SPACE1_DIM1 4
+#define NUMP 4
+
+#define VLSTR_TYPE "vl_string_type"
+
+/* Definitions for the VL re-writing test */
+#define REWRITE_NDATASETS 32
+
+/* String for testing attributes */
+static const char *string_att = "This is the string for the attribute";
+static char *string_att_write = NULL;
+
+void *test_vlstr_alloc_custom(size_t size, void *info);
+void test_vlstr_free_custom(void *mem, void *info);
+
+/****************************************************************
+**
+** test_vlstr_alloc_custom(): Test VL datatype custom memory
+** allocation routines. This routine just uses malloc to
+** allocate the memory and increments the amount of memory
+** allocated.
+**
+****************************************************************/
+void *
+test_vlstr_alloc_custom(size_t size, void *info)
+{
+    void *ret_value = NULL; /* Pointer to return */
+    size_t *mem_used = (size_t *)info; /* Get the pointer to the memory used */
+    size_t extra; /* Extra space needed */
+
+    /*
+     * This weird contortion is required on the DEC Alpha to keep the
+     * alignment correct - QAK
+     */
+    extra = MAX(sizeof(void *), sizeof(size_t));
+
+    if ((ret_value = HDmalloc(extra + size)) != NULL) {
+        /* Record the user-visible size in the bookkeeping header and
+         * track the total outstanding allocation.
+         */
+        *(size_t *)ret_value = size;
+        *mem_used += size;
+
+        /* Step past the header.  This offset is only applied on a
+         * successful allocation: previously it was applied unconditionally,
+         * so a failed HDmalloc() produced NULL + extra (undefined behavior)
+         * and returned a garbage non-NULL pointer to the library.
+         */
+        ret_value = ((unsigned char *)ret_value) + extra;
+    } /* end if */
+
+    return (ret_value);
+}
+
+/****************************************************************
+**
+** test_vlstr_free_custom(): Test VL datatype custom memory
+** allocation routines. This routine just uses free to
+** release the memory and decrements the amount of memory
+** allocated.
+**
+****************************************************************/
+void
+test_vlstr_free_custom(void *_mem, void *info)
+{
+    size_t *mem_used = (size_t *)info; /* Running total of bytes currently allocated */
+    size_t hdr_size;                   /* Size of the alignment/bookkeeping header */
+    unsigned char *base;               /* Start of the real allocation */
+
+    /* Nothing to do for a NULL pointer */
+    if (_mem == NULL)
+        return;
+
+    /*
+     * This weird contortion is required on the DEC Alpha to keep the
+     * alignment correct - QAK
+     */
+    hdr_size = MAX(sizeof(void *), sizeof(size_t));
+
+    /* Step back to the header, subtract the size recorded at allocation
+     * time from the running total, and release the whole allocation.
+     */
+    base = ((unsigned char *)_mem) - hdr_size;
+    *mem_used -= *(size_t *)((void *)base);
+    HDfree(base);
+}
+
+/****************************************************************
+**
+** test_vlstrings_basic(): Test basic VL string code.
+** Tests simple VL string I/O
+**
+****************************************************************/
+static void
+test_vlstrings_basic(void)
+{
+    /* Information to write */
+    const char *wdata[SPACE1_DIM1] = {
+        "Four score and seven years ago our forefathers brought forth on this continent a new nation,",
+        "conceived in liberty and dedicated to the proposition that all men are created equal.",
+        "Now we are engaged in a great civil war,",
+        "testing whether that nation or any nation so conceived and so dedicated can long endure."};
+
+    char *rdata[SPACE1_DIM1]; /* Information read in */
+    char *wdata2;
+    hid_t dataspace, dataset2;
+    hid_t fid1; /* HDF5 File IDs */
+    hid_t dataset; /* Dataset ID */
+    hid_t sid1; /* Dataspace ID */
+    hid_t tid1; /* Datatype ID */
+    hid_t xfer_pid; /* Dataset transfer property list ID */
+    hsize_t dims1[] = {SPACE1_DIM1};
+    hsize_t size; /* Number of bytes which will be used */
+    unsigned i; /* counting variable */
+    size_t str_used; /* String data in memory */
+    size_t mem_used = 0; /* Memory used during allocation */
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Basic VL String Functionality\n"));
+
+    /* Create file */
+    fid1 = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create a VL string datatype to refer to */
+    tid1 = H5Tcopy(H5T_C_S1);
+    CHECK(tid1, FAIL, "H5Tcopy");
+
+    ret = H5Tset_size(tid1, H5T_VARIABLE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, "Dataset1", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Create a second, scalar dataset holding one large (~64KB) VL string.
+     * (Previously the H5Screate() result went unchecked.)
+     */
+    dataspace = H5Screate(H5S_SCALAR);
+    CHECK(dataspace, FAIL, "H5Screate");
+
+    dataset2 = H5Dcreate2(fid1, "Dataset2", tid1, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset2, FAIL, "H5Dcreate2");
+
+    /* Allocate and fill the large string; verify the allocation before
+     * writing through it.
+     */
+    wdata2 = (char *)HDcalloc((size_t)65534, sizeof(char));
+    CHECK_PTR(wdata2, "HDcalloc");
+    HDmemset(wdata2, 'A', (size_t)65533);
+
+    ret = H5Dwrite(dataset2, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, &wdata2);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Done with the scalar dataset (check the close calls as well) */
+    ret = H5Sclose(dataspace);
+    CHECK(ret, FAIL, "H5Sclose");
+    ret = H5Dclose(dataset2);
+    CHECK(ret, FAIL, "H5Dclose");
+    HDfree(wdata2);
+
+    /* Change to the custom memory allocation routines for reading VL string */
+    xfer_pid = H5Pcreate(H5P_DATASET_XFER);
+    CHECK(xfer_pid, FAIL, "H5Pcreate");
+
+    ret = H5Pset_vlen_mem_manager(xfer_pid, test_vlstr_alloc_custom, &mem_used, test_vlstr_free_custom,
+                                  &mem_used);
+    CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");
+
+    /* Make certain the correct amount of memory will be used */
+    ret = H5Dvlen_get_buf_size(dataset, tid1, sid1, &size);
+    CHECK(ret, FAIL, "H5Dvlen_get_buf_size");
+
+    /* Count the actual number of bytes used by the strings (incl. NUL) */
+    for (i = 0, str_used = 0; i < SPACE1_DIM1; i++)
+        str_used += HDstrlen(wdata[i]) + 1;
+
+    /* Compare against the strings actually written */
+    VERIFY(size, (hsize_t)str_used, "H5Dvlen_get_buf_size");
+
+    /* Read dataset from disk; strings are allocated via the custom allocator */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Make certain the correct amount of memory has been used */
+    VERIFY(mem_used, str_used, "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        if (HDstrlen(wdata[i]) != HDstrlen(rdata[i])) {
+            TestErrPrintf("VL data length don't match!, strlen(wdata[%d])=%d, strlen(rdata[%d])=%d\n", (int)i,
+                          (int)HDstrlen(wdata[i]), (int)i, (int)HDstrlen(rdata[i]));
+            continue;
+        } /* end if */
+        if (HDstrcmp(wdata[i], rdata[i]) != 0) {
+            TestErrPrintf("VL data values don't match!, wdata[%d]=%s, rdata[%d]=%s\n", (int)i, wdata[i],
+                          (int)i, rdata[i]);
+            continue;
+        } /* end if */
+    } /* end for */
+
+    /* Reclaim the read VL data */
+    ret = H5Treclaim(tid1, sid1, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Make certain the VL memory has been freed */
+    VERIFY(mem_used, 0, "H5Treclaim");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close dataset transfer property list */
+    ret = H5Pclose(xfer_pid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_vlstrings_basic() */
+
+/****************************************************************
+**
+** test_vlstrings_special(): Test VL string code for special
+** string cases, nil and zero-sized.
+**
+****************************************************************/
+static void
+test_vlstrings_special(void)
+{
+    const char *wdata[SPACE1_DIM1] = {"", "two", "three", "\0"};
+    const char *wdata2[SPACE1_DIM1] = {NULL, NULL, NULL, NULL};
+    char *rdata[SPACE1_DIM1]; /* Information read in */
+    char *fill; /* Fill value */
+    hid_t fid1; /* HDF5 File IDs */
+    hid_t dataset; /* Dataset ID */
+    hid_t sid1; /* Dataspace ID */
+    hid_t tid1; /* Datatype ID */
+    hid_t dcpl; /* Dataset creation property list ID */
+    hsize_t dims1[] = {SPACE1_DIM1};
+    unsigned i; /* counting variable */
+    herr_t ret; /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Special VL Strings\n"));
+
+    /* Create file */
+    fid1 = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create a VL string datatype to refer to */
+    tid1 = H5Tcopy(H5T_C_S1);
+    CHECK(tid1, FAIL, "H5Tcopy");
+
+    ret = H5Tset_size(tid1, H5T_VARIABLE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, "Dataset3", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Read from dataset before writing data; all elements should be NULL */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Check data read in */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        if (rdata[i] != NULL)
+            TestErrPrintf("VL doesn't match!, rdata[%d]=%s\n", (int)i, rdata[i]);
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read in.  The NULL/non-NULL mismatch check is done
+     * *before* any HDstrlen()/HDstrcmp() call: previously HDstrlen() ran
+     * first, so a NULL element caused undefined behavior before the NULL
+     * check could catch it.
+     */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        if ((wdata[i] == NULL && rdata[i] != NULL) || (rdata[i] == NULL && wdata[i] != NULL)) {
+            TestErrPrintf("VL data values don't match!\n");
+            continue;
+        } /* end if */
+        if (wdata[i] == NULL)
+            continue; /* both NULL -> match */
+        if (HDstrlen(wdata[i]) != HDstrlen(rdata[i])) {
+            TestErrPrintf("VL data length don't match!, strlen(wdata[%d])=%d, strlen(rdata[%d])=%d\n", (int)i,
+                          (int)HDstrlen(wdata[i]), (int)i, (int)HDstrlen(rdata[i]));
+            continue;
+        } /* end if */
+        if (HDstrcmp(wdata[i], rdata[i]) != 0) {
+            TestErrPrintf("VL data values don't match!, wdata[%d]=%s, rdata[%d]=%s\n", (int)i, wdata[i],
+                          (int)i, rdata[i]);
+            continue;
+        } /* end if */
+    } /* end for */
+
+    /* Reclaim the read VL data */
+    ret = H5Treclaim(tid1, sid1, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Create another dataset to test nil strings */
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, FAIL, "H5Pcreate");
+
+    /* Set a NULL fill value (nil VL string) for the second dataset */
+    fill = NULL;
+    ret = H5Pset_fill_value(dcpl, tid1, &fill);
+    CHECK(ret, FAIL, "H5Pset_fill_value");
+
+    dataset = H5Dcreate2(fid1, "Dataset4", tid1, sid1, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Close dataset creation property list */
+    ret = H5Pclose(dcpl);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Read from dataset before writing data; fill value is nil */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Check data read in */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        if (rdata[i] != NULL)
+            TestErrPrintf("VL doesn't match!, rdata[%d]=%s\n", (int)i, rdata[i]);
+
+    /* Try to write nil strings to disk. */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata2);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Read nil strings back from disk */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Check data read in */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        if (rdata[i] != NULL)
+            TestErrPrintf("VL doesn't match!, rdata[%d]=%s\n", (int)i, rdata[i]);
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+}
+
+/****************************************************************
+**
+** test_vlstring_type(): Test VL string type.
+** Tests if VL string is treated as string.
+**
+****************************************************************/
+static void
+test_vlstring_type(void)
+{
+    hid_t fid; /* HDF5 File IDs */
+    hid_t tid_vlstr; /* VL string datatype ID */
+    H5T_cset_t cset; /* Character set of the datatype */
+    H5T_str_t pad; /* String padding of the datatype */
+    htri_t vl_str; /* Whether string is VL */
+    herr_t ret;
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing VL String type\n"));
+
+    /* Open file (created by an earlier test in this file) */
+    fid = H5Fopen(DATAFILE, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Create a datatype to refer to; starts as a fixed-size C string */
+    tid_vlstr = H5Tcopy(H5T_C_S1);
+    CHECK(tid_vlstr, FAIL, "H5Tcopy");
+
+    /* Change padding and verify it.  Done *before* the conversion to
+     * variable-length below, to verify the padding survives it.
+     */
+    ret = H5Tset_strpad(tid_vlstr, H5T_STR_NULLPAD);
+    CHECK(ret, FAIL, "H5Tset_strpad");
+    pad = H5Tget_strpad(tid_vlstr);
+    VERIFY(pad, H5T_STR_NULLPAD, "H5Tget_strpad");
+
+    /* Convert to variable-length string */
+    ret = H5Tset_size(tid_vlstr, H5T_VARIABLE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    /* Check that the datatype still reports class STRING (not VLEN)
+     * and is flagged as variable-length */
+    ret = H5Tget_class(tid_vlstr);
+    VERIFY(ret, H5T_STRING, "H5Tget_class");
+    ret = H5Tis_variable_str(tid_vlstr);
+    VERIFY(ret, TRUE, "H5Tis_variable_str");
+
+    /* Verify that the class detects as a string */
+    vl_str = H5Tdetect_class(tid_vlstr, H5T_STRING);
+    CHECK(vl_str, FAIL, "H5Tdetect_class");
+    VERIFY(vl_str, TRUE, "H5Tdetect_class");
+
+    /* Check character set and padding carried over from before the conversion */
+    cset = H5Tget_cset(tid_vlstr);
+    VERIFY(cset, H5T_CSET_ASCII, "H5Tget_cset");
+    pad = H5Tget_strpad(tid_vlstr);
+    VERIFY(pad, H5T_STR_NULLPAD, "H5Tget_strpad");
+
+    /* Commit variable-length string datatype to storage */
+    ret = H5Tcommit2(fid, VLSTR_TYPE, tid_vlstr, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(ret, FAIL, "H5Tcommit2");
+
+    /* Close datatype */
+    ret = H5Tclose(tid_vlstr);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Re-open the committed type while the file is still open, then close it */
+    tid_vlstr = H5Topen2(fid, VLSTR_TYPE, H5P_DEFAULT);
+    CHECK(tid_vlstr, FAIL, "H5Topen2");
+
+    ret = H5Tclose(tid_vlstr);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open the file from scratch so the committed type is read from disk */
+    fid = H5Fopen(DATAFILE, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(fid, FAIL, "H5Fopen");
+
+    /* Open the variable-length string datatype just created */
+    tid_vlstr = H5Topen2(fid, VLSTR_TYPE, H5P_DEFAULT);
+    CHECK(tid_vlstr, FAIL, "H5Topen2");
+
+    /* Verify character set and padding persisted through commit/re-open */
+    cset = H5Tget_cset(tid_vlstr);
+    VERIFY(cset, H5T_CSET_ASCII, "H5Tget_cset");
+    pad = H5Tget_strpad(tid_vlstr);
+    VERIFY(pad, H5T_STR_NULLPAD, "H5Tget_strpad");
+
+    /* Close datatype and file */
+    ret = H5Tclose(tid_vlstr);
+    CHECK(ret, FAIL, "H5Tclose");
+    ret = H5Fclose(fid);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_vlstring_type() */
+
+/****************************************************************
+**
+** test_compact_vlstring(): Test code for storing VL strings in
+** compact datasets.
+**
+****************************************************************/
+static void
+test_compact_vlstring(void)
+{
+    const char *wdata[SPACE1_DIM1] = {"one", "two", "three", "four"};
+    char *rdata[SPACE1_DIM1]; /* Buffers filled in by H5Dread() */
+    hid_t file_id;            /* File ID */
+    hid_t dset_id;            /* Dataset ID */
+    hid_t space_id;           /* Dataspace ID */
+    hid_t dtype_id;           /* VL string datatype ID */
+    hid_t dcpl;               /* Dataset creation property list */
+    hsize_t dset_dims[] = {SPACE1_DIM1};
+    unsigned u;               /* Loop counter */
+    herr_t status;            /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing VL Strings in compact dataset\n"));
+
+    /* Create the file and a simple 1-D dataspace */
+    file_id = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file_id, FAIL, "H5Fcreate");
+
+    space_id = H5Screate_simple(SPACE1_RANK, dset_dims, NULL);
+    CHECK(space_id, FAIL, "H5Screate_simple");
+
+    /* Build a variable-length string datatype */
+    dtype_id = H5Tcopy(H5T_C_S1);
+    CHECK(dtype_id, FAIL, "H5Tcopy");
+
+    status = H5Tset_size(dtype_id, H5T_VARIABLE);
+    CHECK(status, FAIL, "H5Tset_size");
+
+    /* Request compact storage for the dataset */
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl, FAIL, "H5Pcreate");
+
+    status = H5Pset_layout(dcpl, H5D_COMPACT);
+    CHECK(status, FAIL, "H5Pset_layout");
+
+    /* Create the compact dataset */
+    dset_id = H5Dcreate2(file_id, "Dataset5", dtype_id, space_id, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    CHECK(dset_id, FAIL, "H5Dcreate2");
+
+    /* Round-trip the strings through the dataset */
+    status = H5Dwrite(dset_id, dtype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(status, FAIL, "H5Dwrite");
+
+    status = H5Dread(dset_id, dtype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(status, FAIL, "H5Dread");
+
+    /* Verify that what was read matches what was written */
+    for (u = 0; u < SPACE1_DIM1; u++) {
+        if (HDstrlen(wdata[u]) != HDstrlen(rdata[u])) {
+            TestErrPrintf("VL data length don't match!, strlen(wdata[%d])=%d, strlen(rdata[%d])=%d\n", (int)u,
+                          (int)HDstrlen(wdata[u]), (int)u, (int)HDstrlen(rdata[u]));
+            continue;
+        } /* end if */
+        if (HDstrcmp(wdata[u], rdata[u]) != 0) {
+            TestErrPrintf("VL data values don't match!, wdata[%d]=%s, rdata[%d]=%s\n", (int)u, wdata[u],
+                          (int)u, rdata[u]);
+            continue;
+        } /* end if */
+    } /* end for */
+
+    /* Release the library-allocated strings */
+    status = H5Treclaim(dtype_id, space_id, H5P_DEFAULT, rdata);
+    CHECK(status, FAIL, "H5Treclaim");
+
+    /* Close all IDs */
+    status = H5Dclose(dset_id);
+    CHECK(status, FAIL, "H5Dclose");
+
+    status = H5Tclose(dtype_id);
+    CHECK(status, FAIL, "H5Tclose");
+
+    status = H5Sclose(space_id);
+    CHECK(status, FAIL, "H5Sclose");
+
+    status = H5Pclose(dcpl);
+    CHECK(status, FAIL, "H5Pclose");
+
+    status = H5Fclose(file_id);
+    CHECK(status, FAIL, "H5Fclose");
+} /* end test_compact_vlstring() */
+
+/****************************************************************
+**
+** test_write_vl_string_attribute(): Test basic VL string code.
+** Tests writing VL strings as attributes
+**
+****************************************************************/
+static void
+test_write_vl_string_attribute(void)
+{
+    hid_t file, root, dataspace, att;
+    hid_t type;
+    herr_t ret;
+    char *string_att_check = NULL;
+
+    /* Open the file */
+    file = H5Fopen(DATAFILE, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(file, FAIL, "H5Fopen");
+
+    /* Create a VL string datatype to refer to. */
+    type = H5Tcopy(H5T_C_S1);
+    CHECK(type, FAIL, "H5Tcopy");
+
+    ret = H5Tset_size(type, H5T_VARIABLE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    root = H5Gopen2(file, "/", H5P_DEFAULT);
+    CHECK(root, FAIL, "H5Gopen2");
+
+    dataspace = H5Screate(H5S_SCALAR);
+    CHECK(dataspace, FAIL, "H5Screate");
+
+    /* Test creating a "normal" sized string attribute */
+    att = H5Acreate2(root, "test_scalar", type, dataspace, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(att, FAIL, "H5Acreate2");
+
+    ret = H5Awrite(att, type, &string_att);
+    CHECK(ret, FAIL, "H5Awrite");
+
+    /* Read it back and verify the round-trip */
+    ret = H5Aread(att, type, &string_att_check);
+    CHECK(ret, FAIL, "H5Aread");
+
+    if (HDstrcmp(string_att_check, string_att) != 0)
+        TestErrPrintf("VL string attributes don't match!, string_att=%s, string_att_check=%s\n", string_att,
+                      string_att_check);
+
+    H5free_memory(string_att_check);
+    string_att_check = NULL;
+
+    ret = H5Aclose(att);
+    CHECK(ret, FAIL, "H5Aclose"); /* (was mislabeled "HAclose") */
+
+    /* Test creating a "large" (8KB) sized string attribute */
+    att = H5Acreate2(root, "test_scalar_large", type, dataspace, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(att, FAIL, "H5Acreate2");
+
+    /* Verify the allocation before writing through it */
+    string_att_write = (char *)HDcalloc((size_t)8192, sizeof(char));
+    CHECK_PTR(string_att_write, "HDcalloc");
+    HDmemset(string_att_write, 'A', (size_t)8191);
+
+    ret = H5Awrite(att, type, &string_att_write);
+    CHECK(ret, FAIL, "H5Awrite");
+
+    ret = H5Aread(att, type, &string_att_check);
+    CHECK(ret, FAIL, "H5Aread");
+
+    if (HDstrcmp(string_att_check, string_att_write) != 0)
+        TestErrPrintf("VL string attributes don't match!, string_att_write=%s, string_att_check=%s\n",
+                      string_att_write, string_att_check);
+
+    H5free_memory(string_att_check);
+    string_att_check = NULL;
+
+    /* The attribute string written is freed below, in the test_read_vl_string_attribute() test */
+    /* HDfree(string_att_write); */
+
+    ret = H5Aclose(att);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    ret = H5Gclose(root);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Tclose(type);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Sclose(dataspace);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+}
+
+/****************************************************************
+**
+** test_read_vl_string_attribute(): Test basic VL string code.
+** Tests reading VL strings from attributes
+**
+****************************************************************/
+static void
+test_read_vl_string_attribute(void)
+{
+    hid_t file, root, att;
+    hid_t type;
+    herr_t ret;
+    char *string_att_check = NULL;
+
+    /* Open file read-only; the attributes were written by
+     * test_write_vl_string_attribute() */
+    file = H5Fopen(DATAFILE, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(file, FAIL, "H5Fopen");
+
+    /* Create a VL string datatype to refer to. */
+    type = H5Tcopy(H5T_C_S1);
+    CHECK(type, FAIL, "H5Tcopy");
+
+    ret = H5Tset_size(type, H5T_VARIABLE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    root = H5Gopen2(file, "/", H5P_DEFAULT);
+    CHECK(root, FAIL, "H5Gopen2");
+
+    /* Test reading "normal" sized string attribute */
+    att = H5Aopen(root, "test_scalar", H5P_DEFAULT);
+    CHECK(att, FAIL, "H5Aopen");
+
+    ret = H5Aread(att, type, &string_att_check);
+    CHECK(ret, FAIL, "H5Aread");
+
+    if (HDstrcmp(string_att_check, string_att) != 0)
+        TestErrPrintf("VL string attributes don't match!, string_att=%s, string_att_check=%s\n", string_att,
+                      string_att_check);
+
+    H5free_memory(string_att_check);
+    string_att_check = NULL;
+
+    ret = H5Aclose(att);
+    CHECK(ret, FAIL, "H5Aclose"); /* (was mislabeled "HAclose") */
+
+    /* Test reading "large" sized string attribute */
+    att = H5Aopen(root, "test_scalar_large", H5P_DEFAULT);
+    CHECK(att, FAIL, "H5Aopen");
+
+    /* Only compare if the writer test actually allocated the string */
+    if (string_att_write) {
+        ret = H5Aread(att, type, &string_att_check);
+        CHECK(ret, FAIL, "H5Aread");
+
+        if (HDstrcmp(string_att_check, string_att_write) != 0)
+            TestErrPrintf("VL string attributes don't match!, string_att_write=%s, string_att_check=%s\n",
+                          string_att_write, string_att_check);
+
+        H5free_memory(string_att_check);
+        string_att_check = NULL;
+    }
+
+    /* Free string allocated in test_write_vl_string_attribute */
+    if (string_att_write)
+        HDfree(string_att_write);
+
+    ret = H5Aclose(att);
+    CHECK(ret, FAIL, "H5Aclose");
+
+    ret = H5Tclose(type);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Gclose(root);
+    CHECK(ret, FAIL, "H5Gclose");
+
+    ret = H5Fclose(file);
+    CHECK(ret, FAIL, "H5Fclose");
+}
+
+/* Helper routine for test_vl_rewrite() */
+static void
+write_scalar_dset(hid_t file, hid_t type, hid_t space, char *name, char *data)
+{
+    herr_t status;
+    hid_t dset_id;
+
+    /* Create the scalar dataset and store the single VL string in it */
+    dset_id = H5Dcreate2(file, name, type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dset_id, FAIL, "H5Dcreate2");
+
+    status = H5Dwrite(dset_id, type, space, space, H5P_DEFAULT, &data);
+    CHECK(status, FAIL, "H5Dwrite");
+
+    status = H5Dclose(dset_id);
+    CHECK(status, FAIL, "H5Dclose");
+}
+
+/* Helper routine for test_vl_rewrite() */
+/* Helper routine for test_vl_rewrite(): reads the scalar VL-string
+ * dataset `name` and verifies its content equals `data`.
+ */
+static void
+read_scalar_dset(hid_t file, hid_t type, hid_t space, char *name, char *data)
+{
+    hid_t dset;
+    herr_t ret;
+    char *data_read = NULL; /* Initialized so a failed read is detectable */
+
+    dset = H5Dopen2(file, name, H5P_DEFAULT);
+    CHECK(dset, FAIL, "H5Dopen2");
+
+    ret = H5Dread(dset, type, space, space, H5P_DEFAULT, &data_read);
+    CHECK(ret, FAIL, "H5Dread");
+
+    ret = H5Dclose(dset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Only compare and reclaim when the read actually produced a string.
+     * Previously data_read was uninitialized, so a failed H5Dread() led to
+     * HDstrcmp() on an indeterminate pointer (undefined behavior).
+     */
+    if (data_read != NULL) {
+        if (HDstrcmp(data, data_read) != 0)
+            TestErrPrintf("Expected %s for dataset %s but read %s\n", data, name, data_read);
+
+        ret = H5Treclaim(type, space, H5P_DEFAULT, &data_read);
+        CHECK(ret, FAIL, "H5Treclaim");
+    }
+}
+
+/****************************************************************
+**
+** test_vl_rewrite(): Test basic VL string code.
+** Tests I/O on VL strings when lots of objects in the file
+** have been linked/unlinked.
+**
+****************************************************************/
+static void
+test_vl_rewrite(void)
+{
+    hid_t file1, file2; /* File IDs */
+    hid_t type; /* VL string datatype ID */
+    hid_t space; /* Scalar dataspace */
+    char name[256]; /* Buffer for names & data */
+    int i; /* Local index variable */
+    herr_t ret; /* Generic return value */
+
+    /* Create the VL string datatype */
+    type = H5Tcopy(H5T_C_S1);
+    CHECK(type, FAIL, "H5Tcopy");
+
+    ret = H5Tset_size(type, H5T_VARIABLE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    /* Create the scalar dataspace */
+    space = H5Screate(H5S_SCALAR);
+    CHECK(space, FAIL, "H5Screate");
+
+    /* Open the files */
+    file1 = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file1, FAIL, "H5Fcreate");
+
+    file2 = H5Fcreate(DATAFILE2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file2, FAIL, "H5Fcreate"); /* fixed: previously re-checked file1 */
+
+    /* Create in file 1; each dataset's name doubles as its data */
+    for (i = 0; i < REWRITE_NDATASETS; i++) {
+        HDsnprintf(name, sizeof(name), "/set_%d", i);
+        write_scalar_dset(file1, type, space, name, name);
+    }
+
+    /* Effectively copy data from file 1 to 2 */
+    for (i = 0; i < REWRITE_NDATASETS; i++) {
+        HDsnprintf(name, sizeof(name), "/set_%d", i);
+        read_scalar_dset(file1, type, space, name, name);
+        write_scalar_dset(file2, type, space, name, name);
+    }
+
+    /* Read back from file 2 */
+    for (i = 0; i < REWRITE_NDATASETS; i++) {
+        HDsnprintf(name, sizeof(name), "/set_%d", i);
+        read_scalar_dset(file2, type, space, name, name);
+    } /* end for */
+
+    /* Remove from file 2, then rewrite, to exercise VL I/O after heavy
+     * link/unlink churn. */
+    for (i = 0; i < REWRITE_NDATASETS; i++) {
+        HDsnprintf(name, sizeof(name), "/set_%d", i);
+        ret = H5Ldelete(file2, name, H5P_DEFAULT);
+        CHECK(ret, FAIL, "H5Ldelete");
+    } /* end for */
+
+    /* Effectively copy from file 1 to file 2 */
+    for (i = 0; i < REWRITE_NDATASETS; i++) {
+        HDsnprintf(name, sizeof(name), "/set_%d", i);
+        read_scalar_dset(file1, type, space, name, name);
+        write_scalar_dset(file2, type, space, name, name);
+    } /* end for */
+
+    /* Close everything */
+    ret = H5Tclose(type);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Sclose(space);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Fclose(file1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    ret = H5Fclose(file2);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_vl_rewrite() */
+
+/****************************************************************
+ **
+ ** test_write_same_element():
+ ** Tests writing to the same element of VL string using
+ ** H5Sselect_element.
+ **
+ ****************************************************************/
+static void
+test_write_same_element(void)
+{
+#ifndef NO_WRITE_SAME_ELEMENT_TWICE
+    hid_t file1, dataset1;
+    hid_t mspace, fspace, dtype;
+    hsize_t fdim[] = {SPACE1_DIM1};
+    const char *wdata[SPACE1_DIM1] = {"Parting", "is such a", "sweet", "sorrow."};
+    const char *val[SPACE1_DIM1] = {"But", "reuniting", "is a", "great joy"};
+    hsize_t marray[] = {NUMP};
+    hsize_t coord[SPACE1_RANK][NUMP];
+    herr_t ret;
+#endif
+
+    MESSAGE(
+        5,
+        ("Testing writing to same element of VL string dataset twice - SKIPPED for now due to no support\n"));
+#ifndef NO_WRITE_SAME_ELEMENT_TWICE
+    /* Create the file and write the initial strings */
+    file1 = H5Fcreate(DATAFILE3, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file1, FAIL, "H5Fcreate");
+
+    dtype = H5Tcopy(H5T_C_S1);
+    CHECK(dtype, FAIL, "H5Tcopy");
+
+    ret = H5Tset_size(dtype, H5T_VARIABLE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    fspace = H5Screate_simple(SPACE1_RANK, fdim, NULL);
+    CHECK(fspace, FAIL, "H5Screate_simple");
+
+    dataset1 = H5Dcreate2(file1, DATASET, dtype, fspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset1, FAIL, "H5Dcreate");
+
+    ret = H5Dwrite(dataset1, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    ret = H5Dclose(dataset1);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Tclose(dtype);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Sclose(fspace);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Fclose(file1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /*
+     * Open the file. Select the same points, write values to those point locations.
+     */
+    file1 = H5Fopen(DATAFILE3, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(file1, FAIL, "H5Fopen");
+
+    dataset1 = H5Dopen2(file1, DATASET, H5P_DEFAULT);
+    CHECK(dataset1, FAIL, "H5Dopen");
+
+    fspace = H5Dget_space(dataset1);
+    CHECK(fspace, FAIL, "H5Dget_space");
+
+    dtype = H5Dget_type(dataset1);
+    CHECK(dtype, FAIL, "H5Dget_type");
+
+    mspace = H5Screate_simple(1, marray, NULL);
+    CHECK(mspace, FAIL, "H5Screate_simple");
+
+    /* Point selection deliberately repeats elements 0 and 2, so each of
+     * those VL strings is rewritten twice in one H5Dwrite() call.
+     */
+    coord[0][0] = 0;
+    coord[0][1] = 2;
+    coord[0][2] = 2;
+    coord[0][3] = 0;
+
+    ret = H5Sselect_elements(fspace, H5S_SELECT_SET, NUMP, (const hsize_t *)&coord);
+    CHECK(ret, FAIL, "H5Sselect_elements");
+
+    ret = H5Dwrite(dataset1, dtype, mspace, fspace, H5P_DEFAULT, val);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    ret = H5Tclose(dtype);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    ret = H5Dclose(dataset1);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    ret = H5Sclose(fspace);
+    CHECK(ret, FAIL, "H5Sclose"); /* fixed: failure was mislabeled "H5Dclose" */
+
+    ret = H5Sclose(mspace);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    ret = H5Fclose(file1);
+    CHECK(ret, FAIL, "H5Fclose");
+#endif
+} /* test_write_same_element */
+
+/****************************************************************
+**
+** test_vlstrings(): Main VL string testing routine.
+**
+****************************************************************/
+void
+test_vlstrings(void)
+{
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Variable-Length Strings\n"));
+
+    /* These tests use the same file */
+    /* Test basic VL string datatype */
+    test_vlstrings_basic();
+    test_vlstrings_special();
+    test_vlstring_type();
+    test_compact_vlstring();
+
+    /* Test using VL strings in attributes.  NOTE: ordering matters here --
+     * the write test allocates the file-scope string_att_write buffer and
+     * the read test frees it.
+     */
+    test_write_vl_string_attribute();
+    test_read_vl_string_attribute();
+
+    /* Test writing VL datasets in files with lots of unlinking */
+    test_vl_rewrite();
+    /* Test writing to the same element more than once using H5Sselect_elements */
+    test_write_same_element();
+} /* test_vlstrings() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_vlstrings
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Quincey Koziol
+ * September 10, 1999
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_vlstrings(void)
+{
+    /* Remove each file created by the VL string tests above */
+    H5Fdelete(DATAFILE, H5P_DEFAULT);
+    H5Fdelete(DATAFILE2, H5P_DEFAULT);
+    H5Fdelete(DATAFILE3, H5P_DEFAULT);
+}
diff --git a/test/API/tvltypes.c b/test/API/tvltypes.c
new file mode 100644
index 0000000..eca534b
--- /dev/null
+++ b/test/API/tvltypes.c
@@ -0,0 +1,3268 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/***********************************************************
+ *
+ * Test program: tvltypes
+ *
+ * Test the Variable-Length Datatype functionality
+ *
+ *************************************************************/
+
+#include "testhdf5.h"
+
+/* #include "H5Dprivate.h" */
+
+#define FILENAME "tvltypes.h5"
+
+/* 1-D dataset with fixed dimensions */
+#define SPACE1_RANK 1
+#define SPACE1_DIM1 4
+
+/* 1-D dataset with fixed dimensions */
+#define SPACE3_RANK 1
+#define SPACE3_DIM1 128
+#define L1_INCM 16
+#define L2_INCM 8
+#define L3_INCM 3
+
+/* Default temporary buffer size - Pulled from H5Dprivate.h */
+#define H5D_TEMP_BUF_SIZE (1024 * 1024)
+
+/* 1-D dataset with fixed dimensions */
+#define SPACE4_RANK 1
+#define SPACE4_DIM_SMALL 128
+#define SPACE4_DIM_LARGE (H5D_TEMP_BUF_SIZE / 64)
+
+void *test_vltypes_alloc_custom(size_t size, void *info);
+void test_vltypes_free_custom(void *mem, void *info);
+
+/****************************************************************
+**
+**  test_vltypes_alloc_custom(): Test VL datatype custom memory
+**      allocation routines.  This routine just uses malloc to
+**      allocate the memory and increments the amount of memory
+**      allocated.  'mem_used' points to a size_t running total
+**      of bytes handed out; test_vltypes_free_custom() must be
+**      used to release blocks allocated here.
+**
+**  Returns a pointer to 'size' usable bytes, or NULL on OOM.
+**
+****************************************************************/
+void *
+test_vltypes_alloc_custom(size_t size, void *mem_used)
+{
+    void        *ret_value;                                   /* Pointer to return */
+    const size_t extra = MAX(sizeof(void *), sizeof(size_t)); /* Extra space needed */
+    /* (This weird contortion is required on the
+     * DEC Alpha to keep the alignment correct - QAK)
+     */
+
+    if ((ret_value = HDmalloc(extra + size)) != NULL) {
+        /* Stash the payload size in the header so the free routine can
+         * decrement the accounting total, then bump the running total.
+         */
+        *(size_t *)ret_value = size;
+        *(size_t *)mem_used += size;
+
+        /* Only step past the bookkeeping header on success.  Previously this
+         * adjustment was unconditional, which performed arithmetic on a NULL
+         * pointer (undefined behavior) and returned a bogus non-NULL pointer
+         * to the library when the allocation failed.
+         */
+        ret_value = ((unsigned char *)ret_value) + extra;
+    } /* end if */
+
+    return (ret_value);
+}
+
+/****************************************************************
+**
+**  test_vltypes_free_custom(): Test VL datatype custom memory
+**      allocation routines.  This routine just uses free to
+**      release the memory and decrements the amount of memory
+**      allocated.
+**
+****************************************************************/
+void
+test_vltypes_free_custom(void *_mem, void *mem_used)
+{
+    const size_t   header = MAX(sizeof(void *), sizeof(size_t)); /* Bookkeeping bytes in front of the payload */
+    /* (This weird contortion is required
+     * on the DEC Alpha to keep the
+     * alignment correct - QAK)
+     */
+    unsigned char *base;                                         /* Start of the block actually allocated */
+
+    /* Nothing to do for a NULL pointer */
+    if (!_mem)
+        return;
+
+    /* Step back over the size header written by the matching alloc routine */
+    base = ((unsigned char *)_mem) - header;
+
+    /* Subtract this block's payload size from the running total, then free */
+    *(size_t *)mem_used -= *(size_t *)((void *)base);
+    HDfree(base);
+}
+
+/****************************************************************
+**
+**  test_vltypes_dataset_create(): Creating a dataset of a VL
+**      datatype is supposed to fail when the fill value is
+**      never written to the dataset.
+**
+****************************************************************/
+static void
+test_vltypes_dataset_create(void)
+{
+    hid_t   file_id;      /* HDF5 file ID */
+    hid_t   create_plist; /* Dataset creation property list ID */
+    hid_t   dset_id;      /* Dataset ID */
+    hsize_t dims1[] = {SPACE1_DIM1};
+    hid_t   space_id;     /* Dataspace ID */
+    hid_t   vl_type_id;   /* VL datatype ID */
+    herr_t  status;       /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Dataset of VL Datatype Functionality\n"));
+
+    /* Create file */
+    file_id = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(file_id, FAIL, "H5Fcreate");
+
+    /* Create the dataspace for the dataset */
+    space_id = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(space_id, FAIL, "H5Screate_simple");
+
+    /* Create a VL datatype over native unsigned ints */
+    vl_type_id = H5Tvlen_create(H5T_NATIVE_UINT);
+    CHECK(vl_type_id, FAIL, "H5Tvlen_create");
+
+    /* Create the dataset creation property list */
+    create_plist = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(create_plist, FAIL, "H5Pcreate");
+
+    /* Set fill value writing time to be NEVER */
+    status = H5Pset_fill_time(create_plist, H5D_FILL_TIME_NEVER);
+    CHECK(status, FAIL, "H5Pset_fill_time");
+
+    /* Attempt the dataset creation; with fill time NEVER on a VL type this
+     * is expected to fail */
+    H5E_BEGIN_TRY
+    {
+        dset_id = H5Dcreate2(file_id, "Dataset1", vl_type_id, space_id, H5P_DEFAULT, create_plist, H5P_DEFAULT);
+    }
+    H5E_END_TRY;
+    VERIFY(dset_id, FAIL, "H5Dcreate2");
+
+    /* Close datatype */
+    status = H5Tclose(vl_type_id);
+    CHECK(status, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    status = H5Sclose(space_id);
+    CHECK(status, FAIL, "H5Sclose");
+
+    /* Close dataset creation property list */
+    status = H5Pclose(create_plist);
+    CHECK(status, FAIL, "H5Pclose");
+
+    /* Close file */
+    status = H5Fclose(file_id);
+    CHECK(status, FAIL, "H5Fclose");
+}
+
+/****************************************************************
+**
+**  test_vltypes_funcs(): Test some type functions that are and
+**      aren't supposed to work with VL type.
+**
+****************************************************************/
+static void
+test_vltypes_funcs(void)
+{
+    hid_t      vl_type;    /* VL datatype ID */
+    size_t     query_size; /* Result of the size-style queries */
+    H5T_pad_t  pad_val;
+    H5T_norm_t norm_val;
+    H5T_cset_t cset_val;
+    H5T_str_t  strpad_val;
+    herr_t     status;     /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing some type functions for VL\n"));
+
+    /* Build a VL datatype over a big-endian IEEE float base type */
+    vl_type = H5Tvlen_create(H5T_IEEE_F32BE);
+    CHECK(vl_type, FAIL, "H5Tvlen_create");
+
+    /* These queries/settings are expected to succeed on a VL type */
+    query_size = H5Tget_precision(vl_type);
+    CHECK(query_size, 0, "H5Tget_precision");
+
+    query_size = H5Tget_size(vl_type);
+    CHECK(query_size, 0, "H5Tget_size");
+
+    query_size = H5Tget_ebias(vl_type);
+    CHECK(query_size, 0, "H5Tget_ebias");
+
+    status = H5Tset_pad(vl_type, H5T_PAD_ZERO, H5T_PAD_ONE);
+    CHECK(status, FAIL, "H5Tset_pad");
+
+    pad_val = H5Tget_inpad(vl_type);
+    CHECK(pad_val, FAIL, "H5Tget_inpad");
+
+    norm_val = H5Tget_norm(vl_type);
+    CHECK(norm_val, FAIL, "H5Tget_norm");
+
+    status = H5Tset_offset(vl_type, (size_t)16);
+    CHECK(status, FAIL, "H5Tset_offset");
+
+    /* String-specific queries are expected to fail on a VL type */
+    H5E_BEGIN_TRY
+    {
+        cset_val = H5Tget_cset(vl_type);
+    }
+    H5E_END_TRY;
+    VERIFY(cset_val, FAIL, "H5Tget_cset");
+
+    H5E_BEGIN_TRY
+    {
+        strpad_val = H5Tget_strpad(vl_type);
+    }
+    H5E_END_TRY;
+    VERIFY(strpad_val, FAIL, "H5Tget_strpad");
+
+    /* Close datatype */
+    status = H5Tclose(vl_type);
+    CHECK(status, FAIL, "H5Tclose");
+}
+
+/****************************************************************
+**
+**  test_vltypes_vlen_atomic(): Test basic VL datatype code.
+**      Tests VL datatypes of atomic datatypes
+**
+****************************************************************/
+static void
+test_vltypes_vlen_atomic(void)
+{
+    hvl_t    wdata[SPACE1_DIM1];  /* Information to write */
+    hvl_t    wdata2[SPACE1_DIM1]; /* Information to write */
+    hvl_t    rdata[SPACE1_DIM1];  /* Information read in */
+    hvl_t    fill;                /* Fill value */
+    hid_t    fid1;                /* HDF5 File IDs */
+    hid_t    dataset;             /* Dataset ID */
+    hid_t    sid1;                /* Dataspace ID */
+    hid_t    sid2;                /* ID of bad dataspace (no extent set) */
+    hid_t    tid1;                /* Datatype ID */
+    hid_t    dcpl_pid;            /* Dataset creation property list ID */
+    hid_t    xfer_pid;            /* Dataset transfer property list ID */
+    hsize_t  dims1[] = {SPACE1_DIM1};
+    hsize_t  size;                /* Number of bytes which will be used */
+    unsigned i, j;                /* counting variables */
+    size_t   mem_used = 0;        /* Memory used during allocation */
+    herr_t   ret;                 /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Basic Atomic VL Datatype Functionality\n"));
+
+    /* Allocate and initialize VL data to write */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        wdata[i].p   = HDmalloc((i + 1) * sizeof(unsigned int));
+        wdata[i].len = i + 1;
+        for (j = 0; j < (i + 1); j++)
+            ((unsigned int *)wdata[i].p)[j] = i * 10 + j;
+
+        /* wdata2 holds "nil" VL sequences: zero length, NULL data pointer */
+        wdata2[i].p   = NULL;
+        wdata2[i].len = 0;
+    } /* end for */
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create a datatype to refer to */
+    tid1 = H5Tvlen_create(H5T_NATIVE_UINT);
+    CHECK(tid1, FAIL, "H5Tvlen_create");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, "Dataset1", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Read from dataset before writing data; all elements should come back
+     * as "nil" sequences */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Check data read in */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        if (rdata[i].len != 0 || rdata[i].p != NULL)
+            TestErrPrintf("VL doesn't match!, rdata[%d].len=%u, rdata[%d].p=%p\n", (int)i,
+                          (unsigned)rdata[i].len, (int)i, rdata[i].p);
+
+    /* Write "nil" data to disk */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata2);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Read from dataset with "nil" data */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Check data read in */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        if (rdata[i].len != 0 || rdata[i].p != NULL)
+            TestErrPrintf("VL doesn't match!, rdata[%d].len=%u, rdata[%d].p=%p\n", (int)i,
+                          (unsigned)rdata[i].len, (int)i, rdata[i].p);
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Create second dataset, with fill value */
+    dcpl_pid = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl_pid, FAIL, "H5Pcreate");
+
+    /* Set the fill value for the second dataset (a "nil" VL sequence) */
+    fill.p   = NULL;
+    fill.len = 0;
+    ret      = H5Pset_fill_value(dcpl_pid, tid1, &fill);
+    CHECK(ret, FAIL, "H5Pset_fill_value");
+
+    /* Create a second dataset */
+    dataset = H5Dcreate2(fid1, "Dataset2", tid1, sid1, H5P_DEFAULT, dcpl_pid, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Close dataset creation property list */
+    ret = H5Pclose(dcpl_pid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Read from dataset before writing data */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Check data read in */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        if (rdata[i].len != 0 || rdata[i].p != NULL)
+            TestErrPrintf("VL doesn't match!, rdata[%d].len=%u, rdata[%d].p=%p\n", (int)i,
+                          (unsigned)rdata[i].len, (int)i, rdata[i].p);
+
+    /* Write "nil" data to disk */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata2);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Read from dataset with "nil" data */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Check data read in */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        if (rdata[i].len != 0 || rdata[i].p != NULL)
+            TestErrPrintf("VL doesn't match!, rdata[%d].len=%u, rdata[%d].p=%p\n", (int)i,
+                          (unsigned)rdata[i].len, (int)i, rdata[i].p);
+
+    /* Write data to disk */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Open the file for data checking */
+    fid1 = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fopen");
+
+    /* Open a dataset */
+    dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+    /* Get dataspace for datasets */
+    sid1 = H5Dget_space(dataset);
+    CHECK(sid1, FAIL, "H5Dget_space");
+
+    /* Get datatype for dataset */
+    tid1 = H5Dget_type(dataset);
+    CHECK(tid1, FAIL, "H5Dget_type");
+
+    /* Change to the custom memory allocation routines for reading VL data,
+     * so the test can track exactly how many bytes the library allocates */
+    xfer_pid = H5Pcreate(H5P_DATASET_XFER);
+    CHECK(xfer_pid, FAIL, "H5Pcreate");
+
+    ret = H5Pset_vlen_mem_manager(xfer_pid, test_vltypes_alloc_custom, &mem_used, test_vltypes_free_custom,
+                                  &mem_used);
+    CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");
+
+    /* Make certain the correct amount of memory will be used */
+    ret = H5Dvlen_get_buf_size(dataset, tid1, sid1, &size);
+    CHECK(ret, FAIL, "H5Dvlen_get_buf_size");
+
+    /* 10 elements allocated = 1 + 2 + 3 + 4 elements for each array position */
+    VERIFY(size, ((SPACE1_DIM1 * (SPACE1_DIM1 + 1)) / 2) * sizeof(unsigned int), "H5Dvlen_get_buf_size");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Make certain the correct amount of memory has been used */
+    /* 10 elements allocated = 1 + 2 + 3 + 4 elements for each array position */
+    VERIFY(mem_used, ((SPACE1_DIM1 * (SPACE1_DIM1 + 1)) / 2) * sizeof(unsigned int), "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        if (wdata[i].len != rdata[i].len) {
+            TestErrPrintf("%d: VL data lengths don't match!, wdata[%d].len=%d, rdata[%d].len=%d\n", __LINE__,
+                          (int)i, (int)wdata[i].len, (int)i, (int)rdata[i].len);
+            continue;
+        } /* end if */
+        for (j = 0; j < rdata[i].len; j++) {
+            if (((unsigned int *)wdata[i].p)[j] != ((unsigned int *)rdata[i].p)[j]) {
+                TestErrPrintf("VL data values don't match!, wdata[%d].p[%d]=%d, rdata[%d].p[%d]=%d\n", (int)i,
+                              (int)j, (int)((unsigned int *)wdata[i].p)[j], (int)i, (int)j,
+                              (int)((unsigned int *)rdata[i].p)[j]);
+                continue;
+            } /* end if */
+        }     /* end for */
+    }         /* end for */
+
+    /* Reclaim the read VL data */
+    ret = H5Treclaim(tid1, sid1, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Make certain the VL memory has been freed (allocator total back to 0) */
+    VERIFY(mem_used, 0, "H5Treclaim");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close dataset transfer property list */
+    ret = H5Pclose(xfer_pid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Open second dataset */
+    dataset = H5Dopen2(fid1, "Dataset2", H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+    /* Get dataspace for datasets */
+    sid1 = H5Dget_space(dataset);
+    CHECK(sid1, FAIL, "H5Dget_space");
+
+    /* Get datatype for dataset */
+    tid1 = H5Dget_type(dataset);
+    CHECK(tid1, FAIL, "H5Dget_type");
+
+    /* Create a "bad" dataspace with no extent set */
+    sid2 = H5Screate(H5S_SIMPLE);
+    CHECK(sid2, FAIL, "H5Screate");
+
+    /* Change to the custom memory allocation routines for reading VL data */
+    xfer_pid = H5Pcreate(H5P_DATASET_XFER);
+    CHECK(xfer_pid, FAIL, "H5Pcreate");
+
+    ret = H5Pset_vlen_mem_manager(xfer_pid, test_vltypes_alloc_custom, &mem_used, test_vltypes_free_custom,
+                                  &mem_used);
+    CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");
+
+    /* Make certain the correct amount of memory will be used */
+    ret = H5Dvlen_get_buf_size(dataset, tid1, sid1, &size);
+    CHECK(ret, FAIL, "H5Dvlen_get_buf_size");
+
+    /* 10 elements allocated = 1 + 2 + 3 + 4 elements for each array position */
+    VERIFY(size, ((SPACE1_DIM1 * (SPACE1_DIM1 + 1)) / 2) * sizeof(unsigned int), "H5Dvlen_get_buf_size");
+
+    /* Try to call H5Dvlen_get_buf with bad dataspace; should fail */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Dvlen_get_buf_size(dataset, tid1, sid2, &size);
+    }
+    H5E_END_TRY
+    VERIFY(ret, FAIL, "H5Dvlen_get_buf_size");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Make certain the correct amount of memory has been used */
+    /* 10 elements allocated = 1 + 2 + 3 + 4 elements for each array position */
+    VERIFY(mem_used, ((SPACE1_DIM1 * (SPACE1_DIM1 + 1)) / 2) * sizeof(unsigned int), "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        if (wdata[i].len != rdata[i].len) {
+            TestErrPrintf("%d: VL data lengths don't match!, wdata[%d].len=%d, rdata[%d].len=%d\n", __LINE__,
+                          (int)i, (int)wdata[i].len, (int)i, (int)rdata[i].len);
+            continue;
+        } /* end if */
+        for (j = 0; j < rdata[i].len; j++) {
+            if (((unsigned int *)wdata[i].p)[j] != ((unsigned int *)rdata[i].p)[j]) {
+                TestErrPrintf("VL data values don't match!, wdata[%d].p[%d]=%d, rdata[%d].p[%d]=%d\n", (int)i,
+                              (int)j, (int)((unsigned int *)wdata[i].p)[j], (int)i, (int)j,
+                              (int)((unsigned int *)rdata[i].p)[j]);
+                continue;
+            } /* end if */
+        }     /* end for */
+    }         /* end for */
+
+    /* Try to reclaim read data using "bad" dataspace with no extent
+     * Should fail */
+    H5E_BEGIN_TRY
+    {
+        ret = H5Treclaim(tid1, sid2, xfer_pid, rdata);
+    }
+    H5E_END_TRY
+    VERIFY(ret, FAIL, "H5Treclaim");
+
+    /* Reclaim the read VL data */
+    ret = H5Treclaim(tid1, sid1, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Make certain the VL memory has been freed */
+    VERIFY(mem_used, 0, "H5Treclaim");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Reclaim the write VL data */
+    ret = H5Treclaim(tid1, sid1, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close dataset transfer property list */
+    ret = H5Pclose(xfer_pid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_vltypes_vlen_atomic() */
+
+/****************************************************************
+**
+**  rewrite_vltypes_vlen_atomic(): check memory leak for basic VL datatype.
+**      Check memory leak for VL datatypes of atomic datatypes
+**
+****************************************************************/
+static void
+rewrite_vltypes_vlen_atomic(void)
+{
+    hvl_t    wdata[SPACE1_DIM1]; /* Information to write */
+    hvl_t    rdata[SPACE1_DIM1]; /* Information read in */
+    hid_t    fid1;               /* HDF5 File IDs */
+    hid_t    dataset;            /* Dataset ID */
+    hid_t    sid1;               /* Dataspace ID */
+    hid_t    tid1;               /* Datatype ID */
+    hid_t    xfer_pid;           /* Dataset transfer property list ID */
+    hsize_t  size;               /* Number of bytes which will be used */
+    unsigned i, j;               /* counting variables */
+    size_t   mem_used = 0;       /* Memory used during allocation */
+    unsigned increment = 4;      /* New sequences are longer: lengths 4..7 (22 elements total) */
+    herr_t   ret;                /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Check Memory Leak for Basic Atomic VL Datatype Functionality\n"));
+
+    /* Allocate and initialize VL data to write */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        wdata[i].p   = HDmalloc((i + increment) * sizeof(unsigned int));
+        wdata[i].len = i + increment;
+        for (j = 0; j < (i + increment); j++)
+            ((unsigned int *)wdata[i].p)[j] = i * 20 + j;
+    } /* end for */
+
+    /* Open file created in test_vltypes_vlen_atomic() */
+    fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fopen");
+
+    /* Open the dataset created in test_vltypes_vlen_atomic() */
+    dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+    /* Open dataspace for dataset */
+    sid1 = H5Dget_space(dataset);
+    CHECK(sid1, FAIL, "H5Dget_space");
+
+    /* Get datatype for dataset */
+    tid1 = H5Dget_type(dataset);
+    CHECK(tid1, FAIL, "H5Dget_type");
+
+    /* Overwrite the dataset with the longer sequences */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Open the file for data checking */
+    fid1 = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fopen");
+
+    /* Open a dataset */
+    dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+    /* Get dataspace for datasets */
+    sid1 = H5Dget_space(dataset);
+    CHECK(sid1, FAIL, "H5Dget_space");
+
+    /* Get datatype for dataset */
+    tid1 = H5Dget_type(dataset);
+    CHECK(tid1, FAIL, "H5Dget_type");
+
+    /* Change to the custom memory allocation routines for reading VL data,
+     * so allocation totals can be checked */
+    xfer_pid = H5Pcreate(H5P_DATASET_XFER);
+    CHECK(xfer_pid, FAIL, "H5Pcreate");
+
+    ret = H5Pset_vlen_mem_manager(xfer_pid, test_vltypes_alloc_custom, &mem_used, test_vltypes_free_custom,
+                                  &mem_used);
+    CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");
+
+    /* Make certain the correct amount of memory will be used */
+    ret = H5Dvlen_get_buf_size(dataset, tid1, sid1, &size);
+    CHECK(ret, FAIL, "H5Dvlen_get_buf_size");
+
+    /* 22 elements allocated = 4+5+6+7 elements for each array position */
+    VERIFY(size, 22 * sizeof(unsigned int), "H5Dvlen_get_buf_size");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Make certain the correct amount of memory has been used */
+    /* 22 elements allocated = 4+5+6+7 elements for each array position */
+    VERIFY(mem_used, 22 * sizeof(unsigned int), "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        if (wdata[i].len != rdata[i].len) {
+            TestErrPrintf("%d: VL data lengths don't match!, wdata[%d].len=%d, rdata[%d].len=%d\n", __LINE__,
+                          (int)i, (int)wdata[i].len, (int)i, (int)rdata[i].len);
+            continue;
+        } /* end if */
+        for (j = 0; j < rdata[i].len; j++) {
+            if (((unsigned int *)wdata[i].p)[j] != ((unsigned int *)rdata[i].p)[j]) {
+                TestErrPrintf("VL data values don't match!, wdata[%d].p[%d]=%d, rdata[%d].p[%d]=%d\n", (int)i,
+                              (int)j, (int)((unsigned int *)wdata[i].p)[j], (int)i, (int)j,
+                              (int)((unsigned int *)rdata[i].p)[j]);
+                continue;
+            } /* end if */
+        }     /* end for */
+    }         /* end for */
+
+    /* Reclaim the read VL data */
+    ret = H5Treclaim(tid1, sid1, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Make certain the VL memory has been freed (allocator total back to 0) */
+    VERIFY(mem_used, 0, "H5Treclaim");
+
+    /* Reclaim the write VL data */
+    ret = H5Treclaim(tid1, sid1, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close dataset transfer property list */
+    ret = H5Pclose(xfer_pid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end rewrite_vltypes_vlen_atomic() */
+
+/****************************************************************
+**
+**  test_vltypes_vlen_compound(): Test basic VL datatype code.
+**      Test VL datatypes of compound datatypes
+**
+****************************************************************/
+static void
+test_vltypes_vlen_compound(void)
+{
+    typedef struct { /* Struct that the VL sequences are composed of */
+        int   i;
+        float f;
+    } s1;
+    hvl_t    wdata[SPACE1_DIM1]; /* Information to write */
+    hvl_t    rdata[SPACE1_DIM1]; /* Information read in */
+    hid_t    fid1;               /* HDF5 File IDs */
+    hid_t    dataset;            /* Dataset ID */
+    hid_t    sid1;               /* Dataspace ID */
+    hid_t    tid1, tid2;         /* Datatype IDs */
+    hid_t    xfer_pid;           /* Dataset transfer property list ID */
+    hsize_t  dims1[] = {SPACE1_DIM1};
+    hsize_t  size;               /* Number of bytes which will be used */
+    unsigned i, j;               /* counting variables */
+    size_t   mem_used = 0;       /* Memory used during allocation */
+    herr_t   ret;                /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Basic Compound VL Datatype Functionality\n"));
+
+    /* Allocate and initialize VL data to write */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        wdata[i].p   = HDmalloc((i + 1) * sizeof(s1));
+        wdata[i].len = i + 1;
+        for (j = 0; j < (i + 1); j++) {
+            ((s1 *)wdata[i].p)[j].i = (int)(i * 10 + j);
+            ((s1 *)wdata[i].p)[j].f = (float)(i * 20 + j) / 3.0F;
+        } /* end for */
+    }     /* end for */
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create the base compound type */
+    tid2 = H5Tcreate(H5T_COMPOUND, sizeof(s1));
+    CHECK(tid2, FAIL, "H5Tcreate");
+
+    /* Insert fields */
+    ret = H5Tinsert(tid2, "i", HOFFSET(s1, i), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+    ret = H5Tinsert(tid2, "f", HOFFSET(s1, f), H5T_NATIVE_FLOAT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Create a datatype to refer to (VL sequences of the compound type) */
+    tid1 = H5Tvlen_create(tid2);
+    CHECK(tid1, FAIL, "H5Tvlen_create");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, "Dataset1", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Change to the custom memory allocation routines for reading VL data,
+     * so the test can verify exactly how much memory the library allocates */
+    xfer_pid = H5Pcreate(H5P_DATASET_XFER);
+    CHECK(xfer_pid, FAIL, "H5Pcreate");
+
+    ret = H5Pset_vlen_mem_manager(xfer_pid, test_vltypes_alloc_custom, &mem_used, test_vltypes_free_custom,
+                                  &mem_used);
+    CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");
+
+    /* Make certain the correct amount of memory will be used */
+    ret = H5Dvlen_get_buf_size(dataset, tid1, sid1, &size);
+    CHECK(ret, FAIL, "H5Dvlen_get_buf_size");
+
+    /* 10 elements allocated = 1 + 2 + 3 + 4 elements for each array position */
+    VERIFY(size, ((SPACE1_DIM1 * (SPACE1_DIM1 + 1)) / 2) * sizeof(s1), "H5Dvlen_get_buf_size");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Make certain the correct amount of memory has been used */
+    /* 10 elements allocated = 1 + 2 + 3 + 4 elements for each array position */
+    VERIFY(mem_used, ((SPACE1_DIM1 * (SPACE1_DIM1 + 1)) / 2) * sizeof(s1), "H5Dread");
+
+    /* Compare data read in (both fields of every sequence element) */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        if (wdata[i].len != rdata[i].len) {
+            TestErrPrintf("%d: VL data length don't match!, wdata[%d].len=%d, rdata[%d].len=%d\n", __LINE__,
+                          (int)i, (int)wdata[i].len, (int)i, (int)rdata[i].len);
+            continue;
+        } /* end if */
+        for (j = 0; j < rdata[i].len; j++) {
+            if (((s1 *)wdata[i].p)[j].i != ((s1 *)rdata[i].p)[j].i) {
+                TestErrPrintf("VL data values don't match!, wdata[%d].p[%d].i=%d, rdata[%d].p[%d].i=%d\n",
+                              (int)i, (int)j, (int)((s1 *)wdata[i].p)[j].i, (int)i, (int)j,
+                              (int)((s1 *)rdata[i].p)[j].i);
+                continue;
+            } /* end if */
+            if (!H5_FLT_ABS_EQUAL(((s1 *)wdata[i].p)[j].f, ((s1 *)rdata[i].p)[j].f)) {
+                TestErrPrintf("VL data values don't match!, wdata[%d].p[%d].f=%f, rdata[%d].p[%d].f=%f\n",
+                              (int)i, (int)j, (double)((s1 *)wdata[i].p)[j].f, (int)i, (int)j,
+                              (double)((s1 *)rdata[i].p)[j].f);
+                continue;
+            } /* end if */
+        }     /* end for */
+    }         /* end for */
+
+    /* Reclaim the VL data */
+    ret = H5Treclaim(tid1, sid1, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Make certain the VL memory has been freed (allocator total back to 0) */
+    VERIFY(mem_used, 0, "H5Treclaim");
+
+    /* Reclaim the write VL data */
+    ret = H5Treclaim(tid1, sid1, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close dataset transfer property list */
+    ret = H5Pclose(xfer_pid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_vltypes_vlen_compound() */
+
+/****************************************************************
+**
+**  rewrite_vltypes_vlen_compound(): Check memory leak for basic VL datatype.
+**      Checks memory leak for VL datatypes of compound datatypes
+**
+****************************************************************/
+static void
+rewrite_vltypes_vlen_compound(void)
+{
+    typedef struct { /* Struct that the VL sequences are composed of */
+        int   i;
+        float f;
+    } s1;
+    hvl_t    wdata[SPACE1_DIM1]; /* Information to write */
+    hvl_t    rdata[SPACE1_DIM1]; /* Information read in */
+    hid_t    fid1;               /* HDF5 File IDs */
+    hid_t    dataset;            /* Dataset ID */
+    hid_t    sid1;               /* Dataspace ID */
+    hid_t    tid1, tid2;         /* Datatype IDs */
+    hid_t    xfer_pid;           /* Dataset transfer property list ID */
+    hsize_t  size;               /* Number of bytes which will be used */
+    unsigned i, j;               /* counting variables */
+    size_t   mem_used = 0;       /* Memory used during allocation */
+    unsigned increment = 4;      /* New sequences are longer: lengths 4..7 (22 elements total) */
+    herr_t   ret;                /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Check Memory Leak for Basic Compound VL Datatype Functionality\n"));
+
+    /* Allocate and initialize VL data to write */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        wdata[i].p   = HDmalloc((i + increment) * sizeof(s1));
+        wdata[i].len = i + increment;
+        for (j = 0; j < (i + increment); j++) {
+            ((s1 *)wdata[i].p)[j].i = (int)(i * 40 + j);
+            ((s1 *)wdata[i].p)[j].f = (float)(i * 60 + j) / 3.0F;
+        } /* end for */
+    }     /* end for */
+
+    /* Open the file created by test_vltypes_vlen_compound() */
+    fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fopen");
+
+    /* Create the base compound type */
+    tid2 = H5Tcreate(H5T_COMPOUND, sizeof(s1));
+    CHECK(tid2, FAIL, "H5Tcreate");
+
+    ret = H5Tinsert(tid2, "i", HOFFSET(s1, i), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+    ret = H5Tinsert(tid2, "f", HOFFSET(s1, f), H5T_NATIVE_FLOAT);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Create a datatype to refer to (VL sequences of the compound type) */
+    tid1 = H5Tvlen_create(tid2);
+    CHECK(tid1, FAIL, "H5Tvlen_create");
+
+    /* Open the existing dataset */
+    dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+    /* Get the dataset's dataspace */
+    sid1 = H5Dget_space(dataset);
+    CHECK(sid1, FAIL, "H5Dget_space");
+
+    /* Overwrite the dataset with the longer sequences */
+    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Change to the custom memory allocation routines for reading VL data,
+     * so allocation totals can be checked */
+    xfer_pid = H5Pcreate(H5P_DATASET_XFER);
+    CHECK(xfer_pid, FAIL, "H5Pcreate");
+
+    ret = H5Pset_vlen_mem_manager(xfer_pid, test_vltypes_alloc_custom, &mem_used, test_vltypes_free_custom,
+                                  &mem_used);
+    CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");
+
+    /* Make certain the correct amount of memory will be used */
+    ret = H5Dvlen_get_buf_size(dataset, tid1, sid1, &size);
+    CHECK(ret, FAIL, "H5Dvlen_get_buf_size");
+
+    /* 22 elements allocated = 4 + 5 + 6 + 7 elements for each array position */
+    VERIFY(size, 22 * sizeof(s1), "H5Dvlen_get_buf_size");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Make certain the correct amount of memory has been used */
+    /* 22 elements allocated = 4 + 5 + 6 + 7 elements for each array position */
+    VERIFY(mem_used, 22 * sizeof(s1), "H5Dread");
+
+    /* Compare data read in (both fields of every sequence element) */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        if (wdata[i].len != rdata[i].len) {
+            TestErrPrintf("%d: VL data length don't match!, wdata[%d].len=%d, rdata[%d].len=%d\n", __LINE__,
+                          (int)i, (int)wdata[i].len, (int)i, (int)rdata[i].len);
+            continue;
+        } /* end if */
+        for (j = 0; j < rdata[i].len; j++) {
+            if (((s1 *)wdata[i].p)[j].i != ((s1 *)rdata[i].p)[j].i) {
+                TestErrPrintf("VL data values don't match!, wdata[%d].p[%d].i=%d, rdata[%d].p[%d].i=%d\n",
+                              (int)i, (int)j, (int)((s1 *)wdata[i].p)[j].i, (int)i, (int)j,
+                              (int)((s1 *)rdata[i].p)[j].i);
+                continue;
+            } /* end if */
+            if (!H5_FLT_ABS_EQUAL(((s1 *)wdata[i].p)[j].f, ((s1 *)rdata[i].p)[j].f)) {
+                TestErrPrintf("VL data values don't match!, wdata[%d].p[%d].f=%f, rdata[%d].p[%d].f=%f\n",
+                              (int)i, (int)j, (double)((s1 *)wdata[i].p)[j].f, (int)i, (int)j,
+                              (double)((s1 *)rdata[i].p)[j].f);
+                continue;
+            } /* end if */
+        }     /* end for */
+    }         /* end for */
+
+    /* Reclaim the VL data */
+    ret = H5Treclaim(tid1, sid1, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Make certain the VL memory has been freed (allocator total back to 0) */
+    VERIFY(mem_used, 0, "H5Treclaim");
+
+    /* Reclaim the write VL data */
+    ret = H5Treclaim(tid1, sid1, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close dataset transfer property list */
+    ret = H5Pclose(xfer_pid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end rewrite_vltypes_vlen_compound() */
+
+/****************************************************************
+**
+**  test_vltypes_compound_vlen_vlen(): Test basic VL datatype code.
+**      Tests compound datatypes with VL datatypes of VL datatypes.
+**
+****************************************************************/
+static void
+test_vltypes_compound_vlen_vlen(void)
+{
+    typedef struct { /* Struct that the compound type are composed of */
+        int   i;
+        float f;
+        hvl_t v;
+    } s1;
+    s1      *wdata;            /* data to write */
+    s1      *rdata;            /* data to read */
+    hid_t    fid1;             /* HDF5 File IDs */
+    hid_t    dataset;          /* Dataset ID */
+    hid_t    sid1;             /* Dataspace ID */
+    hid_t    tid1, tid2, tid3; /* Datatype IDs */
+    hsize_t  dims1[] = {SPACE3_DIM1};
+    unsigned i, j, k;          /* counting variables */
+    hvl_t   *t1, *t2;          /* Temporary pointer to VL information */
+    herr_t   ret;              /* Generic return value */
+
+    /* Output message about test being performed.  (Fixed copy-pasted text:
+     * this test exercises VL datatypes of VL datatypes inside a compound,
+     * not a VL atomic component.) */
+    MESSAGE(5, ("Testing Compound Datatypes with VL Datatypes of VL Datatypes Functionality\n"));
+
+    /* Allocate and initialize VL data to write */
+    wdata = (s1 *)HDmalloc(sizeof(s1) * SPACE3_DIM1);
+    CHECK_PTR(wdata, "HDmalloc");
+    rdata = (s1 *)HDmalloc(sizeof(s1) * SPACE3_DIM1);
+    CHECK_PTR(rdata, "HDmalloc");
+    for (i = 0; i < SPACE3_DIM1; i++) {
+        wdata[i].i     = (int)(i * 10);
+        wdata[i].f     = (float)(i * 20) / 3.0F;
+        wdata[i].v.p   = HDmalloc((i + L1_INCM) * sizeof(hvl_t));
+        wdata[i].v.len = i + L1_INCM;
+        for (t1 = (hvl_t *)((wdata[i].v).p), j = 0; j < (i + L1_INCM); j++, t1++) {
+            t1->p   = HDmalloc((j + L2_INCM) * sizeof(unsigned int));
+            t1->len = j + L2_INCM;
+            for (k = 0; k < j + L2_INCM; k++)
+                ((unsigned int *)t1->p)[k] = i * 100 + j * 10 + k;
+        } /* end for */
+    }     /* end for */
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid1 = H5Screate_simple(SPACE3_RANK, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create the innermost VL datatype (sequence of unsigned int) */
+    tid3 = H5Tvlen_create(H5T_NATIVE_UINT);
+    CHECK(tid3, FAIL, "H5Tvlen_create");
+
+    /* Create the outer VL datatype (VL sequence of VL sequences) */
+    tid1 = H5Tvlen_create(tid3);
+    CHECK(tid1, FAIL, "H5Tvlen_create");
+
+    /* Create the base compound type */
+    tid2 = H5Tcreate(H5T_COMPOUND, sizeof(s1));
+    CHECK(tid2, FAIL, "H5Tcreate");
+
+    /* Insert fields */
+    ret = H5Tinsert(tid2, "i", HOFFSET(s1, i), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+    ret = H5Tinsert(tid2, "f", HOFFSET(s1, f), H5T_NATIVE_FLOAT);
+    CHECK(ret, FAIL, "H5Tinsert");
+    ret = H5Tinsert(tid2, "v", HOFFSET(s1, v), tid1);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, "Dataset1", tid2, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Re-open the file read-only to verify the data round-trips */
+    fid1 = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fopen");
+
+    /* Open a dataset */
+    dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE3_DIM1; i++) {
+        if (wdata[i].i != rdata[i].i) {
+            TestErrPrintf("Integer components don't match!, wdata[%d].i=%d, rdata[%d].i=%d\n", (int)i,
+                          (int)wdata[i].i, (int)i, (int)rdata[i].i);
+            continue;
+        } /* end if */
+        if (!H5_FLT_ABS_EQUAL(wdata[i].f, rdata[i].f)) {
+            TestErrPrintf("Float components don't match!, wdata[%d].f=%f, rdata[%d].f=%f\n", (int)i,
+                          (double)wdata[i].f, (int)i, (double)rdata[i].f);
+            continue;
+        } /* end if */
+
+        if (wdata[i].v.len != rdata[i].v.len) {
+            TestErrPrintf("%d: VL data length don't match!, wdata[%d].v.len=%d, rdata[%d].v.len=%d\n",
+                          __LINE__, (int)i, (int)wdata[i].v.len, (int)i, (int)rdata[i].v.len);
+            continue;
+        } /* end if */
+
+        /* Walk the outer VL sequences in lockstep and compare the nested
+         * VL sequences element by element */
+        for (t1 = (hvl_t *)(wdata[i].v.p), t2 = (hvl_t *)(rdata[i].v.p), j = 0; j < rdata[i].v.len;
+             j++, t1++, t2++) {
+            if (t1->len != t2->len) {
+                TestErrPrintf("%d: VL data length don't match!, i=%d, j=%d, t1->len=%d, t2->len=%d\n",
+                              __LINE__, (int)i, (int)j, (int)t1->len, (int)t2->len);
+                continue;
+            } /* end if */
+            for (k = 0; k < t2->len; k++) {
+                if (((unsigned int *)t1->p)[k] != ((unsigned int *)t2->p)[k]) {
+                    TestErrPrintf("VL data values don't match!, t1->p[%d]=%d, t2->p[%d]=%d\n", (int)k,
+                                  (int)((unsigned int *)t1->p)[k], (int)k, (int)((unsigned int *)t2->p)[k]);
+                    continue;
+                } /* end if */
+            }     /* end for */
+        }         /* end for */
+    }             /* end for */
+
+    /* Reclaim the VL data */
+    ret = H5Treclaim(tid2, sid1, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Reclaim the write VL data */
+    ret = H5Treclaim(tid2, sid1, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid3);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Release buffers */
+    HDfree(wdata);
+    HDfree(rdata);
+} /* end test_vltypes_compound_vlen_vlen() */
+
+/****************************************************************
+**
+**  test_vltypes_compound_vlstr(): Test VL datatype code.
+**      Tests VL datatypes of compound datatypes with VL string.
+**      Dataset is extensible chunked, and data is rewritten with
+**      shorter VL data.
+**
+****************************************************************/
+static void
+test_vltypes_compound_vlstr(void)
+{
+    typedef enum { red, blue, green } e1;
+    typedef struct {
+        char *string;
+        e1    color;
+    } s2;
+    typedef struct { /* Struct that the compound type are composed of */
+        hvl_t v;
+    } s1;
+    s1      wdata[SPACE1_DIM1];                 /* data to write */
+    s1      wdata2[SPACE1_DIM1];                /* data to write */
+    s1      rdata[SPACE1_DIM1];                 /* data to read */
+    s1      rdata2[SPACE1_DIM1];                /* data to read */
+    char    str[64] = "a\0";
+    hid_t   fid1;                               /* HDF5 File IDs */
+    hid_t   dataset, dset2;                     /* Dataset ID */
+    hid_t   sid1, sid2, filespace, filespace2;  /* Dataspace ID */
+    hid_t   tid1, tid2, tid3, tid4, tid5;       /* Datatype IDs */
+    hid_t   cparms;
+    hsize_t dims1[]      = {SPACE1_DIM1};
+    hsize_t chunk_dims[] = {SPACE1_DIM1 / 2};
+    hsize_t maxdims[]    = {H5S_UNLIMITED};
+    hsize_t size[]       = {SPACE1_DIM1};
+    hsize_t offset[]     = {0};
+    unsigned i, j;                              /* counting variables */
+    s2      *t1, *t2;                           /* Temporary pointer to VL information */
+    int      val;
+    herr_t   ret;                               /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing VL Datatype of Compound Datatype with VL String Functionality\n"));
+
+    /* Allocate and initialize VL data to write; each element's string grows
+     * by one 'm' so successive entries have strictly longer strings */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        wdata[i].v.p   = (s2 *)HDmalloc((i + L3_INCM) * sizeof(s2));
+        wdata[i].v.len = i + L3_INCM;
+        for (t1 = (s2 *)((wdata[i].v).p), j = 0; j < (i + L3_INCM); j++, t1++) {
+            HDstrcat(str, "m");
+            t1->string = (char *)HDmalloc(HDstrlen(str) * sizeof(char) + 1);
+            HDstrcpy(t1->string, str);
+            /*t1->color = red;*/
+            t1->color = blue;
+        }
+    } /* end for */
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets (extensible along the one dimension) */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, maxdims);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create a VL string type*/
+    tid4 = H5Tcopy(H5T_C_S1);
+    CHECK(tid4, FAIL, "H5Tcopy");
+    ret = H5Tset_size(tid4, H5T_VARIABLE);
+    CHECK(ret, FAIL, "H5Tset_size");
+
+    /* Create an enum type (return value was previously unchecked) */
+    tid3 = H5Tenum_create(H5T_STD_I32LE);
+    CHECK(tid3, FAIL, "H5Tenum_create");
+    val = 0;
+    ret = H5Tenum_insert(tid3, "RED", &val);
+    CHECK(ret, FAIL, "H5Tenum_insert");
+    val = 1;
+    ret = H5Tenum_insert(tid3, "BLUE", &val);
+    CHECK(ret, FAIL, "H5Tenum_insert");
+    val = 2;
+    ret = H5Tenum_insert(tid3, "GREEN", &val);
+    CHECK(ret, FAIL, "H5Tenum_insert");
+
+    /* Create the first layer compound type */
+    tid5 = H5Tcreate(H5T_COMPOUND, sizeof(s2));
+    CHECK(tid5, FAIL, "H5Tcreate");
+    /* Insert fields */
+    ret = H5Tinsert(tid5, "string", HOFFSET(s2, string), tid4);
+    CHECK(ret, FAIL, "H5Tinsert");
+    /* Insert fields */
+    ret = H5Tinsert(tid5, "enumerate", HOFFSET(s2, color), tid3);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Create a VL datatype of first layer compound type */
+    tid1 = H5Tvlen_create(tid5);
+    CHECK(tid1, FAIL, "H5Tvlen_create");
+
+    /* Create the base compound type */
+    tid2 = H5Tcreate(H5T_COMPOUND, sizeof(s1));
+    CHECK(tid2, FAIL, "H5Tcreate");
+
+    /* Insert fields */
+    ret = H5Tinsert(tid2, "v", HOFFSET(s1, v), tid1);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Modify dataset creation properties, i.e. enable chunking
+     * (return value was previously unchecked) */
+    cparms = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(cparms, FAIL, "H5Pcreate");
+    ret = H5Pset_chunk(cparms, SPACE1_RANK, chunk_dims);
+    CHECK(ret, FAIL, "H5Pset_chunk");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, "Dataset1", tid2, sid1, H5P_DEFAULT, cparms, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Extend the dataset. This call assures that dataset is 4.*/
+    ret = H5Dset_extent(dataset, size);
+    CHECK(ret, FAIL, "H5Dset_extent");
+
+    /* Select a hyperslab */
+    filespace = H5Dget_space(dataset);
+    ret       = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, dims1, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid2, sid1, filespace, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
+    CHECK(ret, FAIL, "H5Fflush");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(filespace);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid4);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid5);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid3);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close Property list */
+    ret = H5Pclose(cparms);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+    /* Open file */
+    fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fopen");
+
+    /* Open the dataset */
+    dset2 = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+    CHECK(dset2, FAIL, "H5Dopen2");
+
+    /* Get the data type */
+    tid2 = H5Dget_type(dset2);
+    CHECK(tid2, FAIL, "H5Dget_type");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dset2, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        if (wdata[i].v.len != rdata[i].v.len) {
+            TestErrPrintf("%d: VL data length don't match!, wdata[%d].v.len=%d, rdata[%d].v.len=%d\n",
+                          __LINE__, (int)i, (int)wdata[i].v.len, (int)i, (int)rdata[i].v.len);
+            continue;
+        } /* end if */
+
+        for (t1 = (s2 *)(wdata[i].v.p), t2 = (s2 *)(rdata[i].v.p), j = 0; j < rdata[i].v.len;
+             j++, t1++, t2++) {
+            if (HDstrcmp(t1->string, t2->string) != 0) {
+                TestErrPrintf("VL data values don't match!, t1->string=%s, t2->string=%s\n", t1->string,
+                              t2->string);
+                continue;
+            } /* end if */
+            if (t1->color != t2->color) {
+                TestErrPrintf("VL data values don't match!, t1->color=%d, t2->color=%d\n", t1->color,
+                              t2->color);
+                continue;
+            } /* end if */
+        }     /* end for */
+    }         /* end for */
+
+    /* Reclaim the VL data */
+    ret = H5Treclaim(tid2, sid1, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Reclaim the write VL data */
+    ret = H5Treclaim(tid2, sid1, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Use this part for new data */
+    HDstrcpy(str, "bbbbbbbb\0");
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        wdata2[i].v.p   = (s2 *)HDmalloc((i + 1) * sizeof(s2));
+        wdata2[i].v.len = i + 1;
+        for (t1 = (s2 *)(wdata2[i].v).p, j = 0; j < i + 1; j++, t1++) {
+            HDstrcat(str, "pp");
+            t1->string = (char *)HDmalloc(HDstrlen(str) * sizeof(char) + 1);
+            HDstrcpy(t1->string, str);
+            t1->color = green;
+        }
+    } /* end for */
+
+    /* Select a hyperslab */
+    filespace2 = H5Dget_space(dset2);
+    ret        = H5Sselect_hyperslab(filespace2, H5S_SELECT_SET, offset, NULL, dims1, NULL);
+    CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+    /* Create dataspace for datasets (previously CHECKed sid1 by mistake) */
+    sid2 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid2, FAIL, "H5Screate_simple");
+
+    /* Write dataset to disk (pass the array directly, consistent with the
+     * other H5Dwrite calls; &wdata2 and wdata2 have the same address) */
+    ret = H5Dwrite(dset2, tid2, sid2, filespace2, H5P_DEFAULT, wdata2);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dset2, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata2);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        if (wdata2[i].v.len != rdata2[i].v.len) {
+            TestErrPrintf("%d: VL data length don't match!, wdata2[%d].v.len=%d, rdata2[%d].v.len=%d\n",
+                          __LINE__, (int)i, (int)wdata2[i].v.len, (int)i, (int)rdata2[i].v.len);
+            continue;
+        } /* end if */
+
+        for (t1 = (s2 *)(wdata2[i].v.p), t2 = (s2 *)(rdata2[i].v.p), j = 0; j < rdata2[i].v.len;
+             j++, t1++, t2++) {
+            if (HDstrcmp(t1->string, t2->string) != 0) {
+                TestErrPrintf("VL data values don't match!, t1->string=%s, t2->string=%s\n", t1->string,
+                              t2->string);
+                continue;
+            } /* end if */
+            if (t1->color != t2->color) {
+                TestErrPrintf("VL data values don't match!, t1->color=%d, t2->color=%d\n", t1->color,
+                              t2->color);
+                continue;
+            } /* end if */
+        }     /* end for */
+    }         /* end for */
+
+    /* Reclaim the write VL data */
+    ret = H5Treclaim(tid2, sid1, H5P_DEFAULT, wdata2);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Reclaim the VL data */
+    ret = H5Treclaim(tid2, sid1, H5P_DEFAULT, rdata2);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    ret = H5Dclose(dset2);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(filespace2);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+} /* end test_vltypes_compound_vlstr() */
+
+/****************************************************************
+**
+**  test_vltypes_compound_vlen_atomic(): Test basic VL datatype code.
+**      Tests compound datatypes with VL datatypes of atomic datatypes.
+**
+****************************************************************/
+static void
+test_vltypes_compound_vlen_atomic(void)
+{
+    typedef struct { /* Struct that the VL sequences are composed of */
+        int   i;
+        float f;
+        hvl_t v;
+    } s1;
+    s1      wdata[SPACE1_DIM1]; /* Information to write */
+    s1      rdata[SPACE1_DIM1]; /* Information read in */
+    s1      fill;               /* Fill value */
+    hid_t   fid1;               /* HDF5 File IDs */
+    hid_t   dataset;            /* Dataset ID */
+    hid_t   sid1;               /* Dataspace ID */
+    hid_t   tid1, tid2;         /* Datatype IDs */
+    hid_t   xfer_pid;           /* Dataset transfer property list ID */
+    hid_t   dcpl_pid;           /* Dataset creation property list ID */
+    hsize_t dims1[] = {SPACE1_DIM1};
+    hsize_t size;               /* Number of bytes which will be used */
+    unsigned i, j;              /* counting variables */
+    size_t   mem_used = 0;      /* Memory used during allocation */
+    herr_t   ret;               /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5, ("Testing Compound Datatypes with VL Atomic Datatype Component Functionality\n"));
+
+    /* Allocate and initialize VL data to write */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        wdata[i].i     = (int)(i * 10);
+        wdata[i].f     = (float)(i * 20) / 3.0F;
+        wdata[i].v.p   = HDmalloc((i + 1) * sizeof(unsigned int));
+        wdata[i].v.len = i + 1;
+        for (j = 0; j < (i + 1); j++)
+            ((unsigned int *)wdata[i].v.p)[j] = i * 10 + j;
+    } /* end for */
+
+    /* Create file */
+    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fcreate");
+
+    /* Create dataspace for datasets */
+    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+    CHECK(sid1, FAIL, "H5Screate_simple");
+
+    /* Create a VL datatype to refer to */
+    tid1 = H5Tvlen_create(H5T_NATIVE_UINT);
+    CHECK(tid1, FAIL, "H5Tvlen_create");
+
+    /* Create the base compound type */
+    tid2 = H5Tcreate(H5T_COMPOUND, sizeof(s1));
+    CHECK(tid2, FAIL, "H5Tcreate");
+
+    /* Insert fields */
+    ret = H5Tinsert(tid2, "i", HOFFSET(s1, i), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+    ret = H5Tinsert(tid2, "f", HOFFSET(s1, f), H5T_NATIVE_FLOAT);
+    CHECK(ret, FAIL, "H5Tinsert");
+    ret = H5Tinsert(tid2, "v", HOFFSET(s1, v), tid1);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Create a dataset */
+    dataset = H5Dcreate2(fid1, "Dataset1", tid2, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Change to the custom memory allocation routines for reading VL data */
+    xfer_pid = H5Pcreate(H5P_DATASET_XFER);
+    CHECK(xfer_pid, FAIL, "H5Pcreate");
+
+    ret = H5Pset_vlen_mem_manager(xfer_pid, test_vltypes_alloc_custom, &mem_used, test_vltypes_free_custom,
+                                  &mem_used);
+    CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");
+
+    /* Make certain the correct amount of memory will be used */
+    ret = H5Dvlen_get_buf_size(dataset, tid2, sid1, &size);
+    CHECK(ret, FAIL, "H5Dvlen_get_buf_size");
+
+    /* 10 elements allocated = 1 + 2 + 3 + 4 elements for each array position */
+    VERIFY(size, ((SPACE1_DIM1 * (SPACE1_DIM1 + 1)) / 2) * sizeof(unsigned int), "H5Dvlen_get_buf_size");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid2, H5S_ALL, H5S_ALL, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Make certain the correct amount of memory has been used */
+    /* 10 elements allocated = 1 + 2 + 3 + 4 elements for each array position */
+    VERIFY(mem_used, ((SPACE1_DIM1 * (SPACE1_DIM1 + 1)) / 2) * sizeof(unsigned int), "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        if (wdata[i].i != rdata[i].i) {
+            TestErrPrintf("Integer components don't match!, wdata[%d].i=%d, rdata[%d].i=%d\n", (int)i,
+                          (int)wdata[i].i, (int)i, (int)rdata[i].i);
+            continue;
+        } /* end if */
+        if (!H5_FLT_ABS_EQUAL(wdata[i].f, rdata[i].f)) {
+            TestErrPrintf("Float components don't match!, wdata[%d].f=%f, rdata[%d].f=%f\n", (int)i,
+                          (double)wdata[i].f, (int)i, (double)rdata[i].f);
+            continue;
+        } /* end if */
+        if (wdata[i].v.len != rdata[i].v.len) {
+            TestErrPrintf("%d: VL data length don't match!, wdata[%d].v.len=%d, rdata[%d].v.len=%d\n",
+                          __LINE__, (int)i, (int)wdata[i].v.len, (int)i, (int)rdata[i].v.len);
+            continue;
+        } /* end if */
+        for (j = 0; j < rdata[i].v.len; j++) {
+            if (((unsigned int *)wdata[i].v.p)[j] != ((unsigned int *)rdata[i].v.p)[j]) {
+                TestErrPrintf("VL data values don't match!, wdata[%d].v.p[%d]=%d, rdata[%d].v.p[%d]=%d\n",
+                              (int)i, (int)j, (int)((unsigned int *)wdata[i].v.p)[j], (int)i, (int)j,
+                              (int)((unsigned int *)rdata[i].v.p)[j]);
+                continue;
+            } /* end if */
+        }     /* end for */
+    }         /* end for */
+
+    /* Reclaim the VL data */
+    ret = H5Treclaim(tid2, sid1, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Make certain the VL memory has been freed */
+    VERIFY(mem_used, 0, "H5Treclaim");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Create a second dataset, with a fill value */
+    dcpl_pid = H5Pcreate(H5P_DATASET_CREATE);
+    CHECK(dcpl_pid, FAIL, "H5Pcreate");
+
+    /* Set the fill value for the second dataset */
+    HDmemset(&fill, 0, sizeof(s1));
+    ret = H5Pset_fill_value(dcpl_pid, tid2, &fill);
+    CHECK(ret, FAIL, "H5Pset_fill_value");
+
+    /* Pass dcpl_pid as the DCPL so the fill value above is actually used;
+     * previously H5P_DEFAULT was passed and dcpl_pid was dead code */
+    dataset = H5Dcreate2(fid1, "Dataset2", tid2, sid1, H5P_DEFAULT, dcpl_pid, H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dcreate2");
+
+    /* Close dataset creation property list */
+    ret = H5Pclose(dcpl_pid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Read from dataset before writing data */
+    ret = H5Dread(dataset, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Check data read in: all elements should equal the (all-zero) fill value */
+    for (i = 0; i < SPACE1_DIM1; i++)
+        if (rdata[i].i != 0 || !H5_FLT_ABS_EQUAL(rdata[i].f, 0.0F) || rdata[i].v.len != 0 ||
+            rdata[i].v.p != NULL)
+            TestErrPrintf(
+                "VL doesn't match!, rdata[%d].i=%d, rdata[%d].f=%f, rdata[%d].v.len=%u, rdata[%d].v.p=%p\n",
+                (int)i, rdata[i].i, (int)i, (double)rdata[i].f, (int)i, (unsigned)rdata[i].v.len, (int)i,
+                rdata[i].v.p);
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        if (wdata[i].i != rdata[i].i) {
+            TestErrPrintf("Integer components don't match!, wdata[%d].i=%d, rdata[%d].i=%d\n", (int)i,
+                          (int)wdata[i].i, (int)i, (int)rdata[i].i);
+            continue;
+        } /* end if */
+        if (!H5_FLT_ABS_EQUAL(wdata[i].f, rdata[i].f)) {
+            TestErrPrintf("Float components don't match!, wdata[%d].f=%f, rdata[%d].f=%f\n", (int)i,
+                          (double)wdata[i].f, (int)i, (double)rdata[i].f);
+            continue;
+        } /* end if */
+        if (wdata[i].v.len != rdata[i].v.len) {
+            TestErrPrintf("%d: VL data length don't match!, wdata[%d].v.len=%d, rdata[%d].v.len=%d\n",
+                          __LINE__, (int)i, (int)wdata[i].v.len, (int)i, (int)rdata[i].v.len);
+            continue;
+        } /* end if */
+        for (j = 0; j < rdata[i].v.len; j++) {
+            if (((unsigned int *)wdata[i].v.p)[j] != ((unsigned int *)rdata[i].v.p)[j]) {
+                TestErrPrintf("VL data values don't match!, wdata[%d].v.p[%d]=%d, rdata[%d].v.p[%d]=%d\n",
+                              (int)i, (int)j, (int)((unsigned int *)wdata[i].v.p)[j], (int)i, (int)j,
+                              (int)((unsigned int *)rdata[i].v.p)[j]);
+                continue;
+            } /* end if */
+        }     /* end for */
+    }         /* end for */
+
+    /* Reclaim the VL data */
+    ret = H5Treclaim(tid2, sid1, H5P_DEFAULT, rdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Reclaim the write VL data */
+    ret = H5Treclaim(tid2, sid1, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Close datatype */
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close dataset transfer property list */
+    ret = H5Pclose(xfer_pid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end test_vltypes_compound_vlen_atomic() */
+
+/****************************************************************
+**
+**  rewrite_vltypes_compound_vlen_atomic(): Check memory leak for
+**      basic VL datatype code.
+**      Check memory leak for compound datatypes with VL datatypes
+**      of atomic datatypes.
+**
+**  NOTE(review): relies on "Dataset1" already existing in FILENAME,
+**  written by test_vltypes_compound_vlen_atomic() — confirm ordering.
+**
+****************************************************************/
+static void
+rewrite_vltypes_compound_vlen_atomic(void)
+{
+    typedef struct { /* Struct that the VL sequences are composed of */
+        int   i;
+        float f;
+        hvl_t v;
+    } s1;
+    s1      wdata[SPACE1_DIM1]; /* Information to write */
+    s1      rdata[SPACE1_DIM1]; /* Information read in */
+    hid_t   fid1;               /* HDF5 File IDs */
+    hid_t   dataset;            /* Dataset ID */
+    hid_t   sid1;               /* Dataspace ID */
+    hid_t   tid1, tid2;         /* Datatype IDs */
+    hid_t   xfer_pid;           /* Dataset transfer property list ID */
+    hsize_t size;               /* Number of bytes which will be used */
+    unsigned i, j;              /* counting variables */
+    size_t   mem_used = 0;      /* Memory used during allocation */
+    unsigned increment = 4;     /* VL lengths are i+4, longer than the original i+1 */
+    herr_t   ret;               /* Generic return value */
+
+    /* Output message about test being performed */
+    MESSAGE(5,
+            ("Checking memory leak for compound datatype with VL Atomic Datatype Component Functionality\n"));
+
+    /* Allocate and initialize VL data to write */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        wdata[i].i     = (int)(i * 40);
+        wdata[i].f     = (float)(i * 50) / 3.0F;
+        wdata[i].v.p   = HDmalloc((i + increment) * sizeof(unsigned int));
+        wdata[i].v.len = i + increment;
+        for (j = 0; j < (i + increment); j++)
+            ((unsigned int *)wdata[i].v.p)[j] = i * 60 + j;
+    } /* end for */
+
+    /* Open the existing file read-write (comment previously said "Create file") */
+    fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
+    CHECK(fid1, FAIL, "H5Fopen");
+
+    /* Create a VL datatype to refer to */
+    tid1 = H5Tvlen_create(H5T_NATIVE_UINT);
+    CHECK(tid1, FAIL, "H5Tvlen_create");
+
+    /* Create the base compound type */
+    tid2 = H5Tcreate(H5T_COMPOUND, sizeof(s1));
+    CHECK(tid2, FAIL, "H5Tcreate");
+
+    /* Insert fields */
+    ret = H5Tinsert(tid2, "i", HOFFSET(s1, i), H5T_NATIVE_INT);
+    CHECK(ret, FAIL, "H5Tinsert");
+    ret = H5Tinsert(tid2, "f", HOFFSET(s1, f), H5T_NATIVE_FLOAT);
+    CHECK(ret, FAIL, "H5Tinsert");
+    ret = H5Tinsert(tid2, "v", HOFFSET(s1, v), tid1);
+    CHECK(ret, FAIL, "H5Tinsert");
+
+    /* Open the existing dataset (comment previously said "Create a dataset") */
+    dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
+    CHECK(dataset, FAIL, "H5Dopen2");
+
+    /* Get the dataset's dataspace (comment previously said "Create dataspace") */
+    sid1 = H5Dget_space(dataset);
+    CHECK(sid1, FAIL, "H5Dget_space");
+
+    /* Write dataset to disk */
+    ret = H5Dwrite(dataset, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Dwrite");
+
+    /* Change to the custom memory allocation routines for reading VL data */
+    xfer_pid = H5Pcreate(H5P_DATASET_XFER);
+    CHECK(xfer_pid, FAIL, "H5Pcreate");
+
+    ret = H5Pset_vlen_mem_manager(xfer_pid, test_vltypes_alloc_custom, &mem_used, test_vltypes_free_custom,
+                                  &mem_used);
+    CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");
+
+    /* Make certain the correct amount of memory will be used */
+    ret = H5Dvlen_get_buf_size(dataset, tid2, sid1, &size);
+    CHECK(ret, FAIL, "H5Dvlen_get_buf_size");
+
+    /* 22 elements allocated = 4+5+6+7 elements for each array position */
+    VERIFY(size, 22 * sizeof(unsigned int), "H5Dvlen_get_buf_size");
+
+    /* Read dataset from disk */
+    ret = H5Dread(dataset, tid2, H5S_ALL, H5S_ALL, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Dread");
+
+    /* Make certain the correct amount of memory has been used */
+    /* 22 elements allocated = 4+5+6+7 elements for each array position */
+    VERIFY(mem_used, 22 * sizeof(unsigned int), "H5Dread");
+
+    /* Compare data read in */
+    for (i = 0; i < SPACE1_DIM1; i++) {
+        if (wdata[i].i != rdata[i].i) {
+            TestErrPrintf("Integer components don't match!, wdata[%d].i=%d, rdata[%d].i=%d\n", (int)i,
+                          (int)wdata[i].i, (int)i, (int)rdata[i].i);
+            continue;
+        } /* end if */
+        if (!H5_FLT_ABS_EQUAL(wdata[i].f, rdata[i].f)) {
+            TestErrPrintf("Float components don't match!, wdata[%d].f=%f, rdata[%d].f=%f\n", (int)i,
+                          (double)wdata[i].f, (int)i, (double)rdata[i].f);
+            continue;
+        } /* end if */
+        if (wdata[i].v.len != rdata[i].v.len) {
+            TestErrPrintf("%d: VL data length don't match!, wdata[%d].v.len=%d, rdata[%d].v.len=%d\n",
+                          __LINE__, (int)i, (int)wdata[i].v.len, (int)i, (int)rdata[i].v.len);
+            continue;
+        } /* end if */
+        for (j = 0; j < rdata[i].v.len; j++) {
+            if (((unsigned int *)wdata[i].v.p)[j] != ((unsigned int *)rdata[i].v.p)[j]) {
+                TestErrPrintf("VL data values don't match!, wdata[%d].v.p[%d]=%d, rdata[%d].v.p[%d]=%d\n",
+                              (int)i, (int)j, (int)((unsigned int *)wdata[i].v.p)[j], (int)i, (int)j,
+                              (int)((unsigned int *)rdata[i].v.p)[j]);
+                continue;
+            } /* end if */
+        }     /* end for */
+    }         /* end for */
+
+    /* Reclaim the VL data */
+    ret = H5Treclaim(tid2, sid1, xfer_pid, rdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Make certain the VL memory has been freed */
+    VERIFY(mem_used, 0, "H5Treclaim");
+
+    /* Reclaim the write VL data */
+    ret = H5Treclaim(tid2, sid1, H5P_DEFAULT, wdata);
+    CHECK(ret, FAIL, "H5Treclaim");
+
+    /* Close Dataset */
+    ret = H5Dclose(dataset);
+    CHECK(ret, FAIL, "H5Dclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid2);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close datatype */
+    ret = H5Tclose(tid1);
+    CHECK(ret, FAIL, "H5Tclose");
+
+    /* Close disk dataspace */
+    ret = H5Sclose(sid1);
+    CHECK(ret, FAIL, "H5Sclose");
+
+    /* Close dataset transfer property list */
+    ret = H5Pclose(xfer_pid);
+    CHECK(ret, FAIL, "H5Pclose");
+
+    /* Close file */
+    ret = H5Fclose(fid1);
+    CHECK(ret, FAIL, "H5Fclose");
+
+} /* end rewrite_vltypes_compound_vlen_atomic() */
+
+/****************************************************************
+**
+**  vlen_size_func(): Test basic VL datatype code.
+**      Tests VL datatype with VL datatypes of atomic datatypes.
+**
+**  Returns the n-th tetrahedral-style count used by the nested-VL
+**  tests: the running sum of triangular numbers T(1)..T(n), with
+**  T(1) = 1 returned for n <= 1.
+**
+****************************************************************/
+static size_t
+vlen_size_func(unsigned long n)
+{
+    size_t triangular = 1; /* running triangular number T(k) = 1 + 2 + ... + k */
+    size_t total      = 1; /* accumulated sum of triangular numbers */
+    size_t k;
+
+    for (k = 2; k <= n; k++) {
+        triangular += k;
+        total += triangular;
+    }
+
+    return total;
+}
+
/****************************************************************
**
** test_vltypes_vlen_vlen_atomic(): Test basic VL datatype code.
**      Tests a VL datatype whose base type is itself a VL
**      datatype of an atomic type (unsigned int).
**
**      Writes a nested-VL dataset ("Dataset1"), closes the
**      file, re-opens it read-only and reads the data back
**      through custom VL memory routines that track the number
**      of bytes outstanding.  Verifies the result of
**      H5Dvlen_get_buf_size(), the actual amount of memory
**      allocated during the read, the data values themselves,
**      and that H5Treclaim() releases all VL memory again.
**
****************************************************************/
static void
test_vltypes_vlen_vlen_atomic(void)
{
    hvl_t    wdata[SPACE1_DIM1]; /* Information to write */
    hvl_t    rdata[SPACE1_DIM1]; /* Information read in */
    hvl_t   *t1, *t2;            /* Temporary pointer to VL information */
    hid_t    fid1;               /* HDF5 File IDs */
    hid_t    dataset;            /* Dataset ID */
    hid_t    sid1;               /* Dataspace ID */
    hid_t    tid1, tid2;         /* Datatype IDs */
    hid_t    xfer_pid;           /* Dataset transfer property list ID */
    hsize_t  dims1[] = {SPACE1_DIM1};
    hsize_t  size;               /* Number of bytes which will be used */
    unsigned i, j, k;            /* counting variables */
    size_t   mem_used = 0;       /* Memory used during allocation */
    herr_t   ret;                /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing VL Datatypes with VL Atomic Datatype Component Functionality\n"));

    /* Allocate and initialize VL data to write:
     * element i holds (i + 1) inner VL sequences, and inner sequence j
     * holds (j + 1) unsigned ints with values i*100 + j*10 + k.
     */
    for (i = 0; i < SPACE1_DIM1; i++) {
        wdata[i].p = HDmalloc((i + 1) * sizeof(hvl_t));
        if (wdata[i].p == NULL) {
            TestErrPrintf("Cannot allocate memory for VL data! i=%u\n", i);
            return;
        } /* end if */
        wdata[i].len = i + 1;
        for (t1 = (hvl_t *)(wdata[i].p), j = 0; j < (i + 1); j++, t1++) {
            t1->p = HDmalloc((j + 1) * sizeof(unsigned int));
            if (t1->p == NULL) {
                TestErrPrintf("Cannot allocate memory for VL data! i=%u, j=%u\n", i, j);
                return;
            } /* end if */
            t1->len = j + 1;
            for (k = 0; k < (j + 1); k++)
                ((unsigned int *)t1->p)[k] = i * 100 + j * 10 + k;
        } /* end for */
    }     /* end for */

    /* Create file */
    fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(fid1, FAIL, "H5Fcreate");

    /* Create dataspace for datasets */
    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
    CHECK(sid1, FAIL, "H5Screate_simple");

    /* Create the inner VL datatype (VL of unsigned int) */
    tid1 = H5Tvlen_create(H5T_NATIVE_UINT);
    CHECK(tid1, FAIL, "H5Tvlen_create");

    /* Create the outer VL datatype (VL of VL of unsigned int) */
    tid2 = H5Tvlen_create(tid1);
    CHECK(tid2, FAIL, "H5Tvlen_create");

    /* Create a dataset */
    dataset = H5Dcreate2(fid1, "Dataset1", tid2, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dcreate2");

    /* Write dataset to disk */
    ret = H5Dwrite(dataset, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close datatype */
    ret = H5Tclose(tid2);
    CHECK(ret, FAIL, "H5Tclose");

    /* Close datatype */
    ret = H5Tclose(tid1);
    CHECK(ret, FAIL, "H5Tclose");

    /* Close disk dataspace */
    ret = H5Sclose(sid1);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close file, so the read below goes through a fresh file open */
    ret = H5Fclose(fid1);
    CHECK(ret, FAIL, "H5Fclose");

    /* Open file */
    fid1 = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
    CHECK(fid1, FAIL, "H5Fopen");

    /* Create dataspace for datasets */
    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
    CHECK(sid1, FAIL, "H5Screate_simple");

    /* Re-create the inner VL datatype (VL of unsigned int) */
    tid1 = H5Tvlen_create(H5T_NATIVE_UINT);
    CHECK(tid1, FAIL, "H5Tvlen_create");

    /* Re-create the outer VL datatype for the read */
    tid2 = H5Tvlen_create(tid1);
    CHECK(tid2, FAIL, "H5Tvlen_create");

    /* Open a dataset */
    dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dopen2");

    /* Change to the custom memory allocation routines for reading VL data;
     * both the alloc and free callbacks update 'mem_used' so the test can
     * track the number of VL bytes currently outstanding.
     */
    xfer_pid = H5Pcreate(H5P_DATASET_XFER);
    CHECK(xfer_pid, FAIL, "H5Pcreate");

    ret = H5Pset_vlen_mem_manager(xfer_pid, test_vltypes_alloc_custom, &mem_used, test_vltypes_free_custom,
                                  &mem_used);
    CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");

    /* Make certain the correct amount of memory was used */
    ret = H5Dvlen_get_buf_size(dataset, tid2, sid1, &size);
    CHECK(ret, FAIL, "H5Dvlen_get_buf_size");

    /* Expected size: DIM*(DIM+1)/2 hvl_t structs for the inner sequences
     * plus a tetrahedral number (vlen_size_func) of unsigned ints.
     * (The literal counts below assume SPACE1_DIM1 == 4 -- the formula
     * itself is general.)
     */
    /* 10 hvl_t elements allocated = 1 + 2 + 3 + 4 elements for each array position */
    /* 20 unsigned int elements allocated = 1 + 3 + 6 + 10 elements */
    VERIFY(size,
           (hsize_t)(((SPACE1_DIM1 * (SPACE1_DIM1 + 1)) / 2) * sizeof(hvl_t) +
                     vlen_size_func((unsigned long)SPACE1_DIM1) * sizeof(unsigned int)),
           "H5Dvlen_get_buf_size");

    /* Read dataset from disk */
    ret = H5Dread(dataset, tid2, H5S_ALL, H5S_ALL, xfer_pid, rdata);
    CHECK(ret, FAIL, "H5Dread");

    /* Make certain the correct amount of memory has been used
     * (same formula as the H5Dvlen_get_buf_size check above)
     */
    /* 10 hvl_t elements allocated = 1 + 2 + 3 + 4 elements for each array position */
    /* 20 unsigned int elements allocated = 1 + 3 + 6 + 10 elements */
    VERIFY(mem_used,
           (size_t)(((SPACE1_DIM1 * (SPACE1_DIM1 + 1)) / 2) * sizeof(hvl_t) +
                    vlen_size_func((unsigned long)SPACE1_DIM1) * sizeof(unsigned int)),
           "H5Dread");

    /* Compare data read in against what was written */
    for (i = 0; i < SPACE1_DIM1; i++) {
        if (wdata[i].len != rdata[i].len) {
            TestErrPrintf("%d: VL data length don't match!, wdata[%d].len=%d, rdata[%d].len=%d\n", __LINE__,
                          (int)i, (int)wdata[i].len, (int)i, (int)rdata[i].len);
            continue;
        } /* end if */
        for (t1 = (hvl_t *)wdata[i].p, t2 = (hvl_t *)(rdata[i].p), j = 0; j < rdata[i].len; j++, t1++, t2++) {
            if (t1->len != t2->len) {
                TestErrPrintf("%d: VL data length don't match!, i=%d, j=%d, t1->len=%d, t2->len=%d\n",
                              __LINE__, (int)i, (int)j, (int)t1->len, (int)t2->len);
                continue;
            } /* end if */
            for (k = 0; k < t2->len; k++) {
                if (((unsigned int *)t1->p)[k] != ((unsigned int *)t2->p)[k]) {
                    TestErrPrintf("VL data values don't match!, t1->p[%d]=%d, t2->p[%d]=%d\n", (int)k,
                                  (int)((unsigned int *)t1->p)[k], (int)k, (int)((unsigned int *)t2->p)[k]);
                    continue;
                } /* end if */
            }     /* end for */
        }         /* end for */
    }             /* end for */

    /* Reclaim all the (nested) VL data read in through the custom allocator */
    ret = H5Treclaim(tid2, sid1, xfer_pid, rdata);
    CHECK(ret, FAIL, "H5Treclaim");

    /* Make certain the VL memory has been freed */
    VERIFY(mem_used, 0, "H5Treclaim");

    /* Reclaim the write VL data (allocated with plain HDmalloc above) */
    ret = H5Treclaim(tid2, sid1, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Treclaim");

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close datatype */
    ret = H5Tclose(tid2);
    CHECK(ret, FAIL, "H5Tclose");

    /* Close datatype */
    ret = H5Tclose(tid1);
    CHECK(ret, FAIL, "H5Tclose");

    /* Close disk dataspace */
    ret = H5Sclose(sid1);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close dataset transfer property list */
    ret = H5Pclose(xfer_pid);
    CHECK(ret, FAIL, "H5Pclose");

    /* Close file */
    ret = H5Fclose(fid1);
    CHECK(ret, FAIL, "H5Fclose");

} /* end test_vltypes_vlen_vlen_atomic() */
+
/****************************************************************
**
** rewrite_longer_vltypes_vlen_vlen_atomic(): Test basic VL
**      datatype code.
**      Re-writes "Dataset1" (created by
**      test_vltypes_vlen_vlen_atomic()) with new nested-VL data
**      and checks that overwriting on-disk VL data does not leak
**      memory when the data is read back.
**
**      NOTE(review): with increment == 1 the sequence lengths
**      match the original write exactly; the commented-out
**      VERIFYs below (18 hvl_t / 52 ints) appear to date from a
**      variant that wrote longer sequences -- confirm before
**      re-enabling them.
**
****************************************************************/
static void
rewrite_longer_vltypes_vlen_vlen_atomic(void)
{
    hvl_t    wdata[SPACE1_DIM1]; /* Information to write */
    hvl_t    rdata[SPACE1_DIM1]; /* Information read in */
    hvl_t   *t1, *t2;            /* Temporary pointer to VL information */
    hid_t    fid1;               /* HDF5 File IDs */
    hid_t    dataset;            /* Dataset ID */
    hid_t    sid1;               /* Dataspace ID */
    hid_t    tid2;               /* Datatype IDs */
    hid_t    xfer_pid;           /* Dataset transfer property list ID */
    hsize_t  size;               /* Number of bytes which will be used */
    unsigned i, j, k;            /* counting variables */
    size_t   mem_used = 0;       /* Memory used during allocation */
    unsigned increment = 1;      /* Extra length added to each outer sequence */
    herr_t   ret;                /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Check memory leak for VL Datatypes with VL Atomic Datatype Component Functionality\n"));

    /* Allocate and initialize VL data to write: same nesting shape as the
     * original write, but with distinct values i*1000 + j*100 + k*10 so a
     * stale read would be detected.
     */
    for (i = 0; i < SPACE1_DIM1; i++) {
        wdata[i].p = HDmalloc((i + increment) * sizeof(hvl_t));
        if (wdata[i].p == NULL) {
            TestErrPrintf("Cannot allocate memory for VL data! i=%u\n", i);
            return;
        } /* end if */
        wdata[i].len = i + increment;
        for (t1 = (hvl_t *)(wdata[i].p), j = 0; j < (i + increment); j++, t1++) {
            t1->p = HDmalloc((j + 1) * sizeof(unsigned int));
            if (t1->p == NULL) {
                TestErrPrintf("Cannot allocate memory for VL data! i=%u, j=%u\n", i, j);
                return;
            } /* end if */
            t1->len = j + 1;
            for (k = 0; k < (j + 1); k++)
                ((unsigned int *)t1->p)[k] = i * 1000 + j * 100 + k * 10;
        } /* end for */
    }     /* end for */

    /* Open file created by the earlier test, read/write */
    fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
    CHECK(fid1, FAIL, "H5Fopen");

    /* Open the dataset */
    dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dopen2");

    /* Get dataspace for datasets */
    sid1 = H5Dget_space(dataset);
    CHECK(sid1, FAIL, "H5Dget_space");

    /* Open datatype of the dataset (the nested VL type from disk) */
    tid2 = H5Dget_type(dataset);
    CHECK(tid2, FAIL, "H5Dget_type");

    /* Overwrite the existing dataset contents */
    ret = H5Dwrite(dataset, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close datatype */
    ret = H5Tclose(tid2);
    CHECK(ret, FAIL, "H5Tclose");

    /* Close disk dataspace */
    ret = H5Sclose(sid1);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close file */
    ret = H5Fclose(fid1);
    CHECK(ret, FAIL, "H5Fclose");

    /* Open the file for data checking */
    fid1 = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
    CHECK(fid1, FAIL, "H5Fopen");

    /* Open a dataset */
    dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dopen2");

    /* Get dataspace for datasets */
    sid1 = H5Dget_space(dataset);
    CHECK(sid1, FAIL, "H5Dget_space");

    /* Get datatype for dataset */
    tid2 = H5Dget_type(dataset);
    CHECK(tid2, FAIL, "H5Dget_type");

    /* Change to the custom memory allocation routines for reading VL data;
     * alloc/free callbacks update 'mem_used' to track outstanding bytes.
     */
    xfer_pid = H5Pcreate(H5P_DATASET_XFER);
    CHECK(xfer_pid, FAIL, "H5Pcreate");

    ret = H5Pset_vlen_mem_manager(xfer_pid, test_vltypes_alloc_custom, &mem_used, test_vltypes_free_custom,
                                  &mem_used);
    CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");

    /* Query the buffer size (result currently unchecked -- see note above) */
    ret = H5Dvlen_get_buf_size(dataset, tid2, sid1, &size);
    CHECK(ret, FAIL, "H5Dvlen_get_buf_size");

    /* 18 hvl_t elements allocated = 3 + 4 + 5 + 6 elements for each array position */
    /* 52 unsigned int elements allocated = 6 + 10 + 15 + 21 elements */
    /*VERIFY(size, 18 * sizeof(hvl_t) + 52 * sizeof(unsigned int), "H5Dvlen_get_buf_size");*/

    /* Read dataset from disk */
    ret = H5Dread(dataset, tid2, H5S_ALL, H5S_ALL, xfer_pid, rdata);
    CHECK(ret, FAIL, "H5Dread");

    /* Make certain the correct amount of memory has been used */
    /* 18 hvl_t elements allocated = 3+4+5+6elements for each array position */
    /* 52 unsigned int elements allocated = 6+10+15+21 elements */
    /*VERIFY(mem_used,18*sizeof(hvl_t)+52*sizeof(unsigned int),"H5Dread");*/

    /* Compare data read in against the overwrite buffer */
    for (i = 0; i < SPACE1_DIM1; i++) {
        if (wdata[i].len != rdata[i].len) {
            TestErrPrintf("%d: VL data length don't match!, wdata[%d].len=%d, rdata[%d].len=%d\n", __LINE__,
                          (int)i, (int)wdata[i].len, (int)i, (int)rdata[i].len);
            continue;
        } /* end if */
        for (t1 = (hvl_t *)(wdata[i].p), t2 = (hvl_t *)(rdata[i].p), j = 0; j < rdata[i].len;
             j++, t1++, t2++) {
            if (t1->len != t2->len) {
                TestErrPrintf("%d: VL data length don't match!, i=%d, j=%d, t1->len=%d, t2->len=%d\n",
                              __LINE__, (int)i, (int)j, (int)t1->len, (int)t2->len);
                continue;
            } /* end if */
            for (k = 0; k < t2->len; k++) {
                if (((unsigned int *)t1->p)[k] != ((unsigned int *)t2->p)[k]) {
                    TestErrPrintf("VL data values don't match!, t1->p[%d]=%d, t2->p[%d]=%d\n", (int)k,
                                  (int)((unsigned int *)t1->p)[k], (int)k, (int)((unsigned int *)t2->p)[k]);
                    continue;
                } /* end if */
            }     /* end for */
        }         /* end for */
    }             /* end for */

    /* Reclaim all the (nested) VL data read through the custom allocator */
    ret = H5Treclaim(tid2, sid1, xfer_pid, rdata);
    CHECK(ret, FAIL, "H5Treclaim");

    /* Make certain the VL memory has been freed (no leak from the rewrite) */
    VERIFY(mem_used, 0, "H5Treclaim");

    /* Reclaim the write VL data */
    ret = H5Treclaim(tid2, sid1, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Treclaim");

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close datatype */
    ret = H5Tclose(tid2);
    CHECK(ret, FAIL, "H5Tclose");

    /* Close disk dataspace */
    ret = H5Sclose(sid1);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close dataset transfer property list */
    ret = H5Pclose(xfer_pid);
    CHECK(ret, FAIL, "H5Pclose");

    /* Close file */
    ret = H5Fclose(fid1);
    CHECK(ret, FAIL, "H5Fclose");

} /* end rewrite_longer_vltypes_vlen_vlen_atomic() */
+
/****************************************************************
**
** rewrite_shorter_vltypes_vlen_vlen_atomic(): Test basic VL
**      datatype code.
**      Re-writes "Dataset1" once more with new nested-VL data
**      and checks that repeatedly overwriting on-disk VL data
**      does not leak memory.
**
**      NOTE(review): despite the "shorter" name, increment == 1
**      gives the same sequence lengths as the original write;
**      the active VERIFYs below use the same size formula as
**      test_vltypes_vlen_vlen_atomic(), consistent with that.
**
****************************************************************/
static void
rewrite_shorter_vltypes_vlen_vlen_atomic(void)
{
    hvl_t    wdata[SPACE1_DIM1]; /* Information to write */
    hvl_t    rdata[SPACE1_DIM1]; /* Information read in */
    hvl_t   *t1, *t2;            /* Temporary pointer to VL information */
    hid_t    fid1;               /* HDF5 File IDs */
    hid_t    dataset;            /* Dataset ID */
    hid_t    sid1;               /* Dataspace ID */
    hid_t    tid2;               /* Datatype IDs */
    hid_t    xfer_pid;           /* Dataset transfer property list ID */
    hsize_t  size;               /* Number of bytes which will be used */
    unsigned i, j, k;            /* counting variables */
    size_t   mem_used = 0;       /* Memory used during allocation */
    unsigned increment = 1;      /* Extra length added to each outer sequence */
    herr_t   ret;                /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Check memory leak for VL Datatypes with VL Atomic Datatype Component Functionality\n"));

    /* Allocate and initialize VL data to write: same nesting shape as
     * before, with distinct values i*100000 + j*1000 + k*10 so a stale
     * read would be detected.
     */
    for (i = 0; i < SPACE1_DIM1; i++) {
        wdata[i].p = HDmalloc((i + increment) * sizeof(hvl_t));
        if (wdata[i].p == NULL) {
            TestErrPrintf("Cannot allocate memory for VL data! i=%u\n", i);
            return;
        } /* end if */
        wdata[i].len = i + increment;
        for (t1 = (hvl_t *)(wdata[i].p), j = 0; j < (i + increment); j++, t1++) {
            t1->p = HDmalloc((j + 1) * sizeof(unsigned int));
            if (t1->p == NULL) {
                TestErrPrintf("Cannot allocate memory for VL data! i=%u, j=%u\n", i, j);
                return;
            } /* end if */
            t1->len = j + 1;
            for (k = 0; k < (j + 1); k++)
                ((unsigned int *)t1->p)[k] = i * 100000 + j * 1000 + k * 10;
        } /* end for */
    }     /* end for */

    /* Open file created by the earlier tests, read/write */
    fid1 = H5Fopen(FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
    CHECK(fid1, FAIL, "H5Fopen");

    /* Open the dataset */
    dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dopen2");

    /* Get dataspace for datasets */
    sid1 = H5Dget_space(dataset);
    CHECK(sid1, FAIL, "H5Dget_space");

    /* Open datatype of the dataset (the nested VL type from disk) */
    tid2 = H5Dget_type(dataset);
    CHECK(tid2, FAIL, "H5Dget_type");

    /* Overwrite the existing dataset contents */
    ret = H5Dwrite(dataset, tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close datatype */
    ret = H5Tclose(tid2);
    CHECK(ret, FAIL, "H5Tclose");

    /* Close disk dataspace */
    ret = H5Sclose(sid1);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close file */
    ret = H5Fclose(fid1);
    CHECK(ret, FAIL, "H5Fclose");

    /* Open the file for data checking */
    fid1 = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
    CHECK(fid1, FAIL, "H5Fopen");

    /* Open a dataset */
    dataset = H5Dopen2(fid1, "Dataset1", H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dopen2");

    /* Get dataspace for datasets */
    sid1 = H5Dget_space(dataset);
    CHECK(sid1, FAIL, "H5Dget_space");

    /* Get datatype for dataset */
    tid2 = H5Dget_type(dataset);
    CHECK(tid2, FAIL, "H5Dget_type");

    /* Change to the custom memory allocation routines for reading VL data;
     * alloc/free callbacks update 'mem_used' to track outstanding bytes.
     */
    xfer_pid = H5Pcreate(H5P_DATASET_XFER);
    CHECK(xfer_pid, FAIL, "H5Pcreate");

    ret = H5Pset_vlen_mem_manager(xfer_pid, test_vltypes_alloc_custom, &mem_used, test_vltypes_free_custom,
                                  &mem_used);
    CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");

    /* Make certain the correct amount of memory was used */
    ret = H5Dvlen_get_buf_size(dataset, tid2, sid1, &size);
    CHECK(ret, FAIL, "H5Dvlen_get_buf_size");

    /* Expected size: DIM*(DIM+1)/2 hvl_t structs plus a tetrahedral number
     * (vlen_size_func) of unsigned ints -- same layout as the original
     * write since increment == 1.  (Literal counts below assume
     * SPACE1_DIM1 == 4.)
     */
    /* 10 hvl_t elements allocated = 1 + 2 + 3 + 4 elements for each array position */
    /* 20 unsigned int elements allocated = 1 + 3 + 6 + 10 elements */
    VERIFY(size,
           (hsize_t)(((SPACE1_DIM1 * (SPACE1_DIM1 + 1)) / 2) * sizeof(hvl_t) +
                     vlen_size_func((unsigned long)SPACE1_DIM1) * sizeof(unsigned int)),
           "H5Dvlen_get_buf_size");

    /* Read dataset from disk */
    ret = H5Dread(dataset, tid2, H5S_ALL, H5S_ALL, xfer_pid, rdata);
    CHECK(ret, FAIL, "H5Dread");

    /* Make certain the correct amount of memory has been used */
    /* 10 hvl_t elements allocated = 1 + 2 + 3 + 4 elements for each array position */
    /* 20 unsigned int elements allocated = 1 + 3 + 6 + 10 elements */
    VERIFY(mem_used,
           (size_t)(((SPACE1_DIM1 * (SPACE1_DIM1 + 1)) / 2) * sizeof(hvl_t) +
                    vlen_size_func((unsigned long)SPACE1_DIM1) * sizeof(unsigned int)),
           "H5Dread");

    /* Compare data read in against the overwrite buffer */
    for (i = 0; i < SPACE1_DIM1; i++) {
        if (wdata[i].len != rdata[i].len) {
            TestErrPrintf("%d: VL data length don't match!, wdata[%d].len=%d, rdata[%d].len=%d\n", __LINE__,
                          (int)i, (int)wdata[i].len, (int)i, (int)rdata[i].len);
            continue;
        } /* end if */
        for (t1 = (hvl_t *)(wdata[i].p), t2 = (hvl_t *)(rdata[i].p), j = 0; j < rdata[i].len;
             j++, t1++, t2++) {
            if (t1->len != t2->len) {
                TestErrPrintf("%d: VL data length don't match!, i=%d, j=%d, t1->len=%d, t2->len=%d\n",
                              __LINE__, (int)i, (int)j, (int)t1->len, (int)t2->len);
                continue;
            } /* end if */
            for (k = 0; k < t2->len; k++) {
                if (((unsigned int *)t1->p)[k] != ((unsigned int *)t2->p)[k]) {
                    TestErrPrintf("VL data values don't match!, t1->p[%d]=%d, t2->p[%d]=%d\n", (int)k,
                                  (int)((unsigned int *)t1->p)[k], (int)k, (int)((unsigned int *)t2->p)[k]);
                    continue;
                } /* end if */
            }     /* end for */
        }         /* end for */
    }             /* end for */

    /* Reclaim all the (nested) VL data read through the custom allocator */
    ret = H5Treclaim(tid2, sid1, xfer_pid, rdata);
    CHECK(ret, FAIL, "H5Treclaim");

    /* Make certain the VL memory has been freed (no leak from the rewrite) */
    VERIFY(mem_used, 0, "H5Treclaim");

    /* Reclaim the write VL data */
    ret = H5Treclaim(tid2, sid1, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Treclaim");

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close datatype */
    ret = H5Tclose(tid2);
    CHECK(ret, FAIL, "H5Tclose");

    /* Close disk dataspace */
    ret = H5Sclose(sid1);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close dataset transfer property list */
    ret = H5Pclose(xfer_pid);
    CHECK(ret, FAIL, "H5Pclose");

    /* Close file */
    ret = H5Fclose(fid1);
    CHECK(ret, FAIL, "H5Fclose");

} /* end rewrite_shorter_vltypes_vlen_vlen_atomic() */
+
+/****************************************************************
+**
+** test_vltypes_fill_value(): Test fill value for VL data.
+** One tests data space isn't allocated; another tests data
+** space is allocated.
+**
+****************************************************************/
+static void
+test_vltypes_fill_value(void)
+{
+ typedef struct dtype1_struct {
+ unsigned int gui;
+ unsigned int pgui;
+ const char *str_id;
+ const char *str_name;
+ const char *str_desc;
+ const char *str_orig;
+ const char *str_stat;
+ unsigned int ver;
+ double val;
+ double ma;
+ double mi;
+ const char *str_form;
+ const char *str_unit;
+ } dtype1_struct;
+
+ herr_t ret;
+ hid_t file_id;
+ hid_t dtype1_id = -1;
+ hid_t str_id = -1;
+ hid_t small_dspace_id; /* Dataspace ID for small datasets */
+ hid_t large_dspace_id; /* Dataspace ID for large datasets */
+ hid_t small_select_dspace_id; /* Dataspace ID for selection in small datasets */
+ hid_t large_select_dspace_id; /* Dataspace ID for selection in large datasets */
+ hid_t dset_dspace_id = -1; /* Dataspace ID for a particular dataset */
+ hid_t dset_select_dspace_id = -1; /* Dataspace ID for selection in a particular dataset */
+ hid_t scalar_dspace_id; /* Dataspace ID for scalar dataspace */
+ hid_t single_dspace_id; /* Dataspace ID for single element selection */
+ hsize_t single_offset[] = {2}; /* Offset of single element selection */
+ hsize_t single_block[] = {1}; /* Block size of single element selection */
+ hsize_t select_offset[] = {0}; /* Offset of non-contiguous element selection */
+ hsize_t select_stride[] = {2}; /* Stride size of non-contiguous element selection */
+ hsize_t small_select_count[] = {SPACE4_DIM_SMALL /
+ 2}; /* Count of small non-contiguous element selection */
+ hsize_t large_select_count[] = {SPACE4_DIM_LARGE /
+ 2}; /* Count of large non-contiguous element selection */
+ hsize_t select_block[] = {1}; /* Block size of non-contiguous element selection */
+ hid_t dcpl_id, xfer_pid;
+ hid_t dset_id;
+ hsize_t small_dims[] = {SPACE4_DIM_SMALL};
+ hsize_t large_dims[] = {SPACE4_DIM_LARGE};
+ size_t dset_elmts = 0; /* Number of elements in a particular dataset */
+ const dtype1_struct fill1 = {1, 2, "foobar", "", NULL, "\0", "dead",
+ 3, 4.0, 100.0, 1.0, "liquid", "meter"};
+ const dtype1_struct wdata = {3, 4, "", NULL, "\0", "foo", "two", 6, 8.0, 200.0, 2.0, "solid", "yard"};
+ dtype1_struct *rbuf = NULL; /* Buffer for reading data */
+ size_t mem_used = 0; /* Memory used during allocation */
+ H5D_layout_t layout; /* Dataset storage layout */
+ char dset_name1[64], dset_name2[64]; /* Dataset names */
+ unsigned i;
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Check fill value for VL data\n"));
+
+ /* Create a string datatype */
+ str_id = H5Tcopy(H5T_C_S1);
+ CHECK(str_id, FAIL, "H5Tcopy");
+ ret = H5Tset_size(str_id, H5T_VARIABLE);
+ CHECK(ret, FAIL, "H5Tset_size");
+
+ /* Create a compound data type */
+ dtype1_id = H5Tcreate(H5T_COMPOUND, sizeof(struct dtype1_struct));
+ CHECK(dtype1_id, FAIL, "H5Tcreate");
+
+ ret = H5Tinsert(dtype1_id, "guid", HOFFSET(struct dtype1_struct, gui), H5T_NATIVE_UINT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(dtype1_id, "pguid", HOFFSET(struct dtype1_struct, pgui), H5T_NATIVE_UINT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(dtype1_id, "str_id", HOFFSET(dtype1_struct, str_id), str_id);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(dtype1_id, "str_name", HOFFSET(dtype1_struct, str_name), str_id);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(dtype1_id, "str_desc", HOFFSET(dtype1_struct, str_desc), str_id);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(dtype1_id, "str_orig", HOFFSET(dtype1_struct, str_orig), str_id);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(dtype1_id, "str_stat", HOFFSET(dtype1_struct, str_stat), str_id);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(dtype1_id, "ver", HOFFSET(struct dtype1_struct, ver), H5T_NATIVE_UINT);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(dtype1_id, "val", HOFFSET(struct dtype1_struct, val), H5T_NATIVE_DOUBLE);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(dtype1_id, "ma", HOFFSET(struct dtype1_struct, ma), H5T_NATIVE_DOUBLE);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(dtype1_id, "mi", HOFFSET(struct dtype1_struct, mi), H5T_NATIVE_DOUBLE);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(dtype1_id, "str_form", HOFFSET(dtype1_struct, str_form), str_id);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ ret = H5Tinsert(dtype1_id, "str_unit", HOFFSET(dtype1_struct, str_unit), str_id);
+ CHECK(ret, FAIL, "H5Tinsert");
+
+ /* Close string datatype */
+ ret = H5Tclose(str_id);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Allocate space for the buffer to read data */
+ rbuf = (dtype1_struct *)HDmalloc(SPACE4_DIM_LARGE * sizeof(dtype1_struct));
+ CHECK_PTR(rbuf, "HDmalloc");
+
+ /* Create the small & large dataspaces to use */
+ small_dspace_id = H5Screate_simple(SPACE4_RANK, small_dims, NULL);
+ CHECK(small_dspace_id, FAIL, "H5Screate_simple");
+
+ large_dspace_id = H5Screate_simple(SPACE4_RANK, large_dims, NULL);
+ CHECK(large_dspace_id, FAIL, "H5Screate_simple");
+
+ /* Create small & large dataspaces w/non-contiguous selections */
+ small_select_dspace_id = H5Scopy(small_dspace_id);
+ CHECK(small_select_dspace_id, FAIL, "H5Scopy");
+
+ ret = H5Sselect_hyperslab(small_select_dspace_id, H5S_SELECT_SET, select_offset, select_stride,
+ small_select_count, select_block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ large_select_dspace_id = H5Scopy(large_dspace_id);
+ CHECK(large_select_dspace_id, FAIL, "H5Scopy");
+
+ ret = H5Sselect_hyperslab(large_select_dspace_id, H5S_SELECT_SET, select_offset, select_stride,
+ large_select_count, select_block);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Create a scalar dataspace */
+ scalar_dspace_id = H5Screate(H5S_SCALAR);
+ CHECK(scalar_dspace_id, FAIL, "H5Screate");
+
+ /* Create dataset create property list and set the fill value */
+ dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+ CHECK(dcpl_id, FAIL, "H5Pcreate");
+
+ ret = H5Pset_fill_value(dcpl_id, dtype1_id, &fill1);
+ CHECK(ret, FAIL, "H5Pset_fill_value");
+
+ /* Create the file */
+ file_id = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(file_id, FAIL, "H5Fcreate");
+
+ /* Create datasets with different storage layouts */
+ for (layout = H5D_COMPACT; layout <= H5D_CHUNKED; layout++) {
+ unsigned compress_loop; /* # of times to run loop, for testing compressed chunked dataset */
+ unsigned test_loop; /* Loop over datasets */
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+ if (layout == H5D_CHUNKED)
+ compress_loop = 2;
+ else
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ compress_loop = 1;
+
+ /* Loop over dataset operations */
+ for (test_loop = 0; test_loop < compress_loop; test_loop++) {
+ hid_t tmp_dcpl_id; /* Temporary copy of the dataset creation property list */
+
+ /* Make a copy of the dataset creation property list */
+ tmp_dcpl_id = H5Pcopy(dcpl_id);
+ CHECK(tmp_dcpl_id, FAIL, "H5Pcopy");
+
+ /* Layout specific actions */
+ switch (layout) {
+ case H5D_COMPACT:
+ HDstrcpy(dset_name1, "dataset1-compact");
+ HDstrcpy(dset_name2, "dataset2-compact");
+ dset_dspace_id = small_dspace_id;
+ ret = H5Pset_layout(tmp_dcpl_id, H5D_COMPACT);
+ CHECK(ret, FAIL, "H5Pset_layout");
+ break;
+
+ case H5D_CONTIGUOUS:
+ HDstrcpy(dset_name1, "dataset1-contig");
+ HDstrcpy(dset_name2, "dataset2-contig");
+ dset_dspace_id = large_dspace_id;
+ break;
+
+ case H5D_CHUNKED: {
+ hsize_t chunk_dims[1] = {SPACE4_DIM_LARGE / 4};
+
+ dset_dspace_id = large_dspace_id;
+ ret = H5Pset_chunk(tmp_dcpl_id, 1, chunk_dims);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+#ifdef H5_HAVE_FILTER_DEFLATE
+ if (test_loop == 1) {
+ HDstrcpy(dset_name1, "dataset1-chunked-compressed");
+ HDstrcpy(dset_name2, "dataset2-chunked-compressed");
+ ret = H5Pset_deflate(tmp_dcpl_id, 3);
+ CHECK(ret, FAIL, "H5Pset_deflate");
+ } /* end if */
+ else {
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ HDstrcpy(dset_name1, "dataset1-chunked");
+ HDstrcpy(dset_name2, "dataset2-chunked");
+#ifdef H5_HAVE_FILTER_DEFLATE
+ } /* end else */
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ } break;
+
+ case H5D_VIRTUAL:
+ HDassert(0 && "Invalid layout type!");
+ break;
+
+ case H5D_LAYOUT_ERROR:
+ case H5D_NLAYOUTS:
+ default:
+ HDassert(0 && "Unknown layout type!");
+ break;
+ } /* end switch */
+
+ /* Create first data set with default setting - no space is allocated */
+ dset_id = H5Dcreate2(file_id, dset_name1, dtype1_id, dset_dspace_id, H5P_DEFAULT, tmp_dcpl_id,
+ H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dcreate2");
+
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Create a second data set with space allocated and fill value written */
+ ret = H5Pset_fill_time(tmp_dcpl_id, H5D_FILL_TIME_IFSET);
+ CHECK(ret, FAIL, "H5Pset_fill_time");
+
+ ret = H5Pset_alloc_time(tmp_dcpl_id, H5D_ALLOC_TIME_EARLY);
+ CHECK(ret, FAIL, "H5Pset_alloc_time");
+
+ dset_id = H5Dcreate2(file_id, dset_name2, dtype1_id, dset_dspace_id, H5P_DEFAULT, tmp_dcpl_id,
+ H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dcreate2");
+
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close temporary DCPL */
+ ret = H5Pclose(tmp_dcpl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+ } /* end for */
+ } /* end for */
+
+ ret = H5Fclose(file_id);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ ret = H5Pclose(dcpl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ /* Change to the custom memory allocation routines for reading VL data */
+ xfer_pid = H5Pcreate(H5P_DATASET_XFER);
+ CHECK(xfer_pid, FAIL, "H5Pcreate");
+
+ ret = H5Pset_vlen_mem_manager(xfer_pid, test_vltypes_alloc_custom, &mem_used, test_vltypes_free_custom,
+ &mem_used);
+ CHECK(ret, FAIL, "H5Pset_vlen_mem_manager");
+
+ /* Open the file to check data set value */
+ file_id = H5Fopen(FILENAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+ CHECK(file_id, FAIL, "H5Fopen");
+
+ /* Read empty datasets with different storage layouts */
+ for (layout = H5D_COMPACT; layout <= H5D_CHUNKED; layout++) {
+ unsigned compress_loop; /* # of times to run loop, for testing compressed chunked dataset */
+ unsigned test_loop; /* Loop over datasets */
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+ if (layout == H5D_CHUNKED)
+ compress_loop = 2;
+ else
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ compress_loop = 1;
+
+ /* Loop over dataset operations */
+ for (test_loop = 0; test_loop < compress_loop; test_loop++) {
+
+ /* Layout specific actions */
+ switch (layout) {
+ case H5D_COMPACT:
+ HDstrcpy(dset_name1, "dataset1-compact");
+ HDstrcpy(dset_name2, "dataset2-compact");
+ dset_dspace_id = small_dspace_id;
+ dset_select_dspace_id = small_select_dspace_id;
+ dset_elmts = SPACE4_DIM_SMALL;
+ break;
+
+ case H5D_CONTIGUOUS:
+ HDstrcpy(dset_name1, "dataset1-contig");
+ HDstrcpy(dset_name2, "dataset2-contig");
+ dset_dspace_id = large_dspace_id;
+ dset_select_dspace_id = large_select_dspace_id;
+ dset_elmts = SPACE4_DIM_LARGE;
+ break;
+
+ case H5D_CHUNKED:
+#ifdef H5_HAVE_FILTER_DEFLATE
+ if (test_loop == 1) {
+ HDstrcpy(dset_name1, "dataset1-chunked-compressed");
+ HDstrcpy(dset_name2, "dataset2-chunked-compressed");
+ } /* end if */
+ else {
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ HDstrcpy(dset_name1, "dataset1-chunked");
+ HDstrcpy(dset_name2, "dataset2-chunked");
+#ifdef H5_HAVE_FILTER_DEFLATE
+ } /* end else */
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ dset_dspace_id = large_dspace_id;
+ dset_select_dspace_id = large_select_dspace_id;
+ dset_elmts = SPACE4_DIM_LARGE;
+ break;
+
+ case H5D_VIRTUAL:
+ HDassert(0 && "Invalid layout type!");
+ break;
+
+ case H5D_LAYOUT_ERROR:
+ case H5D_NLAYOUTS:
+ default:
+ HDassert(0 && "Unknown layout type!");
+ break;
+ } /* end switch */
+
+ /* Open first data set */
+ dset_id = H5Dopen2(file_id, dset_name1, H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dopen2");
+
+ /* Read in the entire 'empty' dataset of fill value */
+ ret = H5Dread(dset_id, dtype1_id, dset_dspace_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read in */
+ for (i = 0; i < dset_elmts; i++) {
+ if (HDstrcmp(rbuf[i].str_id, "foobar") != 0 || HDstrcmp(rbuf[i].str_name, "") != 0 ||
+ rbuf[i].str_desc || HDstrcmp(rbuf[i].str_orig, "\0") != 0 ||
+ HDstrcmp(rbuf[i].str_stat, "dead") != 0 || HDstrcmp(rbuf[i].str_form, "liquid") != 0 ||
+ HDstrcmp(rbuf[i].str_unit, "meter") != 0) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i) = %d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end for */
+
+ /* Release the space */
+ ret = H5Treclaim(dtype1_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Treclaim");
+
+ /* Clear the read buffer */
+ HDmemset(rbuf, 0, dset_elmts * sizeof(dtype1_struct));
+
+ /* Read in non-contiguous selection from 'empty' dataset of fill value */
+ ret = H5Dread(dset_id, dtype1_id, dset_select_dspace_id, dset_select_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read in */
+ for (i = 0; i < dset_elmts; i++) {
+ if ((i % 2) == select_offset[0]) {
+ if (HDstrcmp(rbuf[i].str_id, "foobar") != 0 || HDstrcmp(rbuf[i].str_name, "") != 0 ||
+ rbuf[i].str_desc || HDstrcmp(rbuf[i].str_orig, "\0") != 0 ||
+ HDstrcmp(rbuf[i].str_stat, "dead") != 0 ||
+ HDstrcmp(rbuf[i].str_form, "liquid") != 0 ||
+ HDstrcmp(rbuf[i].str_unit, "meter") != 0) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i) = %d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ else {
+ if (rbuf[i].str_id || rbuf[i].str_name || rbuf[i].str_desc || rbuf[i].str_orig ||
+ rbuf[i].str_stat || rbuf[i].str_form || rbuf[i].str_unit) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i) = %d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end else */
+ } /* end for */
+
+ /* Release the space */
+ ret = H5Treclaim(dtype1_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Treclaim");
+
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Open the second data set to check the value of data */
+ dset_id = H5Dopen2(file_id, dset_name2, H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dopen2");
+
+ /* Read in the entire 'empty' dataset of fill value */
+ ret = H5Dread(dset_id, dtype1_id, dset_dspace_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read in */
+ for (i = 0; i < dset_elmts; i++) {
+ if (HDstrcmp(rbuf[i].str_id, "foobar") != 0 || HDstrcmp(rbuf[i].str_name, "") != 0 ||
+ rbuf[i].str_desc || HDstrcmp(rbuf[i].str_orig, "\0") != 0 ||
+ HDstrcmp(rbuf[i].str_stat, "dead") != 0 || HDstrcmp(rbuf[i].str_form, "liquid") != 0 ||
+ HDstrcmp(rbuf[i].str_unit, "meter") != 0) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end for */
+
+ /* Release the space */
+ ret = H5Treclaim(dtype1_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Treclaim");
+
+ /* Clear the read buffer */
+ HDmemset(rbuf, 0, dset_elmts * sizeof(dtype1_struct));
+
+ /* Read in non-contiguous selection from 'empty' dataset of fill value */
+ ret = H5Dread(dset_id, dtype1_id, dset_select_dspace_id, dset_select_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read in */
+ for (i = 0; i < dset_elmts; i++) {
+ if ((i % 2) == select_offset[0]) {
+ if (HDstrcmp(rbuf[i].str_id, "foobar") != 0 || HDstrcmp(rbuf[i].str_name, "") != 0 ||
+ rbuf[i].str_desc || HDstrcmp(rbuf[i].str_orig, "\0") != 0 ||
+ HDstrcmp(rbuf[i].str_stat, "dead") != 0 ||
+ HDstrcmp(rbuf[i].str_form, "liquid") != 0 ||
+ HDstrcmp(rbuf[i].str_unit, "meter") != 0) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i) = %d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ else {
+ if (rbuf[i].str_id || rbuf[i].str_name || rbuf[i].str_desc || rbuf[i].str_orig ||
+ rbuf[i].str_stat || rbuf[i].str_form || rbuf[i].str_unit) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i) = %d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end else */
+ } /* end for */
+
+ /* Release the space */
+ ret = H5Treclaim(dtype1_id, dset_select_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Treclaim");
+
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+ } /* end for */
+ } /* end for */
+
+ ret = H5Fclose(file_id);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Open the file to check data set value */
+ file_id = H5Fopen(FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(file_id, FAIL, "H5Fopen");
+
+ /* Write one element & fill values to datasets with different storage layouts */
+ for (layout = H5D_COMPACT; layout <= H5D_CHUNKED; layout++) {
+ unsigned compress_loop; /* # of times to run loop, for testing compressed chunked dataset */
+ unsigned test_loop; /* Loop over datasets */
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+ if (layout == H5D_CHUNKED)
+ compress_loop = 2;
+ else
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ compress_loop = 1;
+
+ /* Loop over dataset operations */
+ for (test_loop = 0; test_loop < compress_loop; test_loop++) {
+
+ /* Layout specific actions */
+ switch (layout) {
+ case H5D_COMPACT:
+ HDstrcpy(dset_name1, "dataset1-compact");
+ HDstrcpy(dset_name2, "dataset2-compact");
+ dset_dspace_id = small_dspace_id;
+ dset_select_dspace_id = small_select_dspace_id;
+ dset_elmts = SPACE4_DIM_SMALL;
+ break;
+
+ case H5D_CONTIGUOUS:
+ HDstrcpy(dset_name1, "dataset1-contig");
+ HDstrcpy(dset_name2, "dataset2-contig");
+ dset_dspace_id = large_dspace_id;
+ dset_select_dspace_id = large_select_dspace_id;
+ dset_elmts = SPACE4_DIM_LARGE;
+ break;
+
+ case H5D_CHUNKED:
+#ifdef H5_HAVE_FILTER_DEFLATE
+ if (test_loop == 1) {
+ HDstrcpy(dset_name1, "dataset1-chunked-compressed");
+ HDstrcpy(dset_name2, "dataset2-chunked-compressed");
+ } /* end if */
+ else {
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ HDstrcpy(dset_name1, "dataset1-chunked");
+ HDstrcpy(dset_name2, "dataset2-chunked");
+#ifdef H5_HAVE_FILTER_DEFLATE
+ } /* end else */
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ dset_dspace_id = large_dspace_id;
+ dset_select_dspace_id = large_select_dspace_id;
+ dset_elmts = SPACE4_DIM_LARGE;
+ break;
+
+ case H5D_VIRTUAL:
+ HDassert(0 && "Invalid layout type!");
+ break;
+
+ case H5D_LAYOUT_ERROR:
+ case H5D_NLAYOUTS:
+ default:
+ HDassert(0 && "Unknown layout type!");
+ break;
+ } /* end switch */
+
+ /* Copy the dataset's dataspace */
+ single_dspace_id = H5Scopy(dset_dspace_id);
+ CHECK(single_dspace_id, FAIL, "H5Scopy");
+
+ /* Set a single element in the dataspace */
+ ret = H5Sselect_hyperslab(single_dspace_id, H5S_SELECT_SET, single_offset, NULL, single_block,
+ NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Open first data set */
+ dset_id = H5Dopen2(file_id, dset_name1, H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dopen2");
+
+ /* Write one element in the dataset */
+ ret = H5Dwrite(dset_id, dtype1_id, scalar_dspace_id, single_dspace_id, xfer_pid, &wdata);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ ret = H5Dread(dset_id, dtype1_id, dset_dspace_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read in */
+ for (i = 0; i < dset_elmts; i++) {
+ if (i == single_offset[0]) {
+ if (HDstrcmp(rbuf[i].str_id, wdata.str_id) != 0 || rbuf[i].str_name ||
+ HDstrcmp(rbuf[i].str_desc, wdata.str_desc) != 0 ||
+ HDstrcmp(rbuf[i].str_orig, wdata.str_orig) != 0 ||
+ HDstrcmp(rbuf[i].str_stat, wdata.str_stat) != 0 ||
+ HDstrcmp(rbuf[i].str_form, wdata.str_form) != 0 ||
+ HDstrcmp(rbuf[i].str_unit, wdata.str_unit) != 0) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ else {
+ if (HDstrcmp(rbuf[i].str_id, "foobar") != 0 || HDstrcmp(rbuf[i].str_name, "") != 0 ||
+ rbuf[i].str_desc || HDstrcmp(rbuf[i].str_orig, "\0") != 0 ||
+ HDstrcmp(rbuf[i].str_stat, "dead") != 0 ||
+ HDstrcmp(rbuf[i].str_form, "liquid") != 0 ||
+ HDstrcmp(rbuf[i].str_unit, "meter") != 0) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ } /* end for */
+
+ /* Release the space */
+ ret = H5Treclaim(dtype1_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Treclaim");
+
+ /* Clear the read buffer */
+ HDmemset(rbuf, 0, dset_elmts * sizeof(dtype1_struct));
+
+ /* Read in non-contiguous selection from dataset */
+ ret = H5Dread(dset_id, dtype1_id, dset_select_dspace_id, dset_select_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read in */
+ for (i = 0; i < dset_elmts; i++) {
+ if (i == single_offset[0]) {
+ if (HDstrcmp(rbuf[i].str_id, wdata.str_id) != 0 || rbuf[i].str_name ||
+ HDstrcmp(rbuf[i].str_desc, wdata.str_desc) != 0 ||
+ HDstrcmp(rbuf[i].str_orig, wdata.str_orig) != 0 ||
+ HDstrcmp(rbuf[i].str_stat, wdata.str_stat) != 0 ||
+ HDstrcmp(rbuf[i].str_form, wdata.str_form) != 0 ||
+ HDstrcmp(rbuf[i].str_unit, wdata.str_unit) != 0) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ else {
+ if ((i % 2) == select_offset[0]) {
+ if (HDstrcmp(rbuf[i].str_id, "foobar") != 0 || HDstrcmp(rbuf[i].str_name, "") != 0 ||
+ rbuf[i].str_desc || HDstrcmp(rbuf[i].str_orig, "\0") != 0 ||
+ HDstrcmp(rbuf[i].str_stat, "dead") != 0 ||
+ HDstrcmp(rbuf[i].str_form, "liquid") != 0 ||
+ HDstrcmp(rbuf[i].str_unit, "meter") != 0) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i) = %d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ else {
+ if (rbuf[i].str_id || rbuf[i].str_name || rbuf[i].str_desc || rbuf[i].str_orig ||
+ rbuf[i].str_stat || rbuf[i].str_form || rbuf[i].str_unit) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i) = %d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end else */
+ } /* end else */
+ } /* end for */
+
+ /* Release the space */
+ ret = H5Treclaim(dtype1_id, dset_select_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Treclaim");
+
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Open the second data set to check the value of data */
+ dset_id = H5Dopen2(file_id, dset_name2, H5P_DEFAULT);
+ CHECK(dset_id, FAIL, "H5Dopen2");
+
+ /* Write one element in the dataset */
+ ret = H5Dwrite(dset_id, dtype1_id, scalar_dspace_id, single_dspace_id, xfer_pid, &wdata);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ ret = H5Dread(dset_id, dtype1_id, dset_dspace_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read in */
+ for (i = 0; i < dset_elmts; i++) {
+ if (i == single_offset[0]) {
+ if (HDstrcmp(rbuf[i].str_id, wdata.str_id) != 0 || rbuf[i].str_name ||
+ HDstrcmp(rbuf[i].str_desc, wdata.str_desc) != 0 ||
+ HDstrcmp(rbuf[i].str_orig, wdata.str_orig) != 0 ||
+ HDstrcmp(rbuf[i].str_stat, wdata.str_stat) != 0 ||
+ HDstrcmp(rbuf[i].str_form, wdata.str_form) != 0 ||
+ HDstrcmp(rbuf[i].str_unit, wdata.str_unit) != 0) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ else {
+ if (HDstrcmp(rbuf[i].str_id, "foobar") != 0 || HDstrcmp(rbuf[i].str_name, "") != 0 ||
+ rbuf[i].str_desc || HDstrcmp(rbuf[i].str_orig, "\0") != 0 ||
+ HDstrcmp(rbuf[i].str_stat, "dead") != 0 ||
+ HDstrcmp(rbuf[i].str_form, "liquid") != 0 ||
+ HDstrcmp(rbuf[i].str_unit, "meter") != 0) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ } /* end for */
+
+ /* Release the space */
+ ret = H5Treclaim(dtype1_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Treclaim");
+
+ /* Clear the read buffer */
+ HDmemset(rbuf, 0, dset_elmts * sizeof(dtype1_struct));
+
+ /* Read in non-contiguous selection from dataset */
+ ret = H5Dread(dset_id, dtype1_id, dset_select_dspace_id, dset_select_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read in */
+ for (i = 0; i < dset_elmts; i++) {
+ if (i == single_offset[0]) {
+ if (HDstrcmp(rbuf[i].str_id, wdata.str_id) != 0 || rbuf[i].str_name ||
+ HDstrcmp(rbuf[i].str_desc, wdata.str_desc) != 0 ||
+ HDstrcmp(rbuf[i].str_orig, wdata.str_orig) != 0 ||
+ HDstrcmp(rbuf[i].str_stat, wdata.str_stat) != 0 ||
+ HDstrcmp(rbuf[i].str_form, wdata.str_form) != 0 ||
+ HDstrcmp(rbuf[i].str_unit, wdata.str_unit) != 0) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ else {
+ if ((i % 2) == select_offset[0]) {
+ if (HDstrcmp(rbuf[i].str_id, "foobar") != 0 || HDstrcmp(rbuf[i].str_name, "") != 0 ||
+ rbuf[i].str_desc || HDstrcmp(rbuf[i].str_orig, "\0") != 0 ||
+ HDstrcmp(rbuf[i].str_stat, "dead") != 0 ||
+ HDstrcmp(rbuf[i].str_form, "liquid") != 0 ||
+ HDstrcmp(rbuf[i].str_unit, "meter") != 0) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i) = %d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ else {
+ if (rbuf[i].str_id || rbuf[i].str_name || rbuf[i].str_desc || rbuf[i].str_orig ||
+ rbuf[i].str_stat || rbuf[i].str_form || rbuf[i].str_unit) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i) = %d\n", __LINE__, (int)i);
+ continue;
+ } /* end if */
+ } /* end else */
+ } /* end else */
+ } /* end for */
+
+ /* Release the space */
+ ret = H5Treclaim(dtype1_id, dset_select_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Treclaim");
+
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close the dataspace for the writes */
+ ret = H5Sclose(single_dspace_id);
+ CHECK(ret, FAIL, "H5Sclose");
+ } /* end for */
+ } /* end for */
+
+ ret = H5Fclose(file_id);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Clean up rest of IDs */
+ ret = H5Pclose(xfer_pid);
+ CHECK(ret, FAIL, "H5Pclose");
+
+ ret = H5Sclose(small_dspace_id);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(large_dspace_id);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(small_select_dspace_id);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(large_select_dspace_id);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Sclose(scalar_dspace_id);
+ CHECK(ret, FAIL, "H5Sclose");
+
+ ret = H5Tclose(dtype1_id);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ /* Release buffer */
+ HDfree(rbuf);
+} /* end test_vltypes_fill_value() */
+
+/****************************************************************
+**
+** test_vltypes(): Main VL datatype testing routine.
+**
+****************************************************************/
+void
+test_vltypes(void)
+{
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing Variable-Length Datatypes\n"));
+
+ /* These next tests use the same file */
+ test_vltypes_dataset_create(); /* Check dataset of VL when fill value
+ * won't be rewritten to it.*/
+ test_vltypes_funcs(); /* Test functions with VL types */
+ test_vltypes_vlen_atomic(); /* Test VL atomic datatypes */
+ rewrite_vltypes_vlen_atomic(); /* Check VL memory leak */
+ test_vltypes_vlen_compound(); /* Test VL compound datatypes */
+ rewrite_vltypes_vlen_compound(); /* Check VL memory leak */
+ test_vltypes_compound_vlen_atomic(); /* Test compound datatypes with VL atomic components */
+ rewrite_vltypes_compound_vlen_atomic(); /* Check VL memory leak */
+ test_vltypes_vlen_vlen_atomic(); /* Test VL datatype with VL atomic components */
+ rewrite_longer_vltypes_vlen_vlen_atomic(); /*overwrite with VL data of longer sequence*/
+    rewrite_shorter_vltypes_vlen_vlen_atomic(); /*overwrite with VL data of shorter sequence*/
+    test_vltypes_compound_vlen_vlen();          /* Test compound datatypes with nested VL vlen components */
+ test_vltypes_compound_vlstr(); /* Test data rewritten of nested VL data */
+ test_vltypes_fill_value(); /* Test fill value for VL data */
+} /* test_vltypes() */
+
+/*-------------------------------------------------------------------------
+ * Function: cleanup_vltypes
+ *
+ * Purpose: Cleanup temporary test files
+ *
+ * Return: none
+ *
+ * Programmer: Quincey Koziol
+ * June 8, 1999
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+cleanup_vltypes(void)
+{
+ H5Fdelete(FILENAME, H5P_DEFAULT);
+}
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index c3365b7..fe52cd3 100644
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -882,3 +882,7 @@ endif ()
if (HDF5_TEST_SERIAL)
include (CMakeTests.cmake)
endif ()
+
+if (HDF5_TEST_API)
+ add_subdirectory (API)
+endif ()
diff --git a/test/h5test.c b/test/h5test.c
index 1797df9..856de4b 100644
--- a/test/h5test.c
+++ b/test/h5test.c
@@ -115,6 +115,13 @@ const char *LIBVER_NAMES[] = {"earliest", /* H5F_LIBVER_EARLIEST = 0 */
/* Previous error reporting function */
static H5E_auto2_t err_func = NULL;
+/* Global variables for testing */
+size_t n_tests_run_g = 0;
+size_t n_tests_passed_g = 0;
+size_t n_tests_failed_g = 0;
+size_t n_tests_skipped_g = 0;
+uint64_t vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
+
static herr_t h5_errors(hid_t estack, void *client_data);
static char *h5_fixname_real(const char *base_name, hid_t fapl, const char *_suffix, char *fullname,
size_t size, hbool_t nest_printf, hbool_t subst_for_superblock);
diff --git a/test/h5test.h b/test/h5test.h
index ea7ab4d..b2c2cda 100644
--- a/test/h5test.h
+++ b/test/h5test.h
@@ -106,21 +106,25 @@ H5TEST_DLLVAR MPI_Info h5_io_info_g; /* MPI INFO object for IO */
do { \
HDprintf("Testing %-62s", WHAT); \
HDfflush(stdout); \
+ n_tests_run_g++; \
} while (0)
#define TESTING_2(WHAT) \
do { \
HDprintf(" Testing %-60s", WHAT); \
HDfflush(stdout); \
+ n_tests_run_g++; \
} while (0)
#define PASSED() \
do { \
HDputs(" PASSED"); \
HDfflush(stdout); \
+ n_tests_passed_g++; \
} while (0)
#define H5_FAILED() \
do { \
HDputs("*FAILED*"); \
HDfflush(stdout); \
+ n_tests_failed_g++; \
} while (0)
#define H5_WARNING() \
do { \
@@ -131,6 +135,7 @@ H5TEST_DLLVAR MPI_Info h5_io_info_g; /* MPI INFO object for IO */
do { \
HDputs(" -SKIP-"); \
HDfflush(stdout); \
+ n_tests_skipped_g++; \
} while (0)
#define PUTS_ERROR(s) \
do { \
@@ -164,6 +169,66 @@ H5TEST_DLLVAR MPI_Info h5_io_info_g; /* MPI INFO object for IO */
goto error; \
} while (0)
+/*
+ * Testing macros used for multi-part tests.
+ */
+#define TESTING_MULTIPART(WHAT) \
+ do { \
+ HDprintf("Testing %-62s", WHAT); \
+ HDputs(""); \
+ HDfflush(stdout); \
+ } while (0)
+
+/*
+ * Begin and end an entire section of multi-part tests. By placing all the
+ * parts of a test between these macros, skipping to the 'error' cleanup
+ * section of a test is deferred until all parts have finished.
+ */
+#define BEGIN_MULTIPART \
+ { \
+ int part_nerrors = 0;
+
+#define END_MULTIPART \
+ if (part_nerrors > 0) \
+ goto error; \
+ }
+
+/*
+ * Begin, end and handle errors within a single part of a multi-part test.
+ * The PART_END macro creates a goto label based on the given "part name".
+ * When a failure occurs in the current part, the PART_ERROR macro uses
+ * this label to skip to the next part of the multi-part test. The PART_ERROR
+ * macro also increments the error count so that the END_MULTIPART macro
+ * knows to skip to the test's 'error' label once all test parts have finished.
+ */
+#define PART_BEGIN(part_name) {
+#define PART_END(part_name) \
+ } \
+ part_##part_name##_end:
+#define PART_ERROR(part_name) \
+ do { \
+ n_tests_failed_g++; \
+ part_nerrors++; \
+ goto part_##part_name##_end; \
+ } while (0)
+#define PART_TEST_ERROR(part_name) \
+ do { \
+ H5_FAILED(); \
+ AT(); \
+ part_nerrors++; \
+ goto part_##part_name##_end; \
+ } while (0)
+
+/*
+ * Simply skips to the goto label for this test part and moves on to the
+ * next test part. Useful for when a test part needs to be skipped for
+ * some reason or is currently unimplemented and empty.
+ */
+#define PART_EMPTY(part_name) \
+ do { \
+ goto part_##part_name##_end; \
+ } while (0)
+
/* Number of seconds to wait before killing a test (requires alarm(2)) */
#define H5_ALARM_SEC 1200 /* default is 20 minutes */
@@ -285,7 +350,12 @@ H5TEST_DLL char *getenv_all(MPI_Comm comm, int root, const char *name);
#endif
/* Extern global variables */
-H5TEST_DLLVAR int TestVerbosity;
+H5TEST_DLLVAR int TestVerbosity;
+H5TEST_DLLVAR size_t n_tests_run_g;
+H5TEST_DLLVAR size_t n_tests_passed_g;
+H5TEST_DLLVAR size_t n_tests_failed_g;
+H5TEST_DLLVAR size_t n_tests_skipped_g;
+H5TEST_DLLVAR uint64_t vol_cap_flags_g;
H5TEST_DLL void h5_send_message(const char *file, const char *arg1, const char *arg2);
H5TEST_DLL herr_t h5_wait_message(const char *file);
diff --git a/test/vol.c b/test/vol.c
index 29bbb06..6bcae6b 100644
--- a/test/vol.c
+++ b/test/vol.c
@@ -2076,11 +2076,12 @@ test_async_vol_props(void)
hid_t fapl_id = H5I_INVALID_HID;
hid_t vol_id = H5I_INVALID_HID;
H5VL_pass_through_info_t passthru_info;
- uint64_t cap_flags = H5VL_CAP_FLAG_NONE;
char *conn_env_str = NULL;
TESTING("Async VOL props");
+ vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
+
/* Retrieve the file access property for testing */
fapl_id = h5_fileaccess();
@@ -2104,11 +2105,11 @@ test_async_vol_props(void)
/* Test query w/default VOL, which should indicate no async, since native connector
* doesn't support async.
*/
- if (H5Pget_vol_cap_flags(fapl_id, &cap_flags) < 0)
+ if (H5Pget_vol_cap_flags(fapl_id, &vol_cap_flags_g) < 0)
FAIL_STACK_ERROR;
- if ((cap_flags & H5VL_CAP_FLAG_ASYNC) > 0)
+ if ((vol_cap_flags_g & H5VL_CAP_FLAG_ASYNC) > 0)
TEST_ERROR;
- if ((cap_flags & H5VL_CAP_FLAG_NATIVE_FILES) == 0)
+ if ((vol_cap_flags_g & H5VL_CAP_FLAG_NATIVE_FILES) == 0)
TEST_ERROR;
/* Close FAPL */
@@ -2129,12 +2130,12 @@ test_async_vol_props(void)
fapl_id = h5_fileaccess();
/* Test query w/fake async VOL, which should succeed */
- cap_flags = H5VL_CAP_FLAG_NONE;
- if (H5Pget_vol_cap_flags(fapl_id, &cap_flags) < 0)
+ vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
+ if (H5Pget_vol_cap_flags(fapl_id, &vol_cap_flags_g) < 0)
FAIL_STACK_ERROR;
- if ((cap_flags & H5VL_CAP_FLAG_ASYNC) == 0)
+ if ((vol_cap_flags_g & H5VL_CAP_FLAG_ASYNC) == 0)
TEST_ERROR;
- if ((cap_flags & H5VL_CAP_FLAG_NATIVE_FILES) > 0)
+ if ((vol_cap_flags_g & H5VL_CAP_FLAG_NATIVE_FILES) > 0)
TEST_ERROR;
/* Reset environment variable & re-init default connector */
@@ -2155,12 +2156,12 @@ test_async_vol_props(void)
FAIL_STACK_ERROR;
/* Test query w/fake async VOL, which should succeed */
- cap_flags = H5VL_CAP_FLAG_NONE;
- if (H5Pget_vol_cap_flags(fapl_id, &cap_flags) < 0)
+ vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
+ if (H5Pget_vol_cap_flags(fapl_id, &vol_cap_flags_g) < 0)
FAIL_STACK_ERROR;
- if ((cap_flags & H5VL_CAP_FLAG_ASYNC) == 0)
+ if ((vol_cap_flags_g & H5VL_CAP_FLAG_ASYNC) == 0)
TEST_ERROR;
- if ((cap_flags & H5VL_CAP_FLAG_NATIVE_FILES) > 0)
+ if ((vol_cap_flags_g & H5VL_CAP_FLAG_NATIVE_FILES) > 0)
TEST_ERROR;
/* Stack the [internal] passthrough VOL connector on top of the fake async connector */
@@ -2170,12 +2171,12 @@ test_async_vol_props(void)
FAIL_STACK_ERROR;
/* Test query w/passthru -> fake async VOL, which should succeed */
- cap_flags = H5VL_CAP_FLAG_NONE;
- if (H5Pget_vol_cap_flags(fapl_id, &cap_flags) < 0)
+ vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
+ if (H5Pget_vol_cap_flags(fapl_id, &vol_cap_flags_g) < 0)
FAIL_STACK_ERROR;
- if ((cap_flags & H5VL_CAP_FLAG_ASYNC) == 0)
+ if ((vol_cap_flags_g & H5VL_CAP_FLAG_ASYNC) == 0)
TEST_ERROR;
- if ((cap_flags & H5VL_CAP_FLAG_NATIVE_FILES) > 0)
+ if ((vol_cap_flags_g & H5VL_CAP_FLAG_NATIVE_FILES) > 0)
TEST_ERROR;
/* Unregister the fake async VOL ID */
@@ -2224,14 +2225,15 @@ error:
static herr_t
test_vol_cap_flags(void)
{
- hid_t fapl_id = H5I_INVALID_HID;
- hid_t vol_id = H5I_INVALID_HID;
- uint64_t vol_cap_flags = H5VL_CAP_FLAG_NONE;
- char *vol_env = NULL;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t vol_id = H5I_INVALID_HID;
+ char *vol_env = NULL;
H5VL_pass_through_info_t passthru_info;
TESTING("VOL capability flags");
+ vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
+
/* Register a fake VOL */
if ((vol_id = H5VLregister_connector(&fake_vol_g, H5P_DEFAULT)) < 0)
TEST_ERROR;
@@ -2243,13 +2245,13 @@ test_vol_cap_flags(void)
TEST_ERROR;
/* Verify the correctness of the VOL capacity flags */
- if (H5Pget_vol_cap_flags(fapl_id, &vol_cap_flags) < 0)
+ if (H5Pget_vol_cap_flags(fapl_id, &vol_cap_flags_g) < 0)
TEST_ERROR;
- if (!(vol_cap_flags & H5VL_CAP_FLAG_FILE_BASIC))
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC))
TEST_ERROR;
- if (vol_cap_flags & H5VL_CAP_FLAG_ATTR_BASIC)
+ if (vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)
TEST_ERROR;
/* If using the native VOL by default, check flags again with H5P_DEFAULT */
@@ -2263,12 +2265,12 @@ test_vol_cap_flags(void)
if (NULL == (cls = H5I_object(connector_id)))
TEST_ERROR;
- vol_cap_flags = H5VL_CAP_FLAG_NONE;
+ vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
- if (H5Pget_vol_cap_flags(H5P_DEFAULT, &vol_cap_flags) < 0)
+ if (H5Pget_vol_cap_flags(H5P_DEFAULT, &vol_cap_flags_g) < 0)
TEST_ERROR;
- if (vol_cap_flags != cls->cap_flags)
+ if (vol_cap_flags_g != cls->cap_flags)
TEST_ERROR;
if (H5VLclose(connector_id) < 0)
@@ -2283,15 +2285,15 @@ test_vol_cap_flags(void)
FAIL_STACK_ERROR;
/* Verify the correctness of the VOL capacity flags */
- vol_cap_flags = H5VL_CAP_FLAG_NONE;
+ vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
- if (H5Pget_vol_cap_flags(fapl_id, &vol_cap_flags) < 0)
+ if (H5Pget_vol_cap_flags(fapl_id, &vol_cap_flags_g) < 0)
TEST_ERROR;
- if (!(vol_cap_flags & H5VL_CAP_FLAG_FILE_BASIC))
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC))
TEST_ERROR;
- if (vol_cap_flags & H5VL_CAP_FLAG_ATTR_BASIC)
+ if (vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)
TEST_ERROR;
if (H5Pclose(fapl_id) < 0)
diff --git a/testpar/API/CMakeLists.txt b/testpar/API/CMakeLists.txt
new file mode 100644
index 0000000..e907078
--- /dev/null
+++ b/testpar/API/CMakeLists.txt
@@ -0,0 +1,294 @@
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the COPYING file, which can be found at the root of the source code
+# distribution tree, or in https://www.hdfgroup.org/licenses.
+# If you do not have access to either file, you may request a copy from
+# help@hdfgroup.org.
+#
+
+cmake_minimum_required (VERSION 3.18)
+project (HDF5_TEST_PAR_API C)
+
+#------------------------------------------------------------------------------
+# Define for API tests
+#------------------------------------------------------------------------------
+
+set (HDF5_API_TESTS
+ attribute
+ dataset
+ datatype
+ file
+ group
+ link
+ misc
+ object
+)
+
+if (HDF5_TEST_API_ENABLE_ASYNC)
+ set (HDF5_API_TESTS
+ ${HDF5_API_TESTS}
+ async
+ )
+endif ()
+
+# Ported HDF5 tests
+set (HDF5_API_PAR_TESTS_EXTRA
+ t_bigio
+ t_pshutdown
+ t_shapesame
+ testphdf5
+)
+
+# List of files generated by the HDF5 API tests which
+# should be cleaned up in case the test failed to remove
+# them
+set (HDF5_API_PAR_TESTS_FILES
+ H5_api_test_parallel.h5
+ H5_api_async_test_parallel.h5
+ H5_api_async_test_parallel_0.h5
+ H5_api_async_test_parallel_1.h5
+ H5_api_async_test_parallel_2.h5
+ H5_api_async_test_parallel_3.h5
+ H5_api_async_test_parallel_4.h5
+ test_file_parallel.h5
+ split_comm_file.h5
+)
+
+#-----------------------------------------------------------------------------
+# Build the main API test executable
+#-----------------------------------------------------------------------------
+foreach (api_test ${HDF5_API_TESTS})
+ set (HDF5_API_PAR_TEST_SRCS
+ ${HDF5_API_PAR_TEST_SRCS}
+ ${CMAKE_CURRENT_SOURCE_DIR}/H5_api_${api_test}_test_parallel.c
+ )
+endforeach ()
+
+set (HDF5_API_PAR_TEST_SRCS
+ ${HDF5_API_PAR_TEST_SRCS}
+ ${CMAKE_CURRENT_SOURCE_DIR}/H5_api_test_parallel.c
+ ${HDF5_TEST_API_SRC_DIR}/H5_api_test_util.c
+)
+
+add_executable (h5_api_test_parallel ${HDF5_API_PAR_TEST_SRCS})
+target_include_directories (
+ h5_api_test_parallel
+ PRIVATE
+ "${HDF5_SRC_INCLUDE_DIRS}"
+ "${HDF5_TEST_PAR_DIR}"
+ "${HDF5_TEST_API_SRC_DIR}"
+ "${HDF5_TEST_API_PAR_SRC_DIR}"
+ "${HDF5_SRC_BINARY_DIR}"
+ "${HDF5_TEST_BINARY_DIR}"
+ "${HDF5_TEST_API_SRC_DIR}"
+ "$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>"
+)
+target_compile_options (
+ h5_api_test_parallel
+ PRIVATE
+ "${HDF5_CMAKE_C_FLAGS}"
+)
+target_compile_definitions (
+ h5_api_test_parallel
+ PRIVATE
+ $<$<CONFIG:Developer>:${HDF5_DEVELOPER_DEFS}>
+)
+if (NOT BUILD_SHARED_LIBS)
+ TARGET_C_PROPERTIES (h5_api_test_parallel STATIC)
+ target_link_libraries (
+ h5_api_test_parallel
+ PRIVATE
+ ${HDF5_TEST_LIB_TARGET}
+ ${HDF5_LIB_TARGET}
+ "$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:MPI::MPI_C>"
+ )
+else ()
+ TARGET_C_PROPERTIES (h5_api_test_parallel SHARED)
+ target_link_libraries (
+ h5_api_test_parallel
+ PRIVATE
+ ${HDF5_TEST_LIBSH_TARGET}
+ ${HDF5_LIBSH_TARGET}
+ "$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:MPI::MPI_C>"
+ )
+endif ()
+set_target_properties (
+ h5_api_test_parallel
+ PROPERTIES
+ FOLDER test/par/API
+)
+# Add Target to clang-format
+if (HDF5_ENABLE_FORMATTERS)
+ clang_format (HDF5_TEST_h5_api_test_parallel_FORMAT h5_api_test_parallel)
+endif ()
+
+#-----------------------------------------------------------------------------
+# Build the ported HDF5 test executables
+#-----------------------------------------------------------------------------
+foreach (api_test_extra ${HDF5_API_PAR_TESTS_EXTRA})
+ unset (HDF5_API_PAR_TEST_EXTRA_SRCS)
+
+ set (HDF5_API_PAR_TEST_EXTRA_SRCS
+ ${HDF5_API_PAR_TEST_EXTRA_SRCS}
+ ${CMAKE_CURRENT_SOURCE_DIR}/${api_test_extra}.c
+ )
+
+ if (${api_test_extra} STREQUAL "testphdf5")
+ set (HDF5_API_PAR_TEST_EXTRA_SRCS
+ ${HDF5_API_PAR_TEST_EXTRA_SRCS}
+ ${CMAKE_CURRENT_SOURCE_DIR}/t_ph5basic.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/t_file.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/t_dset.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/t_mdset.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/t_coll_chunk.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/t_span_tree.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/t_prop.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/t_file_image.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/t_coll_md_read.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/t_chunk_alloc.c
+ ${CMAKE_CURRENT_SOURCE_DIR}/t_filter_read.c
+ )
+ endif ()
+
+ add_executable (h5_api_test_parallel_${api_test_extra} ${HDF5_API_PAR_TEST_EXTRA_SRCS})
+ target_include_directories (
+ h5_api_test_parallel_${api_test_extra}
+ PRIVATE
+ "${HDF5_SRC_INCLUDE_DIRS}"
+ "${HDF5_TEST_PAR_DIR}"
+ "${HDF5_TEST_API_SRC_DIR}"
+ "${HDF5_TEST_API_PAR_SRC_DIR}"
+ "${HDF5_SRC_BINARY_DIR}"
+ "${HDF5_TEST_BINARY_DIR}"
+ "$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>"
+ )
+ target_compile_options (
+ h5_api_test_parallel_${api_test_extra}
+ PRIVATE
+ "${HDF5_CMAKE_C_FLAGS}"
+ )
+ target_compile_definitions (
+ h5_api_test_parallel_${api_test_extra}
+ PRIVATE
+ $<$<CONFIG:Developer>:${HDF5_DEVELOPER_DEFS}>
+ )
+ if (NOT BUILD_SHARED_LIBS)
+ TARGET_C_PROPERTIES (h5_api_test_parallel_${api_test_extra} STATIC)
+ target_link_libraries (
+ h5_api_test_parallel_${api_test_extra}
+ PRIVATE
+ ${HDF5_TEST_LIB_TARGET}
+ ${HDF5_LIB_TARGET}
+ "$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:MPI::MPI_C>"
+ )
+ else ()
+ TARGET_C_PROPERTIES (h5_api_test_parallel_${api_test_extra} SHARED)
+ target_link_libraries (
+ h5_api_test_parallel_${api_test_extra}
+ PRIVATE
+ ${HDF5_TEST_LIBSH_TARGET}
+ ${HDF5_LIBSH_TARGET}
+ "$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:MPI::MPI_C>"
+ )
+ endif ()
+ set_target_properties (
+ h5_api_test_parallel_${api_test_extra}
+ PROPERTIES
+ FOLDER test/par/API
+ )
+ # Add Target to clang-format
+ if (HDF5_ENABLE_FORMATTERS)
+ clang_format (HDF5_TEST_h5_api_test_parallel_${api_test_extra}_FORMAT h5_api_test_parallel_${api_test_extra})
+ endif ()
+endforeach ()
+
+#-----------------------------------------------------------------------------
+# Add tests if HDF5 parallel testing is enabled
+#-----------------------------------------------------------------------------
+if (HDF5_TEST_PARALLEL)
+ if (HDF5_TEST_API_ENABLE_DRIVER)
+ if ("${HDF5_TEST_API_SERVER}" STREQUAL "")
+ message (FATAL_ERROR "Please set HDF5_TEST_API_SERVER to point to a server executable for the test driver program.")
+ endif ()
+
+ # Driver options
+ if (HDF5_TEST_API_SERVER_ALLOW_ERRORS)
+ set (HDF5_TEST_API_DRIVER_EXTRA_FLAGS --allow-server-errors)
+ endif ()
+ if (HDF5_TEST_API_CLIENT_HELPER)
+ set (HDF5_TEST_API_DRIVER_EXTRA_FLAGS ${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
+ --client-helper ${HDF5_TEST_API_CLIENT_HELPER}
+ )
+ endif ()
+ if (HDF5_TEST_API_CLIENT_INIT)
+ set (HDF5_TEST_API_DRIVER_EXTRA_FLAGS ${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
+ --client-init ${HDF5_TEST_API_CLIENT_INIT}
+ )
+ endif ()
+
+ set(last_api_test "")
+ foreach (api_test ${HDF5_API_TESTS})
+ add_test (
+ NAME "h5_api_test_parallel_${api_test}"
+ COMMAND $<TARGET_FILE:h5_api_test_driver>
+ --server ${HDF5_TEST_API_SERVER}
+ --client $<TARGET_FILE:h5_api_test_parallel> "${api_test}"
+ --serial
+ ${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
+ )
+
+ set_tests_properties("h5_api_test_parallel_${api_test}" PROPERTIES DEPENDS "${last_api_test}")
+
+ set(last_api_test "h5_api_test_parallel_${api_test}")
+ endforeach ()
+
+ foreach (hdf5_test ${HDF5_API_PAR_TESTS_EXTRA})
+ add_test (
+ NAME "h5_api_test_parallel_${hdf5_test}"
+ COMMAND $<TARGET_FILE:h5_api_test_driver>
+ --server ${HDF5_TEST_API_SERVER}
+ --client $<TARGET_FILE:h5_api_test_parallel_${hdf5_test}>
+ --serial
+ ${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
+ )
+ endforeach ()
+
+ # Hook external tests to same test suite
+ foreach (ext_api_test ${HDF5_API_EXT_PARALLEL_TESTS})
+ add_test (
+ NAME "h5_api_ext_test_parallel_${ext_api_test}"
+ COMMAND $<TARGET_FILE:h5_api_test_driver>
+ --server ${HDF5_TEST_API_SERVER}
+ --client $<TARGET_FILE:${ext_api_test}>
+ --serial
+ ${HDF5_TEST_API_DRIVER_EXTRA_FLAGS}
+ )
+ endforeach ()
+ else ()
+ set(last_api_test "")
+ foreach (api_test ${HDF5_API_TESTS})
+ add_test (
+ NAME "h5_api_test_parallel_${api_test}"
+ COMMAND ${MPIEXEC} ${MPIEXEC_NUMPROC_FLAG} ${MPIEXEC_MAX_NUMPROCS}
+ ${MPIEXEC_PREFLAGS} $<TARGET_FILE:h5_api_test_parallel> "${api_test}"
+ ${MPIEXEC_POSTFLAGS}
+ )
+
+ set_tests_properties("h5_api_test_parallel_${api_test}" PROPERTIES DEPENDS "${last_api_test}")
+
+ set(last_api_test "h5_api_test_parallel_${api_test}")
+ endforeach ()
+
+ foreach (hdf5_test ${HDF5_API_PAR_TESTS_EXTRA})
+ add_test (
+ NAME "h5_api_test_parallel_${hdf5_test}"
+ COMMAND ${MPIEXEC} ${MPIEXEC_NUMPROC_FLAG} ${MPIEXEC_MAX_NUMPROCS}
+ ${MPIEXEC_PREFLAGS} $<TARGET_FILE:h5_api_test_parallel_${hdf5_test}>
+ ${MPIEXEC_POSTFLAGS}
+ )
+ endforeach ()
+ endif ()
+endif ()
diff --git a/testpar/API/H5_api_async_test_parallel.c b/testpar/API/H5_api_async_test_parallel.c
new file mode 100644
index 0000000..dcb5e8d
--- /dev/null
+++ b/testpar/API/H5_api_async_test_parallel.c
@@ -0,0 +1,3668 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_async_test_parallel.h"
+
+#ifdef H5ESpublic_H
+
+static int test_one_dataset_io(void);
+static int test_multi_dataset_io(void);
+static int test_multi_file_dataset_io(void);
+static int test_multi_file_grp_dset_io(void);
+static int test_set_extent(void);
+static int test_attribute_exists(void);
+static int test_attribute_io(void);
+static int test_attribute_io_tconv(void);
+static int test_attribute_io_compound(void);
+static int test_group(void);
+static int test_link(void);
+static int test_ocopy_orefresh(void);
+static int test_file_reopen(void);
+
+/*
+ * The array of parallel async tests to be performed.
+ * Each entry returns 0 on success, non-zero on failure.
+ */
+static int (*par_async_tests[])(void) = {
+ test_one_dataset_io,
+ test_multi_dataset_io,
+ test_multi_file_dataset_io,
+ test_multi_file_grp_dset_io,
+ test_set_extent,
+ test_attribute_exists,
+ test_attribute_io,
+ test_attribute_io_tconv,
+ test_attribute_io_compound,
+ test_group,
+ test_link,
+ test_ocopy_orefresh,
+ test_file_reopen,
+};
+
+/* Whether the MPI file access property lists request collective metadata reads */
+hbool_t coll_metadata_read = TRUE;
+
+/* Highest "printf" file created (starting at 0) */
+int max_printf_file = -1;
+
+/*
+ * Create file and dataset. Each rank writes to a portion
+ * of the dataset.
+ */
+#define ONE_DATASET_IO_TEST_SPACE_RANK 2
+/* Returns 0 on success, 1 on failure. Exercises four async-synchronization
+ * strategies (H5ESwait, H5Dclose, H5Oflush_async, H5Fclose) on one dataset. */
+static int
+test_one_dataset_io(void)
+{
+ hsize_t *dims = NULL;
+ hsize_t start[ONE_DATASET_IO_TEST_SPACE_RANK];
+ hsize_t stride[ONE_DATASET_IO_TEST_SPACE_RANK];
+ hsize_t count[ONE_DATASET_IO_TEST_SPACE_RANK];
+ hsize_t block[ONE_DATASET_IO_TEST_SPACE_RANK];
+ hbool_t op_failed = false;
+ hbool_t is_native_vol = false;
+ size_t i, data_size, num_in_progress;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ hid_t es_id = H5I_INVALID_HID;
+ int *write_buf = NULL;
+ int *read_buf = NULL;
+
+ TESTING_MULTIPART("single dataset I/O")
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+ if (MAINPROCESS) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, dataset, or flush aren't supported with this connector\n");
+ }
+
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, coll_metadata_read)) < 0)
+ TEST_ERROR;
+
+ /* Create dataspace */
+ if (generate_random_parallel_dimensions(ONE_DATASET_IO_TEST_SPACE_RANK, &dims) < 0)
+ TEST_ERROR;
+
+ if ((space_id = H5Screate_simple(ONE_DATASET_IO_TEST_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ /* Create event stack */
+ if ((es_id = H5EScreate()) < 0)
+ TEST_ERROR;
+
+ /* Create file asynchronously */
+ if ((file_id = H5Fcreate_async(PAR_ASYNC_API_TEST_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Find out if the native connector is used */
+ if (H5VLobject_is_native(file_id, &is_native_vol) < 0)
+ TEST_ERROR;
+
+ /* Create the dataset asynchronously */
+ if ((dset_id = H5Dcreate_async(file_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Calculate size of data buffers - first dimension is skipped in calculation
+ * because each rank only touches a single slab along dimension 0 */
+ for (i = 1, data_size = 1; i < ONE_DATASET_IO_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= sizeof(int);
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ TEST_ERROR;
+ }
+
+ if (NULL == (read_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ TEST_ERROR;
+ }
+
+ /* Select this rank's portion of the dataspace:
+ * dimension 0 is partitioned one slab per MPI rank; every other
+ * dimension is selected in full */
+ for (i = 0; i < ONE_DATASET_IO_TEST_SPACE_RANK; i++) {
+ if (i == 0) {
+ start[i] = (hsize_t)mpi_rank;
+ block[i] = 1;
+ }
+ else {
+ start[i] = 0;
+ block[i] = dims[i];
+ }
+
+ stride[i] = 1;
+ count[i] = 1;
+ }
+
+ if (H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't select hyperslab for dataset write\n");
+ goto error;
+ }
+
+ /* Setup memory space for write_buf */
+ {
+ /* 1-D memory space with exactly as many elements as this rank's
+ * file-space selection */
+ hsize_t mdims[] = {data_size / sizeof(int)};
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ goto error;
+ }
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(single_dset_eswait)
+ {
+ TESTING_2("synchronization using H5ESwait()");
+
+ /* Initialize write_buf */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ ((int *)write_buf)[i] = mpi_rank;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT, write_buf, es_id) <
+ 0)
+ PART_TEST_ERROR(single_dset_eswait);
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(single_dset_eswait);
+ if (op_failed)
+ PART_TEST_ERROR(single_dset_eswait);
+
+ /* Read the dataset asynchronously */
+ if (H5Dread_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT, read_buf, es_id) < 0)
+ PART_TEST_ERROR(single_dset_eswait);
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(single_dset_eswait);
+ if (op_failed)
+ PART_TEST_ERROR(single_dset_eswait);
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(single_dset_eswait);
+ } /* end if */
+
+ PASSED();
+ }
+ PART_END(single_dset_eswait);
+
+ PART_BEGIN(single_dset_dclose)
+ {
+ TESTING_2("synchronization using H5Dclose()");
+
+ /* Initialize write_buf */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ ((int *)write_buf)[i] = (int)i;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT, write_buf, es_id) <
+ 0)
+ PART_TEST_ERROR(single_dset_dclose);
+
+ /* Close the dataset synchronously; this implicitly waits for the
+ * pending async write on the dataset to finish */
+ if (H5Dclose(dset_id) < 0)
+ PART_TEST_ERROR(single_dset_dclose);
+
+ /* Re-open the dataset asynchronously */
+ if ((dset_id = H5Dopen_async(file_id, "dset", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(single_dset_dclose);
+
+ /* Read the dataset asynchronously */
+ if (H5Dread_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT, read_buf, es_id) < 0)
+ PART_TEST_ERROR(single_dset_dclose);
+
+ /* Close the dataset synchronously */
+ if (H5Dclose(dset_id) < 0)
+ PART_TEST_ERROR(single_dset_dclose);
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(single_dset_dclose);
+ } /* end if */
+
+ /* Re-open the dataset asynchronously (left open for the next part) */
+ if ((dset_id = H5Dopen_async(file_id, "dset", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(single_dset_dclose);
+
+ PASSED();
+ }
+ PART_END(single_dset_dclose);
+
+ PART_BEGIN(single_dset_dflush)
+ {
+ TESTING_2("synchronization using H5Oflush_async()");
+
+ /* Initialize write_buf */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ ((int *)write_buf)[i] = 10 * (int)i;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT, write_buf, es_id) <
+ 0)
+ PART_TEST_ERROR(single_dset_dflush);
+
+ /* Flush the dataset asynchronously. This will effectively work as a
+ * barrier, guaranteeing the read takes place after the write. Skip this
+ * function because it isn't supported for the native vol in parallel. */
+ if (!is_native_vol && H5Oflush_async(dset_id, es_id) < 0)
+ PART_TEST_ERROR(single_dset_dflush);
+
+ /* Read the dataset asynchronously */
+ if (H5Dread_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT, read_buf, es_id) < 0)
+ PART_TEST_ERROR(single_dset_dflush);
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(single_dset_dflush);
+ if (op_failed)
+ PART_TEST_ERROR(single_dset_dflush);
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(single_dset_dflush);
+ } /* end if */
+
+ PASSED();
+ }
+ PART_END(single_dset_dflush);
+
+ PART_BEGIN(single_dset_fclose)
+ {
+ TESTING_2("synchronization using H5Fclose()");
+
+ /* Initialize write_buf */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ ((int *)write_buf)[i] = (int)i + 5;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT, write_buf, es_id) <
+ 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id, es_id) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Close the file synchronously; this implicitly completes all pending
+ * async operations on objects in the file */
+ if (H5Fclose(file_id) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Reopen the file asynchronously. */
+ if ((file_id = H5Fopen_async(PAR_ASYNC_API_TEST_FILE, H5F_ACC_RDONLY, fapl_id, es_id)) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Re-open the dataset asynchronously */
+ if ((dset_id = H5Dopen_async(file_id, "dset", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Read the dataset asynchronously */
+ if (H5Dread_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT, read_buf, es_id) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id, es_id) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Close the file synchronously */
+ if (H5Fclose(file_id) < 0)
+ PART_TEST_ERROR(single_dset_fclose);
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(single_dset_fclose);
+ } /* end if */
+
+ PASSED();
+ }
+ PART_END(single_dset_fclose);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ /* Wait for the event stack to complete before freeing buffers that async
+ * operations may still reference */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ TEST_ERROR;
+ if (op_failed)
+ TEST_ERROR;
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(mspace_id) < 0)
+ TEST_ERROR;
+ if (H5ESclose(es_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ /* NOTE(review): buffers are freed before H5ESwait() drains the event
+ * stack; any still-pending async I/O could reference freed memory —
+ * confirm this ordering is safe */
+ if (read_buf)
+ HDfree(read_buf);
+ if (write_buf)
+ HDfree(write_buf);
+ if (dims)
+ HDfree(dims);
+ H5Sclose(space_id);
+ H5Sclose(mspace_id);
+ H5Dclose(dset_id);
+ H5Pclose(fapl_id);
+ H5Fclose(file_id);
+ H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+ H5ESclose(es_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+#undef ONE_DATASET_IO_TEST_SPACE_RANK
+
+/*
+ * Create file and multiple datasets. Each rank writes to a
+ * portion of each dataset and reads back their portion of
+ * each dataset.
+ */
+#define MULTI_DATASET_IO_TEST_SPACE_RANK 2
+#define MULTI_DATASET_IO_TEST_NDSETS 5
+/* Returns 0 on success, 1 on failure. Exercises async I/O to several datasets
+ * in one file, both with all datasets held open and with datasets closed
+ * between operations. */
+static int
+test_multi_dataset_io(void)
+{
+ hsize_t *dims = NULL;
+ hsize_t start[MULTI_DATASET_IO_TEST_SPACE_RANK];
+ hsize_t stride[MULTI_DATASET_IO_TEST_SPACE_RANK];
+ hsize_t count[MULTI_DATASET_IO_TEST_SPACE_RANK];
+ hsize_t block[MULTI_DATASET_IO_TEST_SPACE_RANK];
+ hbool_t op_failed; /* set by H5ESwait() before first use */
+ size_t i, j, data_size, num_in_progress;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t dset_id[MULTI_DATASET_IO_TEST_NDSETS] = {H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID,
+ H5I_INVALID_HID, H5I_INVALID_HID};
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ hid_t es_id = H5I_INVALID_HID;
+ char dset_name[32];
+ int *write_buf = NULL;
+ int *read_buf = NULL;
+
+ TESTING_MULTIPART("multi dataset I/O")
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+ if (MAINPROCESS) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, dataset, or flush aren't supported with this connector\n");
+ }
+
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, coll_metadata_read)) < 0)
+ TEST_ERROR;
+
+ /* Create dataspace */
+ if (generate_random_parallel_dimensions(MULTI_DATASET_IO_TEST_SPACE_RANK, &dims) < 0)
+ TEST_ERROR;
+
+ /* Create dataspace */
+ if ((space_id = H5Screate_simple(MULTI_DATASET_IO_TEST_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ /* Create event stack */
+ if ((es_id = H5EScreate()) < 0)
+ TEST_ERROR;
+
+ /* Create file asynchronously */
+ if ((file_id = H5Fcreate_async(PAR_ASYNC_API_TEST_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Calculate size of data buffers - first dimension is skipped in calculation.
+ * The buffers hold one slice per dataset, so multiply by the dataset count. */
+ for (i = 1, data_size = 1; i < MULTI_DATASET_IO_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= sizeof(int);
+ data_size *= MULTI_DATASET_IO_TEST_NDSETS;
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ TEST_ERROR;
+ }
+
+ if (NULL == (read_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ TEST_ERROR;
+ }
+
+ /* Select this rank's portion of the dataspace:
+ * dimension 0 is partitioned one slab per MPI rank; every other
+ * dimension is selected in full */
+ for (i = 0; i < MULTI_DATASET_IO_TEST_SPACE_RANK; i++) {
+ if (i == 0) {
+ start[i] = (hsize_t)mpi_rank;
+ block[i] = 1;
+ }
+ else {
+ start[i] = 0;
+ block[i] = dims[i];
+ }
+
+ stride[i] = 1;
+ count[i] = 1;
+ }
+
+ if (H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't select hyperslab for dataset write\n");
+ goto error;
+ }
+
+ /* Setup memory space for write_buf */
+ {
+ /* 1-D memory space sized for a single dataset's per-rank slice */
+ hsize_t mdims[] = {data_size / MULTI_DATASET_IO_TEST_NDSETS / sizeof(int)};
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ goto error;
+ }
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(multi_dset_open)
+ {
+ size_t buf_start_idx;
+
+ TESTING_2("keeping datasets open");
+
+ /* Loop over datasets */
+ for (i = 0; i < MULTI_DATASET_IO_TEST_NDSETS; i++) {
+ size_t buf_end_idx;
+
+ /* Set dataset name */
+ sprintf(dset_name, "dset%d", (int)i);
+
+ /* Create the dataset asynchronously */
+ if ((dset_id[i] = H5Dcreate_async(file_id, dset_name, H5T_NATIVE_INT, space_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_dset_open);
+
+ /* Initialize write_buf. Must use a new slice of write_buf for
+ * each dset since we can't overwrite the buffers until I/O is done. */
+ buf_start_idx = i * (data_size / MULTI_DATASET_IO_TEST_NDSETS / sizeof(int));
+ buf_end_idx = buf_start_idx + (data_size / MULTI_DATASET_IO_TEST_NDSETS / sizeof(int));
+ for (j = buf_start_idx; j < buf_end_idx; j++)
+ ((int *)write_buf)[j] = mpi_rank;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id[i], H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &write_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_dset_open);
+ } /* end for */
+
+ /* Flush the file asynchronously. This will effectively work as a
+ * barrier, guaranteeing the read takes place after the write. */
+ if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+ PART_TEST_ERROR(multi_dset_open);
+
+ /* Loop over datasets */
+ for (i = 0; i < MULTI_DATASET_IO_TEST_NDSETS; i++) {
+ buf_start_idx = i * (data_size / MULTI_DATASET_IO_TEST_NDSETS / sizeof(int));
+
+ /* Read the dataset asynchronously */
+ if (H5Dread_async(dset_id[i], H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &read_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_dset_open);
+ } /* end for */
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(multi_dset_open);
+ if (op_failed)
+ PART_TEST_ERROR(multi_dset_open);
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(multi_dset_open);
+ } /* end if */
+
+ /* Close the datasets */
+ for (i = 0; i < MULTI_DATASET_IO_TEST_NDSETS; i++)
+ if (H5Dclose(dset_id[i]) < 0)
+ PART_TEST_ERROR(multi_dset_open);
+
+ PASSED();
+ }
+ PART_END(multi_dset_open);
+
+ PART_BEGIN(multi_dset_close)
+ {
+ size_t buf_start_idx;
+
+ TESTING_2("closing datasets between I/O");
+
+ /* Loop over datasets. dset_id[0] is reused as a scratch handle here
+ * since each dataset is closed before the next is opened. */
+ for (i = 0; i < MULTI_DATASET_IO_TEST_NDSETS; i++) {
+ size_t buf_end_idx;
+
+ /* Set dataset name */
+ sprintf(dset_name, "dset%d", (int)i);
+
+ /* Open the dataset asynchronously */
+ if ((dset_id[0] = H5Dopen_async(file_id, dset_name, H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_dset_close);
+
+ /* Initialize write_buf. */
+ buf_start_idx = i * (data_size / MULTI_DATASET_IO_TEST_NDSETS / sizeof(int));
+ buf_end_idx = buf_start_idx + (data_size / MULTI_DATASET_IO_TEST_NDSETS / sizeof(int));
+ for (j = buf_start_idx; j < buf_end_idx; j++)
+ ((int *)write_buf)[j] = mpi_rank * 10;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id[0], H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &write_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_dset_close);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id[0], es_id) < 0)
+ PART_TEST_ERROR(multi_dset_close);
+ } /* end for */
+
+ /* Flush the file asynchronously. This will effectively work as a
+ * barrier, guaranteeing the read takes place after the write. */
+ if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+ PART_TEST_ERROR(multi_dset_close);
+
+ /* Loop over datasets */
+ for (i = 0; i < MULTI_DATASET_IO_TEST_NDSETS; i++) {
+ /* Set dataset name */
+ sprintf(dset_name, "dset%d", (int)i);
+
+ /* Open the dataset asynchronously */
+ if ((dset_id[0] = H5Dopen_async(file_id, dset_name, H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_dset_close);
+
+ /* Read the dataset asynchronously */
+ buf_start_idx = i * (data_size / MULTI_DATASET_IO_TEST_NDSETS / sizeof(int));
+ if (H5Dread_async(dset_id[0], H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &read_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_dset_close);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id[0], es_id) < 0)
+ PART_TEST_ERROR(multi_dset_close);
+ } /* end for */
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(multi_dset_close);
+ if (op_failed)
+ PART_TEST_ERROR(multi_dset_close);
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(multi_dset_close);
+ } /* end if */
+
+ PASSED();
+ }
+ PART_END(multi_dset_close);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ /* Wait for the event stack to complete before freeing buffers that async
+ * operations may still reference */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ TEST_ERROR;
+ if (op_failed)
+ TEST_ERROR;
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(mspace_id) < 0)
+ TEST_ERROR;
+ if (H5ESclose(es_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ /* NOTE(review): buffers are freed before H5ESwait() drains the event
+ * stack; any still-pending async I/O could reference freed memory —
+ * confirm this ordering is safe */
+ if (read_buf)
+ HDfree(read_buf);
+ if (write_buf)
+ HDfree(write_buf);
+ if (dims)
+ HDfree(dims);
+ H5Sclose(space_id);
+ H5Sclose(mspace_id);
+ for (i = 0; i < MULTI_DATASET_IO_TEST_NDSETS; i++)
+ H5Dclose(dset_id[i]);
+ H5Pclose(fapl_id);
+ H5Fclose(file_id);
+ H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+ H5ESclose(es_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+#undef MULTI_DATASET_IO_TEST_SPACE_RANK
+#undef MULTI_DATASET_IO_TEST_NDSETS
+
+/*
+ * Create multiple files, each with a single dataset. Each rank writes
+ * to a portion of each dataset and reads from a portion of each dataset.
+ */
+#define MULTI_FILE_DATASET_IO_TEST_SPACE_RANK 2
+#define MULTI_FILE_DATASET_IO_TEST_NFILES 5
+static int
+test_multi_file_dataset_io(void)
+{
+ hsize_t *dims = NULL;
+ hsize_t start[MULTI_FILE_DATASET_IO_TEST_SPACE_RANK];
+ hsize_t stride[MULTI_FILE_DATASET_IO_TEST_SPACE_RANK];
+ hsize_t count[MULTI_FILE_DATASET_IO_TEST_SPACE_RANK];
+ hsize_t block[MULTI_FILE_DATASET_IO_TEST_SPACE_RANK];
+ hbool_t op_failed = false;
+ hbool_t is_native_vol = false;
+ size_t i, j, data_size, num_in_progress;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t file_id[MULTI_FILE_DATASET_IO_TEST_NFILES] = {H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID,
+ H5I_INVALID_HID, H5I_INVALID_HID};
+ hid_t dset_id[MULTI_FILE_DATASET_IO_TEST_NFILES] = {H5I_INVALID_HID, H5I_INVALID_HID, H5I_INVALID_HID,
+ H5I_INVALID_HID, H5I_INVALID_HID};
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ hid_t es_id = H5I_INVALID_HID;
+ char file_name[32];
+ int *write_buf = NULL;
+ int *read_buf = NULL;
+
+ TESTING_MULTIPART("multi file dataset I/O")
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+ if (MAINPROCESS) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, dataset, or flush aren't supported with this connector\n");
+ }
+
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, coll_metadata_read)) < 0)
+ TEST_ERROR;
+
+ /* Create dataspace */
+ if (generate_random_parallel_dimensions(MULTI_FILE_DATASET_IO_TEST_SPACE_RANK, &dims) < 0)
+ TEST_ERROR;
+
+ /* Create dataspace */
+ if ((space_id = H5Screate_simple(MULTI_FILE_DATASET_IO_TEST_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ /* Create event stack */
+ if ((es_id = H5EScreate()) < 0)
+ TEST_ERROR;
+
+ /* Calculate size of data buffers - first dimension is skipped in calculation */
+ for (i = 1, data_size = 1; i < MULTI_FILE_DATASET_IO_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= sizeof(int);
+ data_size *= MULTI_FILE_DATASET_IO_TEST_NFILES;
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ TEST_ERROR;
+ }
+
+ if (NULL == (read_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ TEST_ERROR;
+ }
+
+ /* Select this rank's portion of the dataspace */
+ for (i = 0; i < MULTI_FILE_DATASET_IO_TEST_SPACE_RANK; i++) {
+ if (i == 0) {
+ start[i] = (hsize_t)mpi_rank;
+ block[i] = 1;
+ }
+ else {
+ start[i] = 0;
+ block[i] = dims[i];
+ }
+
+ stride[i] = 1;
+ count[i] = 1;
+ }
+
+ if (H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't select hyperslab for dataset write\n");
+ goto error;
+ }
+
+ /* Setup memory space for write_buf */
+ {
+ hsize_t mdims[] = {data_size / MULTI_FILE_DATASET_IO_TEST_NFILES / sizeof(int)};
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ goto error;
+ }
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(multi_file_dset_open)
+ {
+ size_t buf_start_idx;
+
+ TESTING_2("keeping files and datasets open");
+
+ /* Loop over files */
+ for (i = 0; i < MULTI_FILE_DATASET_IO_TEST_NFILES; i++) {
+ size_t buf_end_idx;
+
+ /* Set file name */
+ sprintf(file_name, PAR_ASYNC_API_TEST_FILE_PRINTF, (int)i);
+
+ /* Create file asynchronously */
+ if ((file_id[i] = H5Fcreate_async(file_name, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_dset_open);
+ if ((int)i > max_printf_file)
+ max_printf_file = (int)i;
+
+ /* Create the dataset asynchronously */
+ if ((dset_id[i] = H5Dcreate_async(file_id[i], "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_dset_open);
+
+ /* Initialize write_buf. Must use a new slice of write_buf for
+ * each dset since we can't overwrite the buffers until I/O is done. */
+ buf_start_idx = i * (data_size / MULTI_FILE_DATASET_IO_TEST_NFILES / sizeof(int));
+ buf_end_idx = buf_start_idx + (data_size / MULTI_FILE_DATASET_IO_TEST_NFILES / sizeof(int));
+ for (j = buf_start_idx; j < buf_end_idx; j++)
+ ((int *)write_buf)[j] = mpi_rank;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id[i], H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &write_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_open);
+ } /* end for */
+
+ /* Find out if the native connector is used */
+ if (H5VLobject_is_native(file_id[0], &is_native_vol) < 0)
+ PART_TEST_ERROR(multi_file_dset_open);
+
+ /* Loop over files */
+ for (i = 0; i < MULTI_FILE_DATASET_IO_TEST_NFILES; i++) {
+ /* Flush the dataset asynchronously. This will effectively work as a
+ * barrier, guaranteeing the read takes place after the write. Skip this
+ * function because it isn't supported for the native vol in parallel. */
+ if (!is_native_vol && H5Oflush_async(dset_id[i], es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_open);
+
+ /* Read the dataset asynchronously */
+ buf_start_idx = i * (data_size / MULTI_FILE_DATASET_IO_TEST_NFILES / sizeof(int));
+ if (H5Dread_async(dset_id[i], H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &read_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_open);
+ } /* end for */
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(multi_file_dset_open);
+ if (op_failed)
+ PART_TEST_ERROR(multi_file_dset_open);
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(multi_file_dset_open);
+ } /* end if */
+
+ /* Close the datasets */
+ for (i = 0; i < MULTI_FILE_DATASET_IO_TEST_NFILES; i++)
+ if (H5Dclose(dset_id[i]) < 0)
+ PART_TEST_ERROR(multi_file_dset_open);
+
+ PASSED();
+ }
+ PART_END(multi_file_dset_open);
+
+ PART_BEGIN(multi_file_dset_dclose)
+ {
+ size_t buf_start_idx;
+
+ TESTING_2("closing datasets between I/O");
+
+ /* Loop over files */
+ for (i = 0; i < MULTI_FILE_DATASET_IO_TEST_NFILES; i++) {
+ size_t buf_end_idx;
+
+ /* Open the dataset asynchronously */
+ if ((dset_id[0] = H5Dopen_async(file_id[i], "dset", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_dset_dclose);
+
+ /* Initialize write_buf. */
+ buf_start_idx = i * (data_size / MULTI_FILE_DATASET_IO_TEST_NFILES / sizeof(int));
+ buf_end_idx = buf_start_idx + (data_size / MULTI_FILE_DATASET_IO_TEST_NFILES / sizeof(int));
+ for (j = buf_start_idx; j < buf_end_idx; j++)
+ ((int *)write_buf)[j] = mpi_rank * 10;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id[0], H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &write_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_dclose);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id[0], es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_dclose);
+ } /* end for */
+
+ /* Loop over files */
+ for (i = 0; i < MULTI_FILE_DATASET_IO_TEST_NFILES; i++) {
+ /* Flush the file asynchronously. This will effectively work as a
+ * barrier, guaranteeing the read takes place after the write. */
+ if (H5Fflush_async(file_id[i], H5F_SCOPE_LOCAL, es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_dclose);
+
+ /* Open the dataset asynchronously */
+ if ((dset_id[0] = H5Dopen_async(file_id[i], "dset", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_dset_dclose);
+
+ /* Read the dataset asynchronously */
+ buf_start_idx = i * (data_size / MULTI_FILE_DATASET_IO_TEST_NFILES / sizeof(int));
+ if (H5Dread_async(dset_id[0], H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &read_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_dclose);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id[0], es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_dclose);
+ } /* end for */
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(multi_file_dset_dclose);
+ if (op_failed)
+ PART_TEST_ERROR(multi_file_dset_dclose);
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(multi_file_dset_dclose);
+ } /* end if */
+
+ /* Close the files */
+ for (i = 0; i < MULTI_FILE_DATASET_IO_TEST_NFILES; i++)
+ if (H5Fclose(file_id[i]) < 0)
+ PART_TEST_ERROR(multi_file_dset_dclose);
+
+ PASSED();
+ }
+ PART_END(multi_file_dset_dclose);
+
+ PART_BEGIN(multi_file_dset_fclose)
+ {
+ size_t buf_start_idx;
+
+ TESTING_2("closing files between I/O");
+
+ /* Loop over files */
+ for (i = 0; i < MULTI_FILE_DATASET_IO_TEST_NFILES; i++) {
+ size_t buf_end_idx;
+
+ /* Set file name */
+ sprintf(file_name, PAR_ASYNC_API_TEST_FILE_PRINTF, (int)i);
+
+ /* Open the file asynchronously */
+ if ((file_id[0] = H5Fopen_async(file_name, H5F_ACC_RDWR, fapl_id, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+
+ /* Open the dataset asynchronously */
+ if ((dset_id[0] = H5Dopen_async(file_id[0], "dset", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+
+ /* Initialize write_buf. */
+ buf_start_idx = i * (data_size / MULTI_FILE_DATASET_IO_TEST_NFILES / sizeof(int));
+ buf_end_idx = buf_start_idx + (data_size / MULTI_FILE_DATASET_IO_TEST_NFILES / sizeof(int));
+ for (j = buf_start_idx; j < buf_end_idx; j++)
+ ((int *)write_buf)[j] = mpi_rank + 5;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id[0], H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &write_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id[0], es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+
+ /* Close the file asynchronously */
+ if (H5Fclose_async(file_id[0], es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+ } /* end for */
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+ if (op_failed)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+
+ /* Loop over files */
+ for (i = 0; i < MULTI_FILE_DATASET_IO_TEST_NFILES; i++) {
+ /* Set file name */
+ sprintf(file_name, PAR_ASYNC_API_TEST_FILE_PRINTF, (int)i);
+
+ /* Open the file asynchronously */
+ if ((file_id[0] = H5Fopen_async(file_name, H5F_ACC_RDONLY, fapl_id, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+
+ /* Open the dataset asynchronously */
+ if ((dset_id[0] = H5Dopen_async(file_id[0], "dset", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+
+ /* Read the dataset asynchronously */
+ buf_start_idx = i * (data_size / MULTI_FILE_DATASET_IO_TEST_NFILES / sizeof(int));
+ if (H5Dread_async(dset_id[0], H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &read_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id[0], es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+
+ /* Close the file asynchronously */
+ if (H5Fclose_async(file_id[0], es_id) < 0)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+ } /* end for */
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+ if (op_failed)
+ PART_TEST_ERROR(multi_file_dset_fclose);
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(multi_file_dset_fclose);
+ } /* end if */
+
+ PASSED();
+ }
+ PART_END(multi_file_dset_fclose);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ TEST_ERROR;
+ if (op_failed)
+ TEST_ERROR;
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(mspace_id) < 0)
+ TEST_ERROR;
+ if (H5ESclose(es_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (read_buf)
+ HDfree(read_buf);
+ if (write_buf)
+ HDfree(write_buf);
+ if (dims)
+ HDfree(dims);
+ H5Sclose(space_id);
+ H5Sclose(mspace_id);
+ for (i = 0; i < MULTI_FILE_DATASET_IO_TEST_NFILES; i++) {
+ H5Dclose(dset_id[i]);
+ H5Fclose(file_id[i]);
+ }
+ H5Pclose(fapl_id);
+ H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+ H5ESclose(es_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+#undef MULTI_FILE_DATASET_IO_TEST_SPACE_RANK
+#undef MULTI_FILE_DATASET_IO_TEST_NFILES
+
+/*
+ * Create multiple files, each with a single group and dataset. Each rank
+ * writes to a portion of each dataset and reads from a portion of each dataset.
+ */
+#define MULTI_FILE_GRP_DSET_IO_TEST_SPACE_RANK 2
+#define MULTI_FILE_GRP_DSET_IO_TEST_NFILES 5
+static int
+test_multi_file_grp_dset_io(void)
+{
+ hsize_t *dims = NULL;
+ hsize_t start[MULTI_FILE_GRP_DSET_IO_TEST_SPACE_RANK];
+ hsize_t stride[MULTI_FILE_GRP_DSET_IO_TEST_SPACE_RANK];
+ hsize_t count[MULTI_FILE_GRP_DSET_IO_TEST_SPACE_RANK];
+ hsize_t block[MULTI_FILE_GRP_DSET_IO_TEST_SPACE_RANK];
+ hbool_t op_failed;
+ size_t i, j, data_size, num_in_progress;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t grp_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ hid_t es_id = H5I_INVALID_HID;
+ char file_name[32];
+ int *write_buf = NULL;
+ int *read_buf = NULL;
+
+ TESTING_MULTIPART("multi file dataset I/O with groups")
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ if (MAINPROCESS) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ }
+
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, coll_metadata_read)) < 0)
+ TEST_ERROR;
+
+ /* Create dataspace */
+ if (generate_random_parallel_dimensions(MULTI_FILE_GRP_DSET_IO_TEST_SPACE_RANK, &dims) < 0)
+ TEST_ERROR;
+
+ /* Create dataspace */
+ if ((space_id = H5Screate_simple(MULTI_FILE_GRP_DSET_IO_TEST_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ /* Create event stack */
+ if ((es_id = H5EScreate()) < 0)
+ TEST_ERROR;
+
+ /* Calculate size of data buffers - first dimension is skipped in calculation */
+ for (i = 1, data_size = 1; i < MULTI_FILE_GRP_DSET_IO_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= sizeof(int);
+ data_size *= MULTI_FILE_GRP_DSET_IO_TEST_NFILES;
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ TEST_ERROR;
+ }
+
+ if (NULL == (read_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ TEST_ERROR;
+ }
+
+ /* Select this rank's portion of the dataspace */
+ for (i = 0; i < MULTI_FILE_GRP_DSET_IO_TEST_SPACE_RANK; i++) {
+ if (i == 0) {
+ start[i] = (hsize_t)mpi_rank;
+ block[i] = 1;
+ }
+ else {
+ start[i] = 0;
+ block[i] = dims[i];
+ }
+
+ stride[i] = 1;
+ count[i] = 1;
+ }
+
+ if (H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't select hyperslab for dataset write\n");
+ goto error;
+ }
+
+ /* Setup memory space for write_buf */
+ {
+ hsize_t mdims[] = {data_size / MULTI_FILE_GRP_DSET_IO_TEST_NFILES / sizeof(int)};
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ goto error;
+ }
+ }
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(multi_file_grp_dset_no_kick)
+ {
+ size_t buf_start_idx;
+
+ TESTING_2("without intermediate calls to H5ESwait()");
+
+ /* Loop over files */
+ for (i = 0; i < MULTI_FILE_GRP_DSET_IO_TEST_NFILES; i++) {
+ size_t buf_end_idx;
+
+ /* Set file name */
+ sprintf(file_name, PAR_ASYNC_API_TEST_FILE_PRINTF, (int)i);
+
+ /* Create file asynchronously */
+ if ((file_id = H5Fcreate_async(file_name, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+ if ((int)i > max_printf_file)
+ max_printf_file = (int)i;
+
+ /* Create the group asynchronously */
+ if ((grp_id = H5Gcreate_async(file_id, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+ 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+ /* Create the dataset asynchronously */
+ if ((dset_id = H5Dcreate_async(grp_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+ /* Initialize write_buf. Must use a new slice of write_buf for
+ * each dset since we can't overwrite the buffers until I/O is done. */
+ buf_start_idx = i * (data_size / MULTI_FILE_GRP_DSET_IO_TEST_NFILES / sizeof(int));
+ buf_end_idx = buf_start_idx + (data_size / MULTI_FILE_GRP_DSET_IO_TEST_NFILES / sizeof(int));
+ for (j = buf_start_idx; j < buf_end_idx; j++)
+ ((int *)write_buf)[j] = mpi_rank;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &write_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id, es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+ /* Close the group asynchronously */
+ if (H5Gclose_async(grp_id, es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+ /* Close the file asynchronously */
+ if (H5Fclose_async(file_id, es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+ } /* end for */
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+ if (op_failed)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+ /* Loop over files */
+ for (i = 0; i < MULTI_FILE_GRP_DSET_IO_TEST_NFILES; i++) {
+ /* Set file name */
+ sprintf(file_name, PAR_ASYNC_API_TEST_FILE_PRINTF, (int)i);
+
+ /* Open the file asynchronously */
+ if ((file_id = H5Fopen_async(file_name, H5F_ACC_RDONLY, fapl_id, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+ /* Open the group asynchronously */
+ if ((grp_id = H5Gopen_async(file_id, "grp", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+ /* Open the dataset asynchronously */
+ if ((dset_id = H5Dopen_async(grp_id, "dset", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+ /* Read the dataset asynchronously */
+ buf_start_idx = i * (data_size / MULTI_FILE_GRP_DSET_IO_TEST_NFILES / sizeof(int));
+ if (H5Dread_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &read_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id, es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+ /* Close the group asynchronously */
+ if (H5Gclose_async(grp_id, es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+ /* Close the file asynchronously */
+ if (H5Fclose_async(file_id, es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+ } /* end for */
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+ if (op_failed)
+ PART_TEST_ERROR(multi_file_grp_dset_no_kick);
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(multi_file_grp_dset_no_kick);
+ } /* end if */
+
+ PASSED();
+ }
+ PART_END(multi_file_grp_dset_no_kick);
+
+ PART_BEGIN(multi_file_grp_dset_kick)
+ {
+ size_t buf_start_idx;
+
+ TESTING_2("with intermediate calls to H5ESwait() (0 timeout)");
+
+ /* Loop over files */
+ for (i = 0; i < MULTI_FILE_GRP_DSET_IO_TEST_NFILES; i++) {
+ size_t buf_end_idx;
+
+ /* Set file name */
+ sprintf(file_name, PAR_ASYNC_API_TEST_FILE_PRINTF, (int)i);
+
+ /* Create file asynchronously */
+ if ((file_id = H5Fcreate_async(file_name, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+ if ((int)i > max_printf_file)
+ max_printf_file = (int)i;
+
+ /* Create the group asynchronously */
+ if ((grp_id = H5Gcreate_async(file_id, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+ 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Create the dataset asynchronously */
+ if ((dset_id = H5Dcreate_async(grp_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Initialize write_buf. Must use a new slice of write_buf for
+ * each dset since we can't overwrite the buffers until I/O is done. */
+ buf_start_idx = i * (data_size / MULTI_FILE_GRP_DSET_IO_TEST_NFILES / sizeof(int));
+ buf_end_idx = buf_start_idx + (data_size / MULTI_FILE_GRP_DSET_IO_TEST_NFILES / sizeof(int));
+ for (j = buf_start_idx; j < buf_end_idx; j++)
+ ((int *)write_buf)[j] = mpi_rank;
+
+ /* Write the dataset asynchronously */
+ if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &write_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id, es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Close the group asynchronously */
+ if (H5Gclose_async(grp_id, es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Close the file asynchronously */
+ if (H5Fclose_async(file_id, es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Kick the event stack to make progress */
+ if (H5ESwait(es_id, 0, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+ if (op_failed)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+ } /* end for */
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+ if (op_failed)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Loop over files */
+ for (i = 0; i < MULTI_FILE_GRP_DSET_IO_TEST_NFILES; i++) {
+ /* Set file name */
+ sprintf(file_name, PAR_ASYNC_API_TEST_FILE_PRINTF, (int)i);
+
+ /* Open the file asynchronously */
+ if ((file_id = H5Fopen_async(file_name, H5F_ACC_RDONLY, fapl_id, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Open the group asynchronously */
+ if ((grp_id = H5Gopen_async(file_id, "grp", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Open the dataset asynchronously */
+ if ((dset_id = H5Dopen_async(grp_id, "dset", H5P_DEFAULT, es_id)) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Read the dataset asynchronously */
+ buf_start_idx = i * (data_size / MULTI_FILE_GRP_DSET_IO_TEST_NFILES / sizeof(int));
+ if (H5Dread_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT,
+ &read_buf[buf_start_idx], es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Close the dataset asynchronously */
+ if (H5Dclose_async(dset_id, es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Close the group asynchronously */
+ if (H5Gclose_async(grp_id, es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Close the file asynchronously */
+ if (H5Fclose_async(file_id, es_id) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Kick the event stack to make progress */
+ if (H5ESwait(es_id, 0, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+ if (op_failed)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+ } /* end for */
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+ if (op_failed)
+ PART_TEST_ERROR(multi_file_grp_dset_kick);
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ PART_ERROR(multi_file_grp_dset_kick);
+ } /* end if */
+
+ PASSED();
+ }
+ PART_END(multi_file_grp_dset_kick);
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ TEST_ERROR;
+ if (op_failed)
+ TEST_ERROR;
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(mspace_id) < 0)
+ TEST_ERROR;
+ if (H5ESclose(es_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (read_buf)
+ HDfree(read_buf);
+ if (write_buf)
+ HDfree(write_buf);
+ if (dims)
+ HDfree(dims);
+ H5Sclose(space_id);
+ H5Sclose(mspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(grp_id);
+ H5Fclose(file_id);
+ H5Pclose(fapl_id);
+ H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+ H5ESclose(es_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+#undef MULTI_FILE_GRP_DSET_IO_TEST_SPACE_RANK
+#undef MULTI_FILE_GRP_DSET_IO_TEST_NFILES
+
+/*
+ * Creates a single file and dataset, then each rank writes to a portion
+ * of the dataset. Next, the dataset is continually extended in the first
+ * dimension by 1 "row" per mpi rank and partially written to by each rank.
+ * Finally, each rank reads from a portion of the dataset.
+ */
+#define SET_EXTENT_TEST_SPACE_RANK 2
+#define SET_EXTENT_TEST_NUM_EXTENDS 6
+static int
+test_set_extent(void)
+{
+ hsize_t *dims = NULL;
+ hsize_t *maxdims = NULL;
+ hsize_t *cdims = NULL;
+ hsize_t start[SET_EXTENT_TEST_SPACE_RANK];
+ hsize_t stride[SET_EXTENT_TEST_SPACE_RANK];
+ hsize_t count[SET_EXTENT_TEST_SPACE_RANK];
+ hsize_t block[SET_EXTENT_TEST_SPACE_RANK];
+ hbool_t op_failed = false;
+ hbool_t is_native_vol = false;
+ size_t i, j, data_size, num_in_progress;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dcpl_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t space_id_out = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ hid_t es_id = H5I_INVALID_HID;
+ htri_t tri_ret;
+ int *write_buf = NULL;
+ int *read_buf = NULL;
+
+ TESTING("extending dataset");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, dataset, dataset more, or flush aren't supported "
+ "with this connector\n");
+ }
+
+ return 0;
+ }
+
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, coll_metadata_read)) < 0)
+ TEST_ERROR;
+
+ /* Create dataspace */
+ if (generate_random_parallel_dimensions(SET_EXTENT_TEST_SPACE_RANK, &dims) < 0)
+ TEST_ERROR;
+
+ if (NULL == (maxdims = HDmalloc(SET_EXTENT_TEST_SPACE_RANK * sizeof(hsize_t)))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate max dataspace dimension buffer\n");
+ TEST_ERROR;
+ }
+
+ if (NULL == (cdims = HDmalloc(SET_EXTENT_TEST_SPACE_RANK * sizeof(hsize_t)))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate chunk dimension buffer\n");
+ TEST_ERROR;
+ }
+
+ for (i = 0; i < SET_EXTENT_TEST_SPACE_RANK; i++) {
+ maxdims[i] = (i == 0) ? dims[i] + (hsize_t)(SET_EXTENT_TEST_NUM_EXTENDS * mpi_size) : dims[i];
+ cdims[i] = (dims[i] == 1) ? 1 : dims[i] / 2;
+ }
+
+ /* Create file dataspace */
+ if ((space_id = H5Screate_simple(SET_EXTENT_TEST_SPACE_RANK, dims, maxdims)) < 0)
+ TEST_ERROR;
+
+ /* Create DCPL */
+ if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR;
+
+ /* Set chunking */
+ if (H5Pset_chunk(dcpl_id, SET_EXTENT_TEST_SPACE_RANK, cdims) < 0)
+ TEST_ERROR;
+
+ /* Create event stack */
+ if ((es_id = H5EScreate()) < 0)
+ TEST_ERROR;
+
+ /* Create file asynchronously */
+ if ((file_id = H5Fcreate_async(PAR_ASYNC_API_TEST_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Find out if the native connector is used */
+ if (H5VLobject_is_native(file_id, &is_native_vol) < 0)
+ TEST_ERROR;
+
+ /* Create the dataset asynchronously */
+ if ((dset_id = H5Dcreate_async(file_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT, dcpl_id,
+ H5P_DEFAULT, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Calculate size of data buffers - first dimension is skipped in calculation */
+ for (i = 1, data_size = 1; i < SET_EXTENT_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= sizeof(int);
+ data_size *= SET_EXTENT_TEST_NUM_EXTENDS;
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ TEST_ERROR;
+ }
+
+ if (NULL == (read_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ TEST_ERROR;
+ }
+
+ /* Select this rank's portion of the dataspace */
+ for (i = 0; i < SET_EXTENT_TEST_SPACE_RANK; i++) {
+ if (i == 0) {
+ start[i] = (hsize_t)mpi_rank;
+ block[i] = 1;
+ }
+ else {
+ start[i] = 0;
+ block[i] = dims[i];
+ }
+
+ stride[i] = 1;
+ count[i] = 1;
+ }
+
+ if (H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't select hyperslab for dataset write\n");
+ goto error;
+ }
+
+ /* Setup memory space for write_buf */
+ {
+ hsize_t mdims[] = {data_size / SET_EXTENT_TEST_NUM_EXTENDS / sizeof(int)};
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ goto error;
+ }
+ }
+
+ /* Initialize write_buf */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ ((int *)write_buf)[i] = mpi_rank;
+
+ /* Extend the dataset in the first dimension n times, extending by 1 "row" per
+ * mpi rank involved on each iteration. Each rank will claim one of the new
+ * "rows" for I/O in an interleaved fashion. */
+ for (i = 0; i < SET_EXTENT_TEST_NUM_EXTENDS; i++) {
+ /* No need to extend on the first iteration */
+ if (i) {
+ /* Extend datapace */
+ dims[0] += (hsize_t)mpi_size;
+ if (H5Sset_extent_simple(space_id, SET_EXTENT_TEST_SPACE_RANK, dims, maxdims) < 0)
+ TEST_ERROR;
+
+ /* Extend dataset asynchronously */
+ if (H5Dset_extent_async(dset_id, dims, es_id) < 0)
+ TEST_ERROR;
+
+ /* Select hyperslab in file space to match new region */
+ for (j = 0; j < SET_EXTENT_TEST_SPACE_RANK; j++) {
+ if (j == 0) {
+ start[j] = (hsize_t)mpi_rank;
+ block[j] = 1;
+ stride[j] = (hsize_t)mpi_size;
+ count[j] = i + 1;
+ }
+ else {
+ start[j] = 0;
+ block[j] = dims[j];
+ stride[j] = 1;
+ count[j] = 1;
+ }
+ }
+
+ if (H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't select hyperslab for dataset write\n");
+ goto error;
+ }
+
+ /* Adjust memory dataspace to match as well */
+ {
+ hsize_t mdims[] = {(i + 1) * (data_size / SET_EXTENT_TEST_NUM_EXTENDS / sizeof(int))};
+
+ if (H5Sset_extent_simple(mspace_id, 1, mdims, NULL) < 0)
+ TEST_ERROR;
+
+ if (H5Sselect_all(mspace_id) < 0)
+ TEST_ERROR;
+ }
+ } /* end if */
+
+ /* Get dataset dataspace */
+ if ((space_id_out = H5Dget_space_async(dset_id, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Verify extent is correct */
+ if ((tri_ret = H5Sextent_equal(space_id, space_id_out)) < 0)
+ TEST_ERROR;
+ if (!tri_ret)
+ FAIL_PUTS_ERROR(" dataspaces are not equal\n");
+
+ /* Close output dataspace */
+ if (H5Sclose(space_id_out) < 0)
+ TEST_ERROR;
+
+ /* Write the dataset slice asynchronously */
+ if (H5Dwrite_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT, write_buf, es_id) < 0)
+ TEST_ERROR;
+ }
+
+ /* Flush the dataset asynchronously. This will effectively work as a
+ * barrier, guaranteeing the read takes place after the write. Skip this
+ * function because it isn't supported for the native vol in parallel. */
+ if (!is_native_vol && H5Oflush_async(dset_id, es_id) < 0)
+ TEST_ERROR;
+
+ /* Read the entire dataset asynchronously */
+ if (H5Dread_async(dset_id, H5T_NATIVE_INT, mspace_id, space_id, H5P_DEFAULT, read_buf, es_id) < 0)
+ TEST_ERROR;
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ TEST_ERROR;
+ if (op_failed)
+ TEST_ERROR;
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed, expected %d but got %d\n", write_buf[i], read_buf[i]);
+ goto error;
+ } /* end if */
+
+ /* Close dataset asynchronously */
+ if (H5Dclose_async(dset_id, es_id) < 0)
+ TEST_ERROR;
+
+ /* Open dataset asynchronously */
+ if ((dset_id = H5Dopen_async(file_id, "dset", H5P_DEFAULT, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Get dataset dataspace asynchronously */
+ if ((space_id_out = H5Dget_space_async(dset_id, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Verify the extents match */
+ if ((tri_ret = H5Sextent_equal(space_id, space_id_out)) < 0)
+ TEST_ERROR;
+ if (!tri_ret)
+ FAIL_PUTS_ERROR(" dataspaces are not equal\n");
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+
+ if (cdims) {
+ HDfree(cdims);
+ cdims = NULL;
+ }
+
+ if (maxdims) {
+ HDfree(maxdims);
+ maxdims = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ TEST_ERROR;
+ if (op_failed)
+ TEST_ERROR;
+
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(mspace_id) < 0)
+ TEST_ERROR;
+ if (H5Pclose(dcpl_id) < 0)
+ TEST_ERROR;
+ if (H5ESclose(es_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (read_buf)
+ HDfree(read_buf);
+ if (write_buf)
+ HDfree(write_buf);
+ if (cdims)
+ HDfree(cdims);
+ if (maxdims)
+ HDfree(maxdims);
+ if (dims)
+ HDfree(dims);
+ H5Sclose(space_id);
+ H5Sclose(mspace_id);
+ H5Sclose(space_id_out);
+ H5Dclose(dset_id);
+ H5Pclose(dcpl_id);
+ H5Fclose(file_id);
+ H5Pclose(fapl_id);
+ H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+ H5ESclose(es_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+#undef SET_EXTENT_TEST_SPACE_RANK
+#undef SET_EXTENT_TEST_NUM_EXTENDS
+
+/*
+ * Creates an attribute on a dataset. All ranks check to see
+ * if the attribute exists before and after creating the
+ * attribute on the dataset.
+ */
+#define ATTRIBUTE_EXISTS_TEST_SPACE_RANK 2
+static int
+test_attribute_exists(void)
+{
+ hsize_t *dims = NULL;
+ hbool_t op_failed = false;
+ hbool_t is_native_vol = false;
+ size_t num_in_progress;
+ hbool_t exists1 = false;
+ hbool_t exists2 = false;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t es_id = H5I_INVALID_HID;
+
+ TESTING("H5Aexists()");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ if (MAINPROCESS) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, dataset, dataset more, attribute, or flush aren't "
+ "supported with this connector\n");
+ }
+
+ return 0;
+ }
+
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, coll_metadata_read)) < 0)
+ TEST_ERROR;
+
+ /* Create dataspace */
+ if (generate_random_parallel_dimensions(ATTRIBUTE_EXISTS_TEST_SPACE_RANK, &dims) < 0)
+ TEST_ERROR;
+
+ /* Create dataspace */
+ if ((space_id = H5Screate_simple(ATTRIBUTE_EXISTS_TEST_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ /* Create event stack */
+ if ((es_id = H5EScreate()) < 0)
+ TEST_ERROR;
+
+ /* Open file asynchronously */
+ if ((file_id = H5Fopen_async(PAR_ASYNC_API_TEST_FILE, H5F_ACC_RDWR, fapl_id, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Find out if the native connector is used */
+ if (H5VLobject_is_native(file_id, &is_native_vol) < 0)
+ TEST_ERROR;
+
+ /* Create the dataset asynchronously */
+ if ((dset_id = H5Dcreate_async(file_id, "attr_exists_dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Check if the attribute exists asynchronously */
+ if (H5Aexists_async(dset_id, "attr", &exists1, es_id) < 0)
+ TEST_ERROR;
+
+ /* Flush the dataset asynchronously. This will effectively work as a
+ * barrier, guaranteeing the create takes place after the existence check.
+ * Skip this function because it isn't supported for the native vol in parallel.
+ */
+ if (!is_native_vol && H5Oflush_async(dset_id, es_id) < 0)
+ TEST_ERROR;
+
+ /* Create the attribute asynchronously */
+ if ((attr_id =
+ H5Acreate_async(dset_id, "attr", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Flush the dataset asynchronously. This will effectively work as a
+ * barrier, guaranteeing the existence check takes place after the create.
+ * Skip this function because it isn't supported for the native vol in parallel.
+ */
+ if (!is_native_vol && H5Oflush_async(dset_id, es_id) < 0)
+ TEST_ERROR;
+
+ /* Check if the attribute exists asynchronously */
+ if (H5Aexists_async(dset_id, "attr", &exists2, es_id) < 0)
+ TEST_ERROR;
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ TEST_ERROR;
+ if (op_failed)
+ TEST_ERROR;
+
+ /* Check if H5Aexists returned the correct values */
+ if (exists1)
+ FAIL_PUTS_ERROR(" H5Aexists returned TRUE for an attribute that should not exist")
+ if (!exists2)
+ FAIL_PUTS_ERROR(" H5Aexists returned FALSE for an attribute that should exist")
+
+ /* Close */
+ if (H5Aclose_async(attr_id, es_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose_async(dset_id, es_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose_async(file_id, es_id) < 0)
+ TEST_ERROR;
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ TEST_ERROR;
+ if (op_failed)
+ TEST_ERROR;
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5ESclose(es_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (dims)
+ HDfree(dims);
+ H5Sclose(space_id);
+ H5Aclose(attr_id);
+ H5Dclose(dset_id);
+ H5Pclose(fapl_id);
+ H5Fclose(file_id);
+ H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+ H5ESclose(es_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+#undef ATTRIBUTE_EXISTS_TEST_SPACE_RANK
+
+/*
+ * Creates a file, dataset and attribute. Each rank writes to
+ * the attribute. Then, each rank reads the attribute and
+ * verifies the data is correct.
+ */
+#define ATTRIBUTE_IO_TEST_SPACE_RANK 2
+static int
+test_attribute_io(void)
+{
+ hsize_t *dims = NULL;
+ hbool_t op_failed = false;
+ hbool_t is_native_vol = false;
+ size_t num_in_progress;
+ size_t i, data_size;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ hid_t es_id = H5I_INVALID_HID;
+ int *write_buf = NULL;
+ int *read_buf = NULL;
+
+ TESTING("attribute I/O");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ if (MAINPROCESS) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, dataset, dataset more, attribute, or flush aren't "
+ "supported with this connector\n");
+ }
+
+ return 0;
+ }
+
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, coll_metadata_read)) < 0)
+ TEST_ERROR;
+
+ /* Create dataspace */
+ if (generate_random_parallel_dimensions(ATTRIBUTE_IO_TEST_SPACE_RANK, &dims) < 0)
+ TEST_ERROR;
+
+ /* Create dataspace */
+ if ((space_id = H5Screate_simple(ATTRIBUTE_IO_TEST_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ /* Create event stack */
+ if ((es_id = H5EScreate()) < 0)
+ TEST_ERROR;
+
+ /* Open file asynchronously */
+ if ((file_id = H5Fopen_async(PAR_ASYNC_API_TEST_FILE, H5F_ACC_RDWR, fapl_id, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Find out if the native connector is used */
+ if (H5VLobject_is_native(file_id, &is_native_vol) < 0)
+ TEST_ERROR;
+
+ /* Create the dataset asynchronously */
+ if ((dset_id = H5Dcreate_async(file_id, "attr_dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Create the attribute asynchronously */
+ if ((attr_id =
+ H5Acreate_async(dset_id, "attr", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Calculate size of data buffers */
+ for (i = 0, data_size = 1; i < ATTRIBUTE_IO_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= sizeof(int);
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for attribute write\n");
+ TEST_ERROR;
+ }
+
+ if (NULL == (read_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for attribute read\n");
+ TEST_ERROR;
+ }
+
+ /* Initialize write_buf. */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ write_buf[i] = 10 * (int)i;
+
+ /* Write the attribute asynchronously */
+ if (H5Awrite_async(attr_id, H5T_NATIVE_INT, write_buf, es_id) < 0)
+ TEST_ERROR;
+
+ /* Flush the dataset asynchronously. This will effectively work as a
+ * barrier, guaranteeing the read takes place after the write.
+ * Skip this function because it isn't supported for the native vol in parallel.
+ */
+ if (!is_native_vol && H5Oflush_async(dset_id, es_id) < 0)
+ TEST_ERROR;
+
+ /* Read the attribute asynchronously */
+ if (H5Aread_async(attr_id, H5T_NATIVE_INT, read_buf, es_id) < 0)
+ TEST_ERROR;
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ TEST_ERROR;
+ if (op_failed)
+ TEST_ERROR;
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ goto error;
+ } /* end if */
+
+ /* Close the attribute asynchronously */
+ if (H5Aclose_async(attr_id, es_id) < 0)
+ TEST_ERROR;
+
+ /* Open the attribute asynchronously */
+ if ((attr_id = H5Aopen_async(dset_id, "attr", H5P_DEFAULT, es_id)) < 0)
+ TEST_ERROR;
+
+ /* Read the attribute asynchronously */
+ if (H5Aread_async(attr_id, H5T_NATIVE_INT, read_buf, es_id) < 0)
+ TEST_ERROR;
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ TEST_ERROR;
+ if (op_failed)
+ TEST_ERROR;
+
+ /* Verify the read data */
+ for (i = 0; i < data_size / sizeof(int); i++)
+ if (write_buf[i] != read_buf[i]) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ goto error;
+ } /* end if */
+
+ /* Close out of order to see if it trips things up */
+ if (H5Dclose_async(dset_id, es_id) < 0)
+ TEST_ERROR;
+ if (H5Aclose_async(attr_id, es_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose_async(file_id, es_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(space_id) < 0)
+ TEST_ERROR;
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+
+ /* Wait for the event stack to complete */
+ if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+ TEST_ERROR;
+ if (op_failed)
+ TEST_ERROR;
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5ESclose(es_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (read_buf)
+ HDfree(read_buf);
+ if (write_buf)
+ HDfree(write_buf);
+ if (dims)
+ HDfree(dims);
+ H5Sclose(space_id);
+ H5Aclose(attr_id);
+ H5Dclose(dset_id);
+ H5Pclose(fapl_id);
+ H5Fclose(file_id);
+ H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+ H5ESclose(es_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * Creates a file, dataset and attribute in parallel. Each rank writes to
+ * the attribute with datatype conversion involved (H5T_NATIVE_INT in
+ * memory vs. H5T_STD_U16BE in the file), then reads back the attribute
+ * and verifies the data is correct.
+ *
+ * Returns 0 on success (or skip), 1 on failure.
+ */
+#define ATTRIBUTE_IO_TCONV_TEST_SPACE_RANK 2
+static int
+test_attribute_io_tconv(void)
+{
+    hsize_t *dims = NULL;
+    hbool_t op_failed;
+    size_t num_in_progress;
+    size_t i, data_size;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t fapl_id = H5I_INVALID_HID;
+    hid_t attr_id = H5I_INVALID_HID;
+    hid_t space_id = H5I_INVALID_HID;
+    hid_t es_id = H5I_INVALID_HID;
+    int *write_buf = NULL;
+    int *read_buf = NULL;
+
+    TESTING("attribute I/O with type conversion");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        if (MAINPROCESS) {
+            SKIPPED();
+            HDprintf("    API functions for basic file, attribute, or flush aren't supported with this "
+                     "connector\n");
+        }
+
+        return 0;
+    }
+
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, coll_metadata_read)) < 0)
+        TEST_ERROR;
+
+    /* Generate random dimensions, identical across all ranks */
+    if (generate_random_parallel_dimensions(ATTRIBUTE_IO_TCONV_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    /* Create dataspace */
+    if ((space_id = H5Screate_simple(ATTRIBUTE_IO_TCONV_TEST_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(PAR_ASYNC_API_TEST_FILE, H5F_ACC_RDWR, fapl_id, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create the attribute asynchronously by name, on the dataset created
+     * by a previous test.  The file datatype (H5T_STD_U16BE) differs from
+     * the memory datatype used below, forcing type conversion on I/O. */
+    if ((attr_id = H5Acreate_by_name_async(file_id, "attr_dset", "attr_tconv", H5T_STD_U16BE, space_id,
+                                           H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Calculate size of data buffers */
+    for (i = 0, data_size = 1; i < ATTRIBUTE_IO_TCONV_TEST_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= sizeof(int);
+
+    if (NULL == (write_buf = HDmalloc(data_size))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for attribute write\n");
+        TEST_ERROR;
+    }
+
+    if (NULL == (read_buf = HDmalloc(data_size))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for attribute read\n");
+        TEST_ERROR;
+    }
+
+    /* Initialize write_buf. */
+    for (i = 0; i < data_size / sizeof(int); i++)
+        write_buf[i] = 10 * (int)i;
+
+    /* Write the attribute asynchronously */
+    if (H5Awrite_async(attr_id, H5T_NATIVE_INT, write_buf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the file asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the write. */
+    if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+        TEST_ERROR;
+
+    /* Read the attribute asynchronously */
+    if (H5Aread_async(attr_id, H5T_NATIVE_INT, read_buf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data */
+    for (i = 0; i < data_size / sizeof(int); i++)
+        if (write_buf[i] != read_buf[i]) {
+            H5_FAILED();
+            HDprintf("    data verification failed\n");
+            goto error;
+        } /* end if */
+
+    /* Close the attribute asynchronously */
+    if (H5Aclose_async(attr_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Re-open the attribute asynchronously to verify the data persisted */
+    if ((attr_id =
+             H5Aopen_by_name_async(file_id, "attr_dset", "attr_tconv", H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Read the attribute asynchronously */
+    if (H5Aread_async(attr_id, H5T_NATIVE_INT, read_buf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data */
+    for (i = 0; i < data_size / sizeof(int); i++)
+        if (write_buf[i] != read_buf[i]) {
+            H5_FAILED();
+            HDprintf("    data verification failed\n");
+            goto error;
+        } /* end if */
+
+    /* Close */
+    if (H5Aclose_async(attr_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        if (write_buf)
+            HDfree(write_buf);
+        if (dims)
+            HDfree(dims);
+        H5Sclose(space_id);
+        H5Aclose(attr_id);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * Creates a file, dataset and attribute in parallel. Each rank writes to
+ * the attribute with a compound datatype, then reads back the attribute
+ * and verifies the data is correct.  Partial-field reads and writes are
+ * exercised with memory types containing only field 'a' or only field 'b'.
+ */
+typedef struct tattr_cmpd_t {
+    int a;
+    int b;
+} tattr_cmpd_t;
+
+#define ATTRIBUTE_IO_COMPOUND_TEST_SPACE_RANK 2
+static int
+test_attribute_io_compound(void)
+{
+    hsize_t *dims = NULL;
+    hbool_t op_failed;
+    size_t num_in_progress;
+    size_t i, data_size;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t fapl_id = H5I_INVALID_HID;
+    hid_t attr_id = H5I_INVALID_HID;
+    hid_t space_id = H5I_INVALID_HID;
+    hid_t mtype_id = H5I_INVALID_HID;   /* memory type: both fields */
+    hid_t ftype_id = H5I_INVALID_HID;   /* file type */
+    hid_t mtypea_id = H5I_INVALID_HID;  /* memory type: field 'a' only */
+    hid_t mtypeb_id = H5I_INVALID_HID;  /* memory type: field 'b' only */
+    hid_t es_id = H5I_INVALID_HID;
+    tattr_cmpd_t *write_buf = NULL;
+    tattr_cmpd_t *read_buf = NULL;
+    tattr_cmpd_t *fbuf = NULL;          /* expected file contents */
+
+    TESTING("attribute I/O with compound type conversion");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        if (MAINPROCESS) {
+            SKIPPED();
+            HDprintf("    API functions for basic file, attribute, or flush aren't supported with this "
+                     "connector\n");
+        }
+
+        return 0;
+    }
+
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, coll_metadata_read)) < 0)
+        TEST_ERROR;
+
+    /* Generate random dimensions, identical across all ranks */
+    if (generate_random_parallel_dimensions(ATTRIBUTE_IO_COMPOUND_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    /* Create full memory compound type */
+    if ((mtype_id = H5Tcreate(H5T_COMPOUND, sizeof(tattr_cmpd_t))) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(mtype_id, "a_name", HOFFSET(tattr_cmpd_t, a), H5T_NATIVE_INT) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(mtype_id, "b_name", HOFFSET(tattr_cmpd_t, b), H5T_NATIVE_INT) < 0)
+        TEST_ERROR;
+
+    /* Partial memory type selecting only field 'a' */
+    if ((mtypea_id = H5Tcreate(H5T_COMPOUND, sizeof(tattr_cmpd_t))) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(mtypea_id, "a_name", HOFFSET(tattr_cmpd_t, a), H5T_NATIVE_INT) < 0)
+        TEST_ERROR;
+
+    /* Partial memory type selecting only field 'b' */
+    if ((mtypeb_id = H5Tcreate(H5T_COMPOUND, sizeof(tattr_cmpd_t))) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(mtypeb_id, "b_name", HOFFSET(tattr_cmpd_t, b), H5T_NATIVE_INT) < 0)
+        TEST_ERROR;
+
+    /* Packed file compound type: 2-byte big-endian unsigned field 'a'
+     * followed by an 8-byte little-endian signed field 'b' (2 + 8 bytes),
+     * so every I/O involves type conversion. */
+    if ((ftype_id = H5Tcreate(H5T_COMPOUND, 2 + 8)) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(ftype_id, "a_name", 0, H5T_STD_U16BE) < 0)
+        TEST_ERROR;
+    if (H5Tinsert(ftype_id, "b_name", 2, H5T_STD_I64LE) < 0)
+        TEST_ERROR;
+
+    /* Create dataspace */
+    if ((space_id = H5Screate_simple(ATTRIBUTE_IO_COMPOUND_TEST_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(PAR_ASYNC_API_TEST_FILE, H5F_ACC_RDWR, fapl_id, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create the attribute asynchronously by name */
+    if ((attr_id = H5Acreate_by_name_async(file_id, "attr_dset", "attr_cmpd", ftype_id, space_id, H5P_DEFAULT,
+                                           H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Calculate size of data buffers */
+    for (i = 0, data_size = 1; i < ATTRIBUTE_IO_COMPOUND_TEST_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= sizeof(tattr_cmpd_t);
+
+    if (NULL == (write_buf = HDmalloc(data_size))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for attribute write\n");
+        TEST_ERROR;
+    }
+
+    if (NULL == (read_buf = HDmalloc(data_size))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for attribute read\n");
+        TEST_ERROR;
+    }
+
+    if (NULL == (fbuf = HDmalloc(data_size))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for attribute read verification\n");
+        TEST_ERROR;
+    }
+
+    /* Initialize write_buf. */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        write_buf[i].a = 10 * (int)i;
+        write_buf[i].b = (10 * (int)i) + 1;
+    }
+
+    /* Write the attribute asynchronously (both fields) */
+    if (H5Awrite_async(attr_id, mtype_id, write_buf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Update fbuf to track what the file now holds */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        fbuf[i].a = write_buf[i].a;
+        fbuf[i].b = write_buf[i].b;
+    }
+
+    /* Flush the file asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the write. */
+    if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+        TEST_ERROR;
+
+    /* Read the attribute asynchronously */
+    if (H5Aread_async(attr_id, mtype_id, read_buf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        if (read_buf[i].a != fbuf[i].a) {
+            H5_FAILED();
+            HDprintf("    data verification failed for field 'a'\n");
+            goto error;
+        } /* end if */
+        if (read_buf[i].b != fbuf[i].b) {
+            H5_FAILED();
+            HDprintf("    data verification failed for field 'b'\n");
+            goto error;
+        } /* end if */
+    }
+
+    /* Clear the read buffer */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        read_buf[i].a = -2;
+        read_buf[i].b = -2;
+    }
+
+    /* Read the attribute asynchronously (element a only) */
+    if (H5Aread_async(attr_id, mtypea_id, read_buf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data: field 'b' must be untouched (-2) */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        if (read_buf[i].a != fbuf[i].a) {
+            H5_FAILED();
+            HDprintf("    data verification failed for field 'a'\n");
+            goto error;
+        } /* end if */
+        if (read_buf[i].b != -2) {
+            H5_FAILED();
+            HDprintf("    data verification failed for field 'b'\n");
+            goto error;
+        } /* end if */
+    }
+
+    /* Clear the read buffer */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        read_buf[i].a = -2;
+        read_buf[i].b = -2;
+    }
+
+    /* Read the attribute asynchronously (element b only) */
+    if (H5Aread_async(attr_id, mtypeb_id, read_buf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data: field 'a' must be untouched (-2) */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        if (read_buf[i].a != -2) {
+            H5_FAILED();
+            HDprintf("    data verification failed for field 'a'\n");
+            goto error;
+        } /* end if */
+        if (read_buf[i].b != fbuf[i].b) {
+            H5_FAILED();
+            HDprintf("    data verification failed for field 'b'\n");
+            goto error;
+        } /* end if */
+    }
+
+    if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+        H5_FAILED();
+        HDprintf("    MPI_Barrier failed\n");
+        goto error;
+    }
+
+    /* Update write_buf so the next write is distinguishable from the first */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        write_buf[i].a += 2 * 6 * 10;
+        write_buf[i].b += 2 * 6 * 10;
+    }
+
+    /* Write the attribute asynchronously (element a only) */
+    if (H5Awrite_async(attr_id, mtypea_id, write_buf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Update fbuf: only field 'a' changed in the file */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        fbuf[i].a = write_buf[i].a;
+    }
+
+    /* Flush the file asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the write. */
+    if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+        TEST_ERROR;
+
+    /* Clear the read buffer */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        read_buf[i].a = -2;
+        read_buf[i].b = -2;
+    }
+
+    /* Read the attribute asynchronously */
+    if (H5Aread_async(attr_id, mtype_id, read_buf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        if (read_buf[i].a != fbuf[i].a) {
+            H5_FAILED();
+            HDprintf("    data verification failed for field 'a'\n");
+            goto error;
+        } /* end if */
+        if (read_buf[i].b != fbuf[i].b) {
+            H5_FAILED();
+            HDprintf("    data verification failed for field 'b'\n");
+            goto error;
+        } /* end if */
+    }
+
+    if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+        H5_FAILED();
+        HDprintf("    MPI_Barrier failed\n");
+        goto error;
+    }
+
+    /* Update write_buf again for the field-'b'-only write */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        write_buf[i].a += 2 * 6 * 10;
+        write_buf[i].b += 2 * 6 * 10;
+    }
+
+    /* Write the attribute asynchronously (element b only) */
+    if (H5Awrite_async(attr_id, mtypeb_id, write_buf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Update fbuf: only field 'b' changed in the file */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        fbuf[i].b = write_buf[i].b;
+    }
+
+    /* Flush the file asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the write. */
+    if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+        TEST_ERROR;
+
+    /* Clear the read buffer */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        read_buf[i].a = -2;
+        read_buf[i].b = -2;
+    }
+
+    /* Read the attribute asynchronously */
+    if (H5Aread_async(attr_id, mtype_id, read_buf, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify the read data */
+    for (i = 0; i < data_size / sizeof(tattr_cmpd_t); i++) {
+        if (read_buf[i].a != fbuf[i].a) {
+            H5_FAILED();
+            HDprintf("    data verification failed for field 'a'\n");
+            goto error;
+        } /* end if */
+        if (read_buf[i].b != fbuf[i].b) {
+            H5_FAILED();
+            HDprintf("    data verification failed for field 'b'\n");
+            goto error;
+        } /* end if */
+    }
+
+    /* Close */
+    if (H5Aclose_async(attr_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(mtype_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(ftype_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(mtypea_id) < 0)
+        TEST_ERROR;
+    if (H5Tclose(mtypeb_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+
+    if (fbuf) {
+        HDfree(fbuf);
+        fbuf = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        if (write_buf)
+            HDfree(write_buf);
+        if (fbuf)
+            HDfree(fbuf);
+        if (dims)
+            HDfree(dims);
+        H5Sclose(space_id);
+        H5Tclose(mtype_id);
+        H5Tclose(ftype_id);
+        H5Tclose(mtypea_id);
+        H5Tclose(mtypeb_id);
+        H5Aclose(attr_id);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * Tests async group interfaces in parallel.
+ *
+ * Creates a parent group plus three subgroups holding 0, 1 and 2
+ * sub-subgroups respectively, then exercises H5Gget_info_async,
+ * H5Gget_info_by_idx_async and H5Gget_info_by_name_async and checks
+ * the reported link counts.  Returns 0 on success (or skip), 1 on failure.
+ */
+static int
+test_group(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t fapl_id = H5I_INVALID_HID;
+    hid_t parent_group_id = H5I_INVALID_HID;
+    hid_t group_id = H5I_INVALID_HID;
+    hid_t subgroup_id = H5I_INVALID_HID;
+    hid_t gcpl_id = H5I_INVALID_HID;
+    hid_t es_id = H5I_INVALID_HID;
+    H5G_info_t info1; /* info for "group1" (0 links expected)          */
+    H5G_info_t info2; /* info for creation-order index 1 ("group2")    */
+    H5G_info_t info3; /* info for "group3" (2 links expected)          */
+    size_t num_in_progress;
+    hbool_t op_failed;
+
+    TESTING("group operations");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_MORE) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        if (MAINPROCESS) {
+            SKIPPED();
+            HDprintf("    API functions for basic file, group, group more, creation order, or flush aren't "
+                     "supported with this connector\n");
+        }
+
+        return 0;
+    }
+
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, coll_metadata_read)) < 0)
+        TEST_ERROR;
+
+    /* Create GCPL */
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0)
+        TEST_ERROR;
+
+    /* Track creation order; required for the H5_INDEX_CRT_ORDER lookup
+     * done by H5Gget_info_by_idx_async below */
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(PAR_ASYNC_API_TEST_FILE, H5F_ACC_RDWR, fapl_id, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create the parent group asynchronously */
+    if ((parent_group_id =
+             H5Gcreate_async(file_id, "group_parent", H5P_DEFAULT, gcpl_id, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create 3 subgroups asynchronously, the first with no sub-subgroups, the
+     * second with 1, and the third with 2 */
+    if ((group_id =
+             H5Gcreate_async(parent_group_id, "group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+    if (H5Gclose_async(group_id, es_id) < 0)
+        TEST_ERROR;
+
+    if ((group_id =
+             H5Gcreate_async(parent_group_id, "group2", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+    if ((subgroup_id = H5Gcreate_async(group_id, "subgroup1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+        0)
+        TEST_ERROR;
+    if (H5Gclose_async(subgroup_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose_async(group_id, es_id) < 0)
+        TEST_ERROR;
+
+    if ((group_id =
+             H5Gcreate_async(parent_group_id, "group3", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+    if ((subgroup_id = H5Gcreate_async(group_id, "subgroup1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+        0)
+        TEST_ERROR;
+    if (H5Gclose_async(subgroup_id, es_id) < 0)
+        TEST_ERROR;
+    if ((subgroup_id = H5Gcreate_async(group_id, "subgroup2", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+        0)
+        TEST_ERROR;
+    if (H5Gclose_async(subgroup_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose_async(group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the file asynchronously. This will effectively work as a barrier,
+     * guaranteeing the read takes place after the write. */
+    if (H5Fflush_async(file_id, H5F_SCOPE_LOCAL, es_id) < 0)
+        TEST_ERROR;
+
+    /* Test H5Gget_info_async */
+    /* Open group1 asynchronously */
+    if ((group_id = H5Gopen_async(parent_group_id, "group1", H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Get info */
+    if (H5Gget_info_async(group_id, &info1, es_id) < 0)
+        TEST_ERROR;
+
+    /* Test H5Gget_info_by_idx_async (creation order index 1 == "group2") */
+    if (H5Gget_info_by_idx_async(parent_group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 1, &info2,
+                                 H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Test H5Gget_info_by_name_async */
+    if (H5Gget_info_by_name_async(parent_group_id, "group3", &info3, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete; the info structs are only
+     * valid once the async get_info operations have finished */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Verify group infos: group1 is empty, group2 has one subgroup,
+     * group3 has two */
+    if (info1.nlinks != 0)
+        FAIL_PUTS_ERROR("    incorrect number of links")
+    if (info2.nlinks != 1)
+        FAIL_PUTS_ERROR("    incorrect number of links")
+    if (info3.nlinks != 2)
+        FAIL_PUTS_ERROR("    incorrect number of links")
+
+    /* Close */
+    if (H5Gclose_async(group_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(subgroup_id);
+        H5Gclose(group_id);
+        H5Gclose(parent_group_id);
+        H5Fclose(file_id);
+        H5Pclose(fapl_id);
+        H5Pclose(gcpl_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * Tests async link interfaces in parallel.
+ *
+ * Creates a hard link and a soft link, verifies both exist, deletes the
+ * soft link (by creation-order index) and then the hard link, checking
+ * the H5Lexists_async results after each stage.
+ * Returns 0 on success (or skip), 1 on failure.
+ */
+static int
+test_link(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t fapl_id = H5I_INVALID_HID;
+    hid_t parent_group_id = H5I_INVALID_HID;
+    hid_t group_id = H5I_INVALID_HID;
+    hid_t gcpl_id = H5I_INVALID_HID;
+    hid_t es_id = H5I_INVALID_HID;
+    hbool_t existsh1; /* hard link, before any delete (expect TRUE)   */
+    hbool_t existsh2; /* hard link, after soft delete (expect TRUE)   */
+    hbool_t existsh3; /* hard link, after hard delete (expect FALSE)  */
+    hbool_t existss1; /* soft link, before any delete (expect TRUE)   */
+    hbool_t existss2; /* soft link, after soft delete (expect FALSE)  */
+    hbool_t existss3; /* soft link, after hard delete (expect FALSE)  */
+    size_t num_in_progress;
+    hbool_t op_failed = false;
+    hbool_t is_native_vol = false;
+
+    TESTING("link operations");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_LINK_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_HARD_LINKS) || !(vol_cap_flags_g & H5VL_CAP_FLAG_SOFT_LINKS) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_CREATION_ORDER)) {
+        if (MAINPROCESS) {
+            SKIPPED();
+            HDprintf("    API functions for basic file, link, hard link, soft link, flush, or creation order "
+                     "aren't supported with this connector\n");
+        }
+
+        return 0;
+    }
+
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, coll_metadata_read)) < 0)
+        TEST_ERROR;
+
+    /* Create GCPL */
+    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0)
+        TEST_ERROR;
+
+    /* Track creation order; required for H5Ldelete_by_idx_async below */
+    if (H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(PAR_ASYNC_API_TEST_FILE, H5F_ACC_RDWR, fapl_id, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Find out if the native connector is used */
+    if (H5VLobject_is_native(file_id, &is_native_vol) < 0)
+        TEST_ERROR;
+
+    /* Create the parent group asynchronously */
+    if ((parent_group_id =
+             H5Gcreate_async(file_id, "link_parent", H5P_DEFAULT, gcpl_id, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create subgroup asynchronously. */
+    if ((group_id = H5Gcreate_async(parent_group_id, "group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) <
+        0)
+        TEST_ERROR;
+    if (H5Gclose_async(group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the link to the subgroup is visible to later tasks.
+     * Skip this function for the native vol because it isn't supported in parallel.
+     */
+    if (!is_native_vol && H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Create hard link asynchronously */
+    if (H5Lcreate_hard_async(parent_group_id, "group", parent_group_id, "hard_link", H5P_DEFAULT, H5P_DEFAULT,
+                             es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the soft link create takes place after the hard
+     * link create. Skip this function for the native vol because it isn't supported in parallel.
+     */
+    if (!is_native_vol && H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Create soft link asynchronously */
+    if (H5Lcreate_soft_async("/link_parent/group", parent_group_id, "soft_link", H5P_DEFAULT, H5P_DEFAULT,
+                             es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the writes.
+     * Skip this function for the native vol because it isn't supported in parallel.
+     */
+    if (!is_native_vol && H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+        H5_FAILED();
+        HDprintf("    MPI_Barrier failed\n");
+        goto error;
+    }
+
+    /* Check if hard link exists */
+    if (H5Lexists_async(parent_group_id, "hard_link", &existsh1, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Check if soft link exists */
+    if (H5Lexists_async(parent_group_id, "soft_link", &existss1, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the delete takes place after the reads.
+     * Skip this function for the native vol because it isn't supported in parallel.
+     */
+    if (!is_native_vol && H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Delete the soft link by creation-order index (index 2: group,
+     * hard_link, soft_link were created in that order) */
+    if (H5Ldelete_by_idx_async(parent_group_id, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 2, H5P_DEFAULT, es_id) <
+        0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the delete.
+     * Skip this function for the native vol because it isn't supported in parallel.
+     */
+    if (!is_native_vol && H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+        H5_FAILED();
+        HDprintf("    MPI_Barrier failed\n");
+        goto error;
+    }
+
+    /* Check if hard link exists */
+    if (H5Lexists_async(parent_group_id, "hard_link", &existsh2, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Check if soft link exists */
+    if (H5Lexists_async(parent_group_id, "soft_link", &existss2, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the delete takes place after the reads.
+     * Skip this function for the native vol because it isn't supported in parallel.
+     */
+    if (!is_native_vol && H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Delete hard link */
+    if (H5Ldelete_async(parent_group_id, "hard_link", H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the read takes place after the delete.
+     * Skip this function for the native vol because it isn't supported in parallel.
+     */
+    if (!is_native_vol && H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+        H5_FAILED();
+        HDprintf("    MPI_Barrier failed\n");
+        goto error;
+    }
+
+    /* Check if hard link exists */
+    if (H5Lexists_async(parent_group_id, "hard_link", &existsh3, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Check if soft link exists */
+    if (H5Lexists_async(parent_group_id, "soft_link", &existss3, H5P_DEFAULT, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Check if existence returns were correct */
+    if (!existsh1)
+        FAIL_PUTS_ERROR("    link exists returned FALSE for link that should exist")
+    if (!existss1)
+        FAIL_PUTS_ERROR("    link exists returned FALSE for link that should exist")
+    if (!existsh2)
+        FAIL_PUTS_ERROR("    link exists returned FALSE for link that should exist")
+    if (existss2)
+        FAIL_PUTS_ERROR("    link exists returned TRUE for link that should not exist")
+    if (existsh3)
+        FAIL_PUTS_ERROR("    link exists returned TRUE for link that should not exist")
+    if (existss3)
+        FAIL_PUTS_ERROR("    link exists returned TRUE for link that should not exist")
+
+    /* Close */
+    if (H5Gclose_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Pclose(gcpl_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Gclose(group_id);
+        H5Gclose(parent_group_id);
+        H5Fclose(file_id);
+        H5Pclose(fapl_id);
+        H5Pclose(gcpl_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * Tests H5Ocopy_async and H5Orefresh_async in parallel.
+ *
+ * NOTE(review): the refresh step below currently calls the synchronous
+ * H5Orefresh(), not H5Orefresh_async() -- confirm whether the async
+ * variant was intended, since the test title advertises the async API.
+ */
+#define OCOPY_REFRESH_TEST_SPACE_RANK 2
+static int
+test_ocopy_orefresh(void)
+{
+    hsize_t *dims            = NULL;
+    hid_t    file_id         = H5I_INVALID_HID;
+    hid_t    fapl_id         = H5I_INVALID_HID;
+    hid_t    parent_group_id = H5I_INVALID_HID;
+    hid_t    dset_id         = H5I_INVALID_HID;
+    hid_t    space_id        = H5I_INVALID_HID;
+    hid_t    es_id           = H5I_INVALID_HID;
+    size_t   num_in_progress;
+    hbool_t  op_failed     = false;
+    hbool_t  is_native_vol = false;
+
+    TESTING("H5Ocopy() and H5Orefresh()");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_OBJECT_MORE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+        if (MAINPROCESS) {
+            SKIPPED();
+            HDprintf(" API functions for basic file, group, dataset, object more, flush, or refresh "
+                     "aren't supported with this connector\n");
+        }
+
+        return 0;
+    }
+
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, coll_metadata_read)) < 0)
+        TEST_ERROR;
+
+    /* Generate random, per-run dataset dimensions */
+    if (generate_random_parallel_dimensions(OCOPY_REFRESH_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    /* Create dataspace */
+    if ((space_id = H5Screate_simple(OCOPY_REFRESH_TEST_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(PAR_ASYNC_API_TEST_FILE, H5F_ACC_RDWR, fapl_id, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Find out if the native connector is used */
+    if (H5VLobject_is_native(file_id, &is_native_vol) < 0)
+        TEST_ERROR;
+
+    /* Create the parent group asynchronously */
+    if ((parent_group_id =
+             H5Gcreate_async(file_id, "ocopy_parent", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Create dataset asynchronously. */
+    if ((dset_id = H5Dcreate_async(parent_group_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT,
+                                   H5P_DEFAULT, H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+    if (H5Dclose_async(dset_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the copy takes place after dataset create.
+     * Skip this function for the native vol because it isn't supported in parallel.
+     */
+    if (!is_native_vol && H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Copy dataset */
+    if (H5Ocopy_async(parent_group_id, "dset", parent_group_id, "copied_dset", H5P_DEFAULT, H5P_DEFAULT,
+                      es_id) < 0)
+        TEST_ERROR;
+
+    /* Flush the parent group asynchronously. This will effectively work as a
+     * barrier, guaranteeing the dataset open takes place after the copy.
+     * Skip this function for the native vol because it isn't supported in parallel.
+     */
+    if (!is_native_vol && H5Oflush_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* With independent metadata reads, synchronize all ranks before the
+     * open so every rank sees the copied dataset.
+     */
+    if (!coll_metadata_read) {
+        /* Wait for the event stack to complete */
+        if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+            TEST_ERROR;
+        if (op_failed)
+            TEST_ERROR;
+
+        if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+            H5_FAILED();
+            HDprintf(" MPI_Barrier failed\n");
+            goto error;
+        }
+    }
+
+    /* Open the copied dataset asynchronously */
+    if ((dset_id = H5Dopen_async(parent_group_id, "copied_dset", H5P_DEFAULT, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Refresh the copied dataset (synchronous call -- see note above) */
+    if (H5Orefresh(dset_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Close */
+    if (H5Dclose_async(dset_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose_async(parent_group_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Close the dataspace; this was previously leaked on the success path
+     * (only the error path closed it).
+     */
+    if (H5Sclose(space_id) < 0)
+        TEST_ERROR;
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        if (dims)
+            HDfree(dims);
+        H5Sclose(space_id);
+        H5Dclose(dset_id);
+        H5Gclose(parent_group_id);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+#undef OCOPY_REFRESH_TEST_SPACE_RANK
+
+/*
+ * Tests H5Freopen_async in parallel.
+ *
+ * Opens the shared test file, reopens it through H5Freopen_async, then
+ * closes both handles, waiting on the event stack between phases.
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_file_reopen(void)
+{
+    hid_t  file_id          = H5I_INVALID_HID;
+    hid_t  fapl_id          = H5I_INVALID_HID;
+    hid_t  reopened_file_id = H5I_INVALID_HID;
+    hid_t  es_id            = H5I_INVALID_HID;
+    size_t num_in_progress;
+    /* Initialize for consistency with the other async tests; H5ESwait
+     * overwrites this before it is read.
+     */
+    hbool_t op_failed = false;
+
+    TESTING("H5Freopen()");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_MORE)) {
+        if (MAINPROCESS) {
+            SKIPPED();
+            HDprintf(" API functions for basic file or file more aren't supported with this connector\n");
+        }
+
+        return 0;
+    }
+
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, coll_metadata_read)) < 0)
+        TEST_ERROR;
+
+    /* Create event stack */
+    if ((es_id = H5EScreate()) < 0)
+        TEST_ERROR;
+
+    /* Open file asynchronously */
+    if ((file_id = H5Fopen_async(PAR_ASYNC_API_TEST_FILE, H5F_ACC_RDWR, fapl_id, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Reopen file asynchronously */
+    if ((reopened_file_id = H5Freopen_async(file_id, es_id)) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    /* Close */
+    if (H5Fclose_async(reopened_file_id, es_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose_async(file_id, es_id) < 0)
+        TEST_ERROR;
+
+    /* Wait for the event stack to complete */
+    if (H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed) < 0)
+        TEST_ERROR;
+    if (op_failed)
+        TEST_ERROR;
+
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5ESclose(es_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Fclose(reopened_file_id);
+        H5Fclose(file_id);
+        H5Pclose(fapl_id);
+        H5ESwait(es_id, H5_API_TEST_WAIT_FOREVER, &num_in_progress, &op_failed);
+        H5ESclose(es_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * Cleanup temporary test files.
+ *
+ * Only the main MPI process performs the deletes to avoid racing
+ * H5Fdelete calls across ranks; callers synchronize afterwards.
+ */
+static void
+cleanup_files(void)
+{
+    char file_name[64];
+    int  i;
+
+    if (MAINPROCESS) {
+        H5Fdelete(PAR_ASYNC_API_TEST_FILE, H5P_DEFAULT);
+        for (i = 0; i <= max_printf_file; i++) {
+            /* Use sizeof so the bound tracks the buffer declaration
+             * instead of duplicating the magic constant 64.
+             */
+            snprintf(file_name, sizeof(file_name), PAR_ASYNC_API_TEST_FILE_PRINTF, i);
+            H5Fdelete(file_name, H5P_DEFAULT);
+        } /* end for */
+    }
+}
+
+/*
+ * Entry point for the parallel async API tests.
+ *
+ * Runs the test array twice: first with collective metadata reads
+ * (the default set by the caller), then again with independent
+ * metadata reads. Returns the total number of failed tests across
+ * BOTH passes.
+ */
+int
+H5_api_async_test_parallel(void)
+{
+    size_t i;
+    int    nerrors = 0;
+
+    if (MAINPROCESS) {
+        HDprintf("**********************************************\n");
+        HDprintf("* *\n");
+        HDprintf("* API Parallel Async Tests *\n");
+        HDprintf("* *\n");
+        HDprintf("**********************************************\n\n");
+    }
+
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_ASYNC)) {
+        if (MAINPROCESS) {
+            SKIPPED();
+            HDprintf(" Async APIs aren't supported with this connector\n");
+        }
+
+        return 0;
+    }
+
+    /* First pass: collective metadata reads */
+    for (i = 0; i < ARRAY_LENGTH(par_async_tests); i++) {
+        nerrors += (*par_async_tests[i])() ? 1 : 0;
+
+        if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+            if (MAINPROCESS)
+                HDprintf(" MPI_Barrier() failed!\n");
+        }
+    }
+
+    if (MAINPROCESS) {
+        HDprintf("\n");
+        HDprintf("Cleaning up testing files\n");
+    }
+
+    cleanup_files();
+
+    if (MAINPROCESS) {
+        HDprintf("\n * Re-testing with independent metadata reads *\n");
+    }
+
+    coll_metadata_read = FALSE;
+
+    /* Second pass: independent metadata reads. Keep accumulating into
+     * nerrors rather than re-initializing it, so failures from the first
+     * pass are not silently discarded (the previous code reset nerrors
+     * to 0 here, masking collective-pass failures).
+     */
+    for (i = 0; i < ARRAY_LENGTH(par_async_tests); i++) {
+        nerrors += (*par_async_tests[i])() ? 1 : 0;
+
+        if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+            if (MAINPROCESS)
+                HDprintf(" MPI_Barrier() failed!\n");
+        }
+    }
+
+    if (MAINPROCESS) {
+        HDprintf("\n");
+        HDprintf("Cleaning up testing files\n");
+    }
+
+    cleanup_files();
+
+    return nerrors;
+}
+
+#else /* H5ESpublic_H */
+
+/*
+ * Stub entry point used when the HDF5 library was built without event
+ * set (async) support: print the test banner, report the skip, and
+ * return 0 so the overall test run does not count this as a failure.
+ */
+int
+H5_api_async_test_parallel(void)
+{
+    if (MAINPROCESS) {
+        HDprintf("**********************************************\n");
+        HDprintf("* *\n");
+        HDprintf("* API Parallel Async Tests *\n");
+        HDprintf("* *\n");
+        HDprintf("**********************************************\n\n");
+    }
+
+    HDprintf("SKIPPED due to no async support in HDF5 library\n");
+
+    return 0;
+}
+
+#endif
diff --git a/testpar/API/H5_api_async_test_parallel.h b/testpar/API/H5_api_async_test_parallel.h
new file mode 100644
index 0000000..9e4340c
--- /dev/null
+++ b/testpar/API/H5_api_async_test_parallel.h
@@ -0,0 +1,29 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_ASYNC_TEST_PARALLEL_H_
+#define H5_API_ASYNC_TEST_PARALLEL_H_
+
+#include "H5_api_test_parallel.h"
+
+int H5_api_async_test_parallel(void);
+
+/********************************************************
+ * *
+ * API parallel async test defines *
+ * *
+ ********************************************************/
+
+#define PAR_ASYNC_API_TEST_FILE "H5_api_async_test_parallel.h5"
+#define PAR_ASYNC_API_TEST_FILE_PRINTF "H5_api_async_test_parallel_%d.h5"
+
+#endif /* H5_API_ASYNC_TEST_PARALLEL_H_ */
diff --git a/testpar/API/H5_api_attribute_test_parallel.c b/testpar/API/H5_api_attribute_test_parallel.c
new file mode 100644
index 0000000..cffbfcd
--- /dev/null
+++ b/testpar/API/H5_api_attribute_test_parallel.c
@@ -0,0 +1,47 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_attribute_test_parallel.h"
+
+/*
+ * The array of parallel attribute tests to be performed.
+ *
+ * Currently a single NULL placeholder: no parallel attribute tests
+ * exist yet, but the array keeps this file structurally identical to
+ * the other parallel API test modules.
+ */
+static int (*par_attribute_tests[])(void) = {NULL};
+
+/*
+ * Entry point for the parallel attribute tests.
+ *
+ * Placeholder implementation: the test invocation inside the loop is
+ * commented out until real tests are added, so nerrors always remains 0.
+ * The per-iteration MPI_Barrier is kept so all ranks stay synchronized
+ * the same way they will be once tests are enabled.
+ */
+int
+H5_api_attribute_test_parallel(void)
+{
+    size_t i;
+    int    nerrors;
+
+    if (MAINPROCESS) {
+        HDprintf("**********************************************\n");
+        HDprintf("* *\n");
+        HDprintf("* API Parallel Attribute Tests *\n");
+        HDprintf("* *\n");
+        HDprintf("**********************************************\n\n");
+    }
+
+    /* Loop runs once per (placeholder) array entry; re-enable the call
+     * below when parallel attribute tests are implemented.
+     */
+    for (i = 0, nerrors = 0; i < ARRAY_LENGTH(par_attribute_tests); i++) {
+        /* nerrors += (*par_attribute_tests[i])() ? 1 : 0; */
+
+        if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+            if (MAINPROCESS)
+                HDprintf(" MPI_Barrier() failed!\n");
+        }
+    }
+
+    if (MAINPROCESS)
+        HDprintf("\n");
+
+    return nerrors;
+}
diff --git a/testpar/API/H5_api_attribute_test_parallel.h b/testpar/API/H5_api_attribute_test_parallel.h
new file mode 100644
index 0000000..81802ae
--- /dev/null
+++ b/testpar/API/H5_api_attribute_test_parallel.h
@@ -0,0 +1,20 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_ATTRIBUTE_TEST_PARALLEL_H_
+#define H5_API_ATTRIBUTE_TEST_PARALLEL_H_
+
+#include "H5_api_test_parallel.h"
+
+int H5_api_attribute_test_parallel(void);
+
+#endif /* H5_API_ATTRIBUTE_TEST_PARALLEL_H_ */
diff --git a/testpar/API/H5_api_dataset_test_parallel.c b/testpar/API/H5_api_dataset_test_parallel.c
new file mode 100644
index 0000000..fd02a7f
--- /dev/null
+++ b/testpar/API/H5_api_dataset_test_parallel.c
@@ -0,0 +1,8149 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * XXX: Better documentation for each test about how the selections get
+ * split up among MPI ranks.
+ */
+#include "H5_api_dataset_test_parallel.h"
+
+static int test_write_dataset_data_verification(void);
+static int test_write_dataset_independent(void);
+static int test_write_dataset_one_proc_0_selection(void);
+static int test_write_dataset_one_proc_none_selection(void);
+static int test_write_dataset_one_proc_all_selection(void);
+static int test_write_dataset_hyper_file_all_mem(void);
+static int test_write_dataset_all_file_hyper_mem(void);
+static int test_write_dataset_point_file_all_mem(void);
+static int test_write_dataset_all_file_point_mem(void);
+static int test_write_dataset_hyper_file_point_mem(void);
+static int test_write_dataset_point_file_hyper_mem(void);
+static int test_read_dataset_one_proc_0_selection(void);
+static int test_read_dataset_one_proc_none_selection(void);
+static int test_read_dataset_one_proc_all_selection(void);
+static int test_read_dataset_hyper_file_all_mem(void);
+static int test_read_dataset_all_file_hyper_mem(void);
+static int test_read_dataset_point_file_all_mem(void);
+static int test_read_dataset_all_file_point_mem(void);
+static int test_read_dataset_hyper_file_point_mem(void);
+static int test_read_dataset_point_file_hyper_mem(void);
+
+/*
+ * Chunking tests
+ */
+static int test_write_multi_chunk_dataset_same_shape_read(void);
+static int test_write_multi_chunk_dataset_diff_shape_read(void);
+static int test_overwrite_multi_chunk_dataset_same_shape_read(void);
+static int test_overwrite_multi_chunk_dataset_diff_shape_read(void);
+
+/*
+ * The array of parallel dataset tests to be performed.
+ */
+static int (*par_dataset_tests[])(void) = {
+ test_write_dataset_data_verification,
+ test_write_dataset_independent,
+ test_write_dataset_one_proc_0_selection,
+ test_write_dataset_one_proc_none_selection,
+ test_write_dataset_one_proc_all_selection,
+ test_write_dataset_hyper_file_all_mem,
+ test_write_dataset_all_file_hyper_mem,
+ test_write_dataset_point_file_all_mem,
+ test_write_dataset_all_file_point_mem,
+ test_write_dataset_hyper_file_point_mem,
+ test_write_dataset_point_file_hyper_mem,
+ test_read_dataset_one_proc_0_selection,
+ test_read_dataset_one_proc_none_selection,
+ test_read_dataset_one_proc_all_selection,
+ test_read_dataset_hyper_file_all_mem,
+ test_read_dataset_all_file_hyper_mem,
+ test_read_dataset_point_file_all_mem,
+ test_read_dataset_all_file_point_mem,
+ test_read_dataset_hyper_file_point_mem,
+ test_read_dataset_point_file_hyper_mem,
+ test_write_multi_chunk_dataset_same_shape_read,
+ test_write_multi_chunk_dataset_diff_shape_read,
+ test_overwrite_multi_chunk_dataset_same_shape_read,
+ test_overwrite_multi_chunk_dataset_diff_shape_read,
+};
+
+/*
+ * A test to ensure that data is read back correctly from
+ * a dataset after it has been written in parallel. The test
+ * covers simple examples of using H5S_ALL selections,
+ * hyperslab selections and point selections.
+ */
+#define DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK 3
+#define DATASET_WRITE_DATA_VERIFY_TEST_NUM_POINTS 10
+#define DATASET_WRITE_DATA_VERIFY_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_WRITE_DATA_VERIFY_TEST_GROUP_NAME "dataset_write_data_verification_test"
+#define DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME1 "dataset_write_data_verification_all"
+#define DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME2 "dataset_write_data_verification_hyperslab"
+#define DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME3 "dataset_write_data_verification_points"
+static int
+test_write_dataset_data_verification(void)
+{
+ hssize_t space_npoints;
+ hsize_t *dims = NULL;
+ hsize_t start[DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK];
+ hsize_t stride[DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK];
+ hsize_t count[DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK];
+ hsize_t block[DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK];
+ hsize_t *points = NULL;
+ size_t i, data_size;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ void *write_buf = NULL;
+ void *read_buf = NULL;
+
+ TESTING_MULTIPART("verification of dataset data using H5Dwrite then H5Dread");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ TESTING_2("test setup");
+
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_parallel_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_WRITE_DATA_VERIFY_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if (generate_random_parallel_dimensions(DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK, &dims) < 0)
+ TEST_ERROR;
+
+ if ((fspace_id = H5Screate_simple(DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME1,
+ DATASET_WRITE_DATA_VERIFY_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME1);
+ goto error;
+ }
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME2,
+ DATASET_WRITE_DATA_VERIFY_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME2);
+ goto error;
+ }
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME3,
+ DATASET_WRITE_DATA_VERIFY_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME3);
+ goto error;
+ }
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+
+ PASSED();
+
+ BEGIN_MULTIPART
+ {
+ PART_BEGIN(H5Dwrite_all_read)
+ {
+ hbool_t op_failed = FALSE;
+
+ TESTING_2("H5Dwrite using H5S_ALL then H5Dread");
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME1, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME1);
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ /*
+ * Write data to dataset on rank 0 only. All ranks will read the data back.
+ */
+ if (MAINPROCESS) {
+ for (i = 0, data_size = 1; i < DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE;
+
+ if (NULL != (write_buf = HDmalloc(data_size))) {
+ for (i = 0; i < data_size / DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE; i++)
+ ((int *)write_buf)[i] = (int)i;
+
+ if (H5Dwrite(dset_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL,
+ H5P_DEFAULT, write_buf) < 0)
+ op_failed = TRUE;
+ }
+ else
+ op_failed = TRUE;
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+ }
+
+ if (MPI_SUCCESS !=
+ MPI_Allreduce(MPI_IN_PLACE, &op_failed, 1, MPI_C_BOOL, MPI_LAND, MPI_COMM_WORLD)) {
+ H5_FAILED();
+ HDprintf(" couldn't determine if dataset write on rank 0 succeeded\n");
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if (op_failed == TRUE) {
+ H5_FAILED();
+ HDprintf(" dataset write on rank 0 failed!\n");
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ /*
+ * Close and re-open the file to ensure that the data gets written.
+ */
+ if (H5Gclose(group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close test's container group\n");
+ PART_ERROR(H5Dwrite_all_read);
+ }
+ if (H5Gclose(container_group) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close container group\n");
+ PART_ERROR(H5Dwrite_all_read);
+ }
+ if (H5Fclose(file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close file for data flushing\n");
+ PART_ERROR(H5Dwrite_all_read);
+ }
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+ PART_ERROR(H5Dwrite_all_read);
+ }
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ PART_ERROR(H5Dwrite_all_read);
+ }
+ if ((group_id =
+ H5Gopen2(container_group, DATASET_WRITE_DATA_VERIFY_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container sub-group '%s'\n",
+ DATASET_WRITE_DATA_VERIFY_TEST_GROUP_NAME);
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME1, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME1);
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataspace num points\n");
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if (NULL ==
+ (read_buf = HDmalloc((hsize_t)space_npoints * DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if (H5Dread(dset_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME1);
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ for (i = 0; i < (hsize_t)space_npoints; i++)
+ if (((int *)read_buf)[i] != (int)i) {
+ H5_FAILED();
+ HDprintf(" H5S_ALL selection data verification failed\n");
+ PART_ERROR(H5Dwrite_all_read);
+ }
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dwrite_all_read);
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ PART_BEGIN(H5Dwrite_hyperslab_read)
+ {
+ TESTING_2("H5Dwrite using hyperslab selection then H5Dread");
+
+ for (i = 1, data_size = 1; i < DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE;
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ for (i = 0; i < data_size / DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE; i++)
+ ((int *)write_buf)[i] = mpi_rank;
+
+ /* Each MPI rank writes to a single row in the second dimension
+ * and the entirety of the following dimensions. The combined
+ * selections from all MPI ranks spans the first dimension.
+ */
+ for (i = 0; i < DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK; i++) {
+ if (i == 0) {
+ start[i] = (hsize_t)mpi_rank;
+ block[i] = 1;
+ }
+ else {
+ start[i] = 0;
+ block[i] = dims[i];
+ }
+
+ stride[i] = 1;
+ count[i] = 1;
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME2);
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't select hyperslab for dataset write\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ {
+ hsize_t mdims[] = {data_size / DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE};
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+ }
+
+ if (H5Dwrite(dset_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_DTYPE, mspace_id, fspace_id,
+ H5P_DEFAULT, write_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME2);
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+ if (mspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(mspace_id);
+ }
+ H5E_END_TRY;
+ mspace_id = H5I_INVALID_HID;
+ }
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ /*
+ * Close and re-open the file to ensure that the data gets written.
+ */
+ if (H5Gclose(group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close test's container group\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+ if (H5Gclose(container_group) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close container group\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+ if (H5Fclose(file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close file for data flushing\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+ if ((group_id =
+ H5Gopen2(container_group, DATASET_WRITE_DATA_VERIFY_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container sub-group '%s'\n",
+ DATASET_WRITE_DATA_VERIFY_TEST_GROUP_NAME);
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME2, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME2);
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataspace num points\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if (NULL ==
+ (read_buf = HDmalloc((hsize_t)space_npoints * DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ if (H5Dread(dset_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME2);
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+
+ for (i = 0; i < (size_t)mpi_size; i++) {
+ size_t j;
+
+ for (j = 0; j < data_size / DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE; j++) {
+ if (((int *)
+ read_buf)[j + (i * (data_size / DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE))] !=
+ (int)i) {
+ H5_FAILED();
+ HDprintf(" hyperslab selection data verification failed\n");
+ PART_ERROR(H5Dwrite_hyperslab_read);
+ }
+ }
+ }
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dwrite_hyperslab_read);
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (mspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(mspace_id);
+ }
+ H5E_END_TRY;
+ mspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ PART_BEGIN(H5Dwrite_point_sel_read)
+ {
+ TESTING_2("H5Dwrite using point selection then H5Dread");
+
+ for (i = 1, data_size = 1; i < DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE;
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ /* Use different data than the previous test to ensure that the data actually changed. */
+ for (i = 0; i < data_size / DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE; i++)
+ ((int *)write_buf)[i] = mpi_size - mpi_rank;
+
+ if (NULL == (points = HDmalloc(DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK *
+ (data_size / DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE) *
+ sizeof(hsize_t)))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for point selection\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ /* Each MPI rank writes to a single row in the second dimension
+ * and the entirety of the following dimensions. The combined
+ * selections from all MPI ranks spans the first dimension.
+ */
+ for (i = 0; i < data_size / DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE; i++) {
+ size_t j;
+
+ for (j = 0; j < DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK; j++) {
+ size_t idx = (i * DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK) + j;
+
+ if (j == 0)
+ points[idx] = (hsize_t)mpi_rank;
+ else if (j != DATASET_WRITE_DATA_VERIFY_TEST_SPACE_RANK - 1)
+ points[idx] = i / dims[j + 1];
+ else
+ points[idx] = i % dims[j];
+ }
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME3);
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if (H5Sselect_elements(fspace_id, H5S_SELECT_SET,
+ data_size / DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE, points) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't select elements in dataspace\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ {
+ hsize_t mdims[] = {data_size / DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE};
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+ }
+
+ if (H5Dwrite(dset_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_DTYPE, mspace_id, fspace_id,
+ H5P_DEFAULT, write_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME3);
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+ if (mspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(mspace_id);
+ }
+ H5E_END_TRY;
+ mspace_id = H5I_INVALID_HID;
+ }
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ /*
+ * Close and re-open the file to ensure that the data gets written.
+ */
+ if (H5Gclose(group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close test's container group\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+ if (H5Gclose(container_group) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close container group\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+ if (H5Fclose(file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close file for data flushing\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+ if ((group_id =
+ H5Gopen2(container_group, DATASET_WRITE_DATA_VERIFY_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container sub-group '%s'\n",
+ DATASET_WRITE_DATA_VERIFY_TEST_GROUP_NAME);
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME3, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME3);
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataspace num points\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if (NULL ==
+ (read_buf = HDmalloc((hsize_t)space_npoints * DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ if (H5Dread(dset_id, DATASET_WRITE_DATA_VERIFY_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_WRITE_DATA_VERIFY_TEST_DSET_NAME3);
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+
+ for (i = 0; i < (size_t)mpi_size; i++) {
+ size_t j;
+
+ for (j = 0; j < data_size / DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE; j++) {
+ if (((int *)
+ read_buf)[j + (i * (data_size / DATASET_WRITE_DATA_VERIFY_TEST_DTYPE_SIZE))] !=
+ (mpi_size - (int)i)) {
+ H5_FAILED();
+ HDprintf(" point selection data verification failed\n");
+ PART_ERROR(H5Dwrite_point_sel_read);
+ }
+ }
+ }
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ PASSED();
+ }
+ PART_END(H5Dwrite_point_sel_read);
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+ if (points) {
+ HDfree(points);
+ points = NULL;
+ }
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (mspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(mspace_id);
+ }
+ H5E_END_TRY;
+ mspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+ }
+ END_MULTIPART;
+
+ TESTING_2("test cleanup");
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+
+ if (points) {
+ HDfree(points);
+ points = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (read_buf)
+ HDfree(read_buf);
+ if (write_buf)
+ HDfree(write_buf);
+ if (points)
+ HDfree(points);
+ if (dims)
+ HDfree(dims);
+ H5Sclose(mspace_id);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Pclose(fapl_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to ensure that independent dataset writes function
+ * as expected. First, two datasets are created in the file.
+ * Then, the even MPI ranks first write to dataset 1, followed
+ * by dataset 2. The odd MPI ranks first write to dataset 2,
+ * followed by dataset 1. After this, the data is read back from
+ * each dataset and verified.
+ */
+#define DATASET_INDEPENDENT_WRITE_TEST_SPACE_RANK 3
+#define DATASET_INDEPENDENT_WRITE_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_INDEPENDENT_WRITE_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_INDEPENDENT_WRITE_TEST_GROUP_NAME "independent_dataset_write_test"
+#define DATASET_INDEPENDENT_WRITE_TEST_DSET_NAME1 "dset1"
+#define DATASET_INDEPENDENT_WRITE_TEST_DSET_NAME2 "dset2"
+static int
+test_write_dataset_independent(void)
+{
+    hssize_t space_npoints;
+    hsize_t *dims = NULL;
+    hsize_t  start[DATASET_INDEPENDENT_WRITE_TEST_SPACE_RANK];
+    hsize_t  stride[DATASET_INDEPENDENT_WRITE_TEST_SPACE_RANK];
+    hsize_t  count[DATASET_INDEPENDENT_WRITE_TEST_SPACE_RANK];
+    hsize_t  block[DATASET_INDEPENDENT_WRITE_TEST_SPACE_RANK];
+    size_t   i, data_size;
+    hid_t    file_id = H5I_INVALID_HID;
+    hid_t    fapl_id = H5I_INVALID_HID;
+    hid_t    container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t    dset_id1 = H5I_INVALID_HID, dset_id2 = H5I_INVALID_HID;
+    hid_t    fspace_id = H5I_INVALID_HID;
+    hid_t    mspace_id = H5I_INVALID_HID;
+    void    *write_buf = NULL;
+    void    *read_buf  = NULL;
+
+    TESTING("independent writing to different datasets by different ranks");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* Create a parallel FAPL (TRUE => collective metadata ops) and
+     * re-open the shared test file with it.
+     */
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_INDEPENDENT_WRITE_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n", DATASET_INDEPENDENT_WRITE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /*
+     * Setup dimensions of overall datasets and slabs local
+     * to the MPI rank.
+     */
+    if (generate_random_parallel_dimensions(DATASET_INDEPENDENT_WRITE_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    if ((fspace_id = H5Screate_simple(DATASET_INDEPENDENT_WRITE_TEST_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    /* create a dataset collectively */
+    if ((dset_id1 = H5Dcreate2(group_id, DATASET_INDEPENDENT_WRITE_TEST_DSET_NAME1,
+                               DATASET_INDEPENDENT_WRITE_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to create first dataset\n");
+        goto error;
+    }
+    if ((dset_id2 = H5Dcreate2(group_id, DATASET_INDEPENDENT_WRITE_TEST_DSET_NAME2,
+                               DATASET_INDEPENDENT_WRITE_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
+                               H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to create second dataset\n");
+        goto error;
+    }
+
+    /* Per-rank write size: product of all dimensions except the first
+     * (each rank owns exactly one "row" of dimension 0), times the
+     * element size in bytes.
+     */
+    for (i = 1, data_size = 1; i < DATASET_INDEPENDENT_WRITE_TEST_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_INDEPENDENT_WRITE_TEST_DTYPE_SIZE;
+
+    if (NULL == (write_buf = HDmalloc(data_size))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset write\n");
+        goto error;
+    }
+
+    /* Fill the write buffer with this rank's number so the readback
+     * phase can tell which rank wrote each row.
+     */
+    for (i = 0; i < data_size / DATASET_INDEPENDENT_WRITE_TEST_DTYPE_SIZE; i++)
+        ((int *)write_buf)[i] = mpi_rank;
+
+    /* Hyperslab: rank N selects row N of dimension 0 and the full
+     * extent of every other dimension.
+     */
+    for (i = 0; i < DATASET_INDEPENDENT_WRITE_TEST_SPACE_RANK; i++) {
+        if (i == 0) {
+            start[i] = (hsize_t)mpi_rank;
+            block[i] = 1;
+        }
+        else {
+            start[i] = 0;
+            block[i] = dims[i];
+        }
+
+        stride[i] = 1;
+        count[i]  = 1;
+    }
+
+    if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't select hyperslab for dataset write\n");
+        goto error;
+    }
+
+    {
+        /* 1-D memory dataspace matching the number of elements selected
+         * in the file dataspace above.
+         */
+        hsize_t mdims[] = {data_size / DATASET_INDEPENDENT_WRITE_TEST_DTYPE_SIZE};
+
+        if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create memory dataspace\n");
+            goto error;
+        }
+    }
+
+    /*
+     * To test the independent orders of writes between processes, all
+     * even number processes write to dataset1 first, then dataset2.
+     * All odd number processes write to dataset2 first, then dataset1.
+     */
+    BEGIN_INDEPENDENT_OP(dset_write)
+    {
+        if (mpi_rank % 2 == 0) {
+            if (H5Dwrite(dset_id1, DATASET_INDEPENDENT_WRITE_TEST_DSET_DTYPE, mspace_id, fspace_id,
+                         H5P_DEFAULT, write_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    even ranks failed to write to dataset 1\n");
+                INDEPENDENT_OP_ERROR(dset_write);
+            }
+            if (H5Dwrite(dset_id2, DATASET_INDEPENDENT_WRITE_TEST_DSET_DTYPE, mspace_id, fspace_id,
+                         H5P_DEFAULT, write_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    even ranks failed to write to dataset 2\n");
+                INDEPENDENT_OP_ERROR(dset_write);
+            }
+        }
+        else {
+            if (H5Dwrite(dset_id2, DATASET_INDEPENDENT_WRITE_TEST_DSET_DTYPE, mspace_id, fspace_id,
+                         H5P_DEFAULT, write_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    odd ranks failed to write to dataset 2\n");
+                INDEPENDENT_OP_ERROR(dset_write);
+            }
+            if (H5Dwrite(dset_id1, DATASET_INDEPENDENT_WRITE_TEST_DSET_DTYPE, mspace_id, fspace_id,
+                         H5P_DEFAULT, write_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    odd ranks failed to write to dataset 1\n");
+                INDEPENDENT_OP_ERROR(dset_write);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(dset_write);
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+
+    H5Sclose(mspace_id);
+    mspace_id = H5I_INVALID_HID;
+    H5Sclose(fspace_id);
+    fspace_id = H5I_INVALID_HID;
+    H5Dclose(dset_id1);
+    dset_id1 = H5I_INVALID_HID;
+    H5Dclose(dset_id2);
+    dset_id2 = H5I_INVALID_HID;
+
+    /*
+     * Close and re-open the file to ensure that the data gets written.
+     */
+    if (H5Gclose(group_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close test's container group\n");
+        goto error;
+    }
+    if (H5Gclose(container_group) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close container group\n");
+        goto error;
+    }
+    if (H5Fclose(file_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close file for data flushing\n");
+        goto error;
+    }
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+    if ((group_id = H5Gopen2(container_group, DATASET_INDEPENDENT_WRITE_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container sub-group '%s'\n", DATASET_INDEPENDENT_WRITE_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_id1 = H5Dopen2(group_id, DATASET_INDEPENDENT_WRITE_TEST_DSET_NAME1, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n", DATASET_INDEPENDENT_WRITE_TEST_DSET_NAME1);
+        goto error;
+    }
+    if ((dset_id2 = H5Dopen2(group_id, DATASET_INDEPENDENT_WRITE_TEST_DSET_NAME2, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n", DATASET_INDEPENDENT_WRITE_TEST_DSET_NAME2);
+        goto error;
+    }
+
+    /*
+     * Verify that data has been written correctly.
+     */
+    if ((fspace_id = H5Dget_space(dset_id1)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataspace num points\n");
+        goto error;
+    }
+
+    /* Every rank reads the entire dataset (H5S_ALL), not just its own row */
+    if (NULL == (read_buf = HDmalloc((hsize_t)space_npoints * DATASET_INDEPENDENT_WRITE_TEST_DTYPE_SIZE))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset read\n");
+        goto error;
+    }
+
+    if (H5Dread(dset_id1, DATASET_INDEPENDENT_WRITE_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                read_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't read from dataset '%s'\n", DATASET_INDEPENDENT_WRITE_TEST_DSET_NAME1);
+        goto error;
+    }
+
+    /* Row i of dimension 0 must contain rank i's value in every element */
+    for (i = 0; i < (size_t)mpi_size; i++) {
+        size_t j;
+
+        for (j = 0; j < data_size / DATASET_INDEPENDENT_WRITE_TEST_DTYPE_SIZE; j++) {
+            if (((int *)read_buf)[j + (i * (data_size / DATASET_INDEPENDENT_WRITE_TEST_DTYPE_SIZE))] !=
+                (int)i) {
+                H5_FAILED();
+                HDprintf("    dataset 1 data verification failed\n");
+                goto error;
+            }
+        }
+    }
+
+    /* Same verification for dataset 2, reusing the read buffer */
+    if (H5Dread(dset_id2, DATASET_INDEPENDENT_WRITE_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                read_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't read from dataset '%s'\n", DATASET_INDEPENDENT_WRITE_TEST_DSET_NAME2);
+        goto error;
+    }
+
+    for (i = 0; i < (size_t)mpi_size; i++) {
+        size_t j;
+
+        for (j = 0; j < data_size / DATASET_INDEPENDENT_WRITE_TEST_DTYPE_SIZE; j++) {
+            if (((int *)read_buf)[j + (i * (data_size / DATASET_INDEPENDENT_WRITE_TEST_DTYPE_SIZE))] !=
+                (int)i) {
+                H5_FAILED();
+                HDprintf("    dataset 2 data verification failed\n");
+                goto error;
+            }
+        }
+    }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id1) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id2) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors from the close calls are suppressed */
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        if (write_buf)
+            HDfree(write_buf);
+        if (dims)
+            HDfree(dims);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id1);
+        H5Dclose(dset_id2);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be written to by having
+ * one of the MPI ranks select 0 rows in a hyperslab selection.
+ */
+#define DATASET_WRITE_ONE_PROC_0_SEL_TEST_SPACE_RANK 2
+#define DATASET_WRITE_ONE_PROC_0_SEL_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_WRITE_ONE_PROC_0_SEL_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_WRITE_ONE_PROC_0_SEL_TEST_GROUP_NAME "one_rank_0_sel_write_test"
+#define DATASET_WRITE_ONE_PROC_0_SEL_TEST_DSET_NAME  "one_rank_0_sel_dset"
+static int
+test_write_dataset_one_proc_0_selection(void)
+{
+    hssize_t space_npoints;
+    hsize_t *dims = NULL;
+    hsize_t  start[DATASET_WRITE_ONE_PROC_0_SEL_TEST_SPACE_RANK];
+    hsize_t  stride[DATASET_WRITE_ONE_PROC_0_SEL_TEST_SPACE_RANK];
+    hsize_t  count[DATASET_WRITE_ONE_PROC_0_SEL_TEST_SPACE_RANK];
+    hsize_t  block[DATASET_WRITE_ONE_PROC_0_SEL_TEST_SPACE_RANK];
+    size_t   i, data_size;
+    hid_t    file_id = H5I_INVALID_HID;
+    hid_t    fapl_id = H5I_INVALID_HID;
+    hid_t    container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t    dset_id   = H5I_INVALID_HID;
+    hid_t    fspace_id = H5I_INVALID_HID;
+    hid_t    mspace_id = H5I_INVALID_HID;
+    void    *write_buf = NULL;
+    void    *read_buf  = NULL;
+
+    TESTING("write to dataset with one rank selecting 0 rows");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* Parallel FAPL with collective metadata ops; open the shared test file */
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_WRITE_ONE_PROC_0_SEL_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_WRITE_ONE_PROC_0_SEL_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if (generate_random_parallel_dimensions(DATASET_WRITE_ONE_PROC_0_SEL_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    if ((fspace_id = H5Screate_simple(DATASET_WRITE_ONE_PROC_0_SEL_TEST_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_WRITE_ONE_PROC_0_SEL_TEST_DSET_NAME,
+                              DATASET_WRITE_ONE_PROC_0_SEL_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_WRITE_ONE_PROC_0_SEL_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Per-rank write size in bytes: product of all dims except dim 0 */
+    for (i = 1, data_size = 1; i < DATASET_WRITE_ONE_PROC_0_SEL_TEST_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_WRITE_ONE_PROC_0_SEL_TEST_DTYPE_SIZE;
+
+    /* Only the non-main ranks allocate/fill a write buffer; the main
+     * process (rank 0) will select 0 rows and writes nothing, so its
+     * write_buf stays NULL.
+     */
+    BEGIN_INDEPENDENT_OP(write_buf_alloc)
+    {
+        if (!MAINPROCESS) {
+            if (NULL == (write_buf = HDmalloc(data_size))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset write\n");
+                INDEPENDENT_OP_ERROR(write_buf_alloc);
+            }
+
+            for (i = 0; i < data_size / DATASET_WRITE_ONE_PROC_0_SEL_TEST_DTYPE_SIZE; i++)
+                ((int *)write_buf)[i] = mpi_rank;
+        }
+    }
+    END_INDEPENDENT_OP(write_buf_alloc);
+
+    /* Hyperslab: rank N selects row N of dim 0 and the full extent of
+     * the other dims — except the main process, whose block/count of 0
+     * makes its selection empty.
+     */
+    for (i = 0; i < DATASET_WRITE_ONE_PROC_0_SEL_TEST_SPACE_RANK; i++) {
+        if (i == 0) {
+            start[i] = (hsize_t)mpi_rank;
+            block[i] = MAINPROCESS ? 0 : 1;
+        }
+        else {
+            start[i] = 0;
+            block[i] = MAINPROCESS ? 0 : dims[i];
+        }
+
+        stride[i] = 1;
+        count[i]  = MAINPROCESS ? 0 : 1;
+    }
+
+    if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't select hyperslab for dataset write\n");
+        goto error;
+    }
+
+    {
+        /* 1-D memory dataspace; zero-sized on the main process to match
+         * its empty file selection.
+         */
+        hsize_t mdims[] = {data_size / DATASET_WRITE_ONE_PROC_0_SEL_TEST_DTYPE_SIZE};
+
+        if (MAINPROCESS)
+            mdims[0] = 0;
+
+        if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create memory dataspace\n");
+            goto error;
+        }
+    }
+
+    /* All ranks call H5Dwrite; the main process participates with a
+     * 0-element selection and a NULL buffer.
+     */
+    BEGIN_INDEPENDENT_OP(dset_write)
+    {
+        if (H5Dwrite(dset_id, DATASET_WRITE_ONE_PROC_0_SEL_TEST_DSET_DTYPE, mspace_id, fspace_id, H5P_DEFAULT,
+                     write_buf) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't write to dataset '%s'\n", DATASET_WRITE_ONE_PROC_0_SEL_TEST_DSET_NAME);
+            INDEPENDENT_OP_ERROR(dset_write);
+        }
+    }
+    END_INDEPENDENT_OP(dset_write);
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+    if (mspace_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Sclose(mspace_id);
+        }
+        H5E_END_TRY;
+        mspace_id = H5I_INVALID_HID;
+    }
+    if (fspace_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Sclose(fspace_id);
+        }
+        H5E_END_TRY;
+        fspace_id = H5I_INVALID_HID;
+    }
+    if (dset_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Dclose(dset_id);
+        }
+        H5E_END_TRY;
+        dset_id = H5I_INVALID_HID;
+    }
+
+    /*
+     * Close and re-open the file to ensure that the data gets written.
+     */
+    if (H5Gclose(group_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close test's container group\n");
+        goto error;
+    }
+    if (H5Gclose(container_group) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close container group\n");
+        goto error;
+    }
+    if (H5Fclose(file_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close file for data flushing\n");
+        goto error;
+    }
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+    if ((group_id = H5Gopen2(container_group, DATASET_WRITE_ONE_PROC_0_SEL_TEST_GROUP_NAME, H5P_DEFAULT)) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container sub-group '%s'\n",
+                 DATASET_WRITE_ONE_PROC_0_SEL_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_ONE_PROC_0_SEL_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n", DATASET_WRITE_ONE_PROC_0_SEL_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataspace num points\n");
+        goto error;
+    }
+
+    /* Every rank reads the whole dataset back for verification */
+    if (NULL ==
+        (read_buf = HDmalloc((hsize_t)space_npoints * DATASET_WRITE_ONE_PROC_0_SEL_TEST_DTYPE_SIZE))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset read\n");
+        goto error;
+    }
+
+    if (H5Dread(dset_id, DATASET_WRITE_ONE_PROC_0_SEL_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                read_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't read from dataset '%s'\n", DATASET_WRITE_ONE_PROC_0_SEL_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Row 0 is skipped — the main process wrote nothing there, so its
+     * contents are undefined; every other row must hold its rank number.
+     */
+    for (i = 0; i < (size_t)mpi_size; i++) {
+        size_t j;
+
+        if (i != 0) {
+            for (j = 0; j < data_size / DATASET_WRITE_ONE_PROC_0_SEL_TEST_DTYPE_SIZE; j++) {
+                if (((int *)read_buf)[j + (i * (data_size / DATASET_WRITE_ONE_PROC_0_SEL_TEST_DTYPE_SIZE))] !=
+                    (int)i) {
+                    H5_FAILED();
+                    HDprintf("    data verification failed\n");
+                    goto error;
+                }
+            }
+        }
+    }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors from the close calls are suppressed */
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        if (write_buf)
+            HDfree(write_buf);
+        if (dims)
+            HDfree(dims);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be written to by having
+ * one of the MPI ranks call H5Sselect_none.
+ */
+#define DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_SPACE_RANK 2
+#define DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_GROUP_NAME "one_rank_none_sel_write_test"
+#define DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DSET_NAME  "one_rank_none_sel_dset"
+static int
+test_write_dataset_one_proc_none_selection(void)
+{
+    hssize_t space_npoints;
+    hsize_t *dims = NULL;
+    hsize_t  start[DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_SPACE_RANK];
+    hsize_t  stride[DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_SPACE_RANK];
+    hsize_t  count[DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_SPACE_RANK];
+    hsize_t  block[DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_SPACE_RANK];
+    size_t   i, data_size;
+    hid_t    file_id = H5I_INVALID_HID;
+    hid_t    fapl_id = H5I_INVALID_HID;
+    hid_t    container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t    dset_id   = H5I_INVALID_HID;
+    hid_t    fspace_id = H5I_INVALID_HID;
+    hid_t    mspace_id = H5I_INVALID_HID;
+    void    *write_buf = NULL;
+    void    *read_buf  = NULL;
+
+    TESTING("write to dataset with one rank using 'none' selection");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* Parallel FAPL with collective metadata ops; open the shared test file */
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_GROUP_NAME, H5P_DEFAULT,
+                               H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if (generate_random_parallel_dimensions(DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    if ((fspace_id = H5Screate_simple(DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DSET_NAME,
+                              DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Per-rank write size in bytes: product of all dims except dim 0 */
+    for (i = 1, data_size = 1; i < DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DTYPE_SIZE;
+
+    /* Only the non-main ranks allocate/fill a write buffer; the main
+     * process (rank 0) will use a 'none' selection and writes nothing,
+     * so its write_buf stays NULL.
+     */
+    BEGIN_INDEPENDENT_OP(write_buf_alloc)
+    {
+        if (!MAINPROCESS) {
+            if (NULL == (write_buf = HDmalloc(data_size))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset write\n");
+                INDEPENDENT_OP_ERROR(write_buf_alloc);
+            }
+
+            for (i = 0; i < data_size / DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DTYPE_SIZE; i++)
+                ((int *)write_buf)[i] = mpi_rank;
+        }
+    }
+    END_INDEPENDENT_OP(write_buf_alloc);
+
+    /* Hyperslab parameters (used by non-main ranks only): rank N selects
+     * row N of dim 0 and the full extent of the other dims.
+     */
+    for (i = 0; i < DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_SPACE_RANK; i++) {
+        if (i == 0) {
+            start[i] = (hsize_t)mpi_rank;
+            block[i] = 1;
+        }
+        else {
+            start[i] = 0;
+            block[i] = dims[i];
+        }
+
+        stride[i] = 1;
+        count[i]  = 1;
+    }
+
+    /* Main process explicitly selects nothing via H5Sselect_none; all
+     * other ranks select their hyperslab.
+     */
+    BEGIN_INDEPENDENT_OP(set_space_sel)
+    {
+        if (MAINPROCESS) {
+            if (H5Sselect_none(fspace_id) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't set 'none' selection for dataset write\n");
+                INDEPENDENT_OP_ERROR(set_space_sel);
+            }
+        }
+        else {
+            if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't select hyperslab for dataset write\n");
+                INDEPENDENT_OP_ERROR(set_space_sel);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(set_space_sel);
+
+    {
+        /* 1-D memory dataspace; zero-sized on the main process to match
+         * its 'none' file selection.
+         */
+        hsize_t mdims[] = {data_size / DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DTYPE_SIZE};
+
+        if (MAINPROCESS)
+            mdims[0] = 0;
+
+        if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create memory dataspace\n");
+            goto error;
+        }
+    }
+
+    /* All ranks call H5Dwrite; the main process participates with its
+     * 'none' selection and a NULL buffer.
+     */
+    BEGIN_INDEPENDENT_OP(dset_write)
+    {
+        if (H5Dwrite(dset_id, DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DSET_DTYPE, mspace_id, fspace_id,
+                     H5P_DEFAULT, write_buf) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't write to dataset '%s'\n", DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DSET_NAME);
+            INDEPENDENT_OP_ERROR(dset_write);
+        }
+    }
+    END_INDEPENDENT_OP(dset_write);
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+    if (mspace_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Sclose(mspace_id);
+        }
+        H5E_END_TRY;
+        mspace_id = H5I_INVALID_HID;
+    }
+    if (fspace_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Sclose(fspace_id);
+        }
+        H5E_END_TRY;
+        fspace_id = H5I_INVALID_HID;
+    }
+    if (dset_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Dclose(dset_id);
+        }
+        H5E_END_TRY;
+        dset_id = H5I_INVALID_HID;
+    }
+
+    /*
+     * Close and re-open the file to ensure that the data gets written.
+     */
+    if (H5Gclose(group_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close test's container group\n");
+        goto error;
+    }
+    if (H5Gclose(container_group) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close container group\n");
+        goto error;
+    }
+    if (H5Fclose(file_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close file for data flushing\n");
+        goto error;
+    }
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+    if ((group_id = H5Gopen2(container_group, DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_GROUP_NAME, H5P_DEFAULT)) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container sub-group '%s'\n",
+                 DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n", DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataspace num points\n");
+        goto error;
+    }
+
+    /* Every rank reads the whole dataset back for verification */
+    if (NULL ==
+        (read_buf = HDmalloc((hsize_t)space_npoints * DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DTYPE_SIZE))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset read\n");
+        goto error;
+    }
+
+    if (H5Dread(dset_id, DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                read_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't read from dataset '%s'\n", DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Row 0 is skipped — the main process wrote nothing there, so its
+     * contents are undefined; every other row must hold its rank number.
+     */
+    for (i = 0; i < (size_t)mpi_size; i++) {
+        size_t j;
+
+        if (i != 0) {
+            for (j = 0; j < data_size / DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DTYPE_SIZE; j++) {
+                if (((int *)
+                         read_buf)[j + (i * (data_size / DATASET_WRITE_ONE_PROC_NONE_SEL_TEST_DTYPE_SIZE))] !=
+                    (int)i) {
+                    H5_FAILED();
+                    HDprintf("    data verification failed\n");
+                    goto error;
+                }
+            }
+        }
+    }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors from the close calls are suppressed */
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        if (write_buf)
+            HDfree(write_buf);
+        if (dims)
+            HDfree(dims);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be written to by having
+ * one of the MPI ranks use an ALL selection, while the other
+ * ranks write nothing.
+ *
+ * Only rank 0 (MAINPROCESS) supplies data; every other rank
+ * participates in the collective H5Dwrite with a 'none' file
+ * selection and an empty (0-element) memory dataspace.
+ * Verification re-opens the file and reads the full dataset
+ * back on every rank.
+ */
+#define DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_SPACE_RANK 2
+#define DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_GROUP_NAME "one_rank_all_sel_write_test"
+#define DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DSET_NAME "one_rank_all_sel_dset"
+static int
+test_write_dataset_one_proc_all_selection(void)
+{
+ hssize_t space_npoints;
+ hsize_t *dims = NULL;
+ size_t i, data_size;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ void *write_buf = NULL;
+ void *read_buf = NULL;
+
+ TESTING("write to dataset with one rank using all selection; others none selection");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_parallel_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n",
+ DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if (generate_random_parallel_dimensions(DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_SPACE_RANK, &dims) < 0)
+ TEST_ERROR;
+
+ if ((fspace_id = H5Screate_simple(DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DSET_NAME,
+ DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DSET_NAME);
+ goto error;
+ }
+
+ /* data_size = total bytes for the WHOLE dataset (all dims), since
+ * the single writing rank covers the entire file dataspace. */
+ for (i = 0, data_size = 1; i < DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DTYPE_SIZE;
+
+ /* Only rank 0 allocates and fills a write buffer (values 0..N-1);
+ * other ranks write nothing and keep write_buf == NULL.
+ * NOTE(review): BEGIN/END_INDEPENDENT_OP presumably coordinate
+ * per-rank failure across MPI ranks -- confirm macro definition. */
+ BEGIN_INDEPENDENT_OP(write_buf_alloc)
+ {
+ if (MAINPROCESS) {
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ INDEPENDENT_OP_ERROR(write_buf_alloc);
+ }
+
+ for (i = 0; i < data_size / DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DTYPE_SIZE; i++)
+ ((int *)write_buf)[i] = (int)i;
+ }
+ }
+ END_INDEPENDENT_OP(write_buf_alloc);
+
+ /* Rank 0 selects the entire file dataspace; every other rank
+ * selects nothing, so only rank 0 contributes data. */
+ BEGIN_INDEPENDENT_OP(set_space_sel)
+ {
+ if (MAINPROCESS) {
+ if (H5Sselect_all(fspace_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set 'all' selection for dataset write\n");
+ INDEPENDENT_OP_ERROR(set_space_sel);
+ }
+ }
+ else {
+ if (H5Sselect_none(fspace_id) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set 'none' selection for dataset write\n");
+ INDEPENDENT_OP_ERROR(set_space_sel);
+ }
+ }
+ }
+ END_INDEPENDENT_OP(set_space_sel);
+
+ /* Memory dataspace must match each rank's selection size:
+ * the full element count on rank 0, zero elements elsewhere. */
+ {
+ hsize_t mdims[] = {data_size / DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DTYPE_SIZE};
+
+ if (!MAINPROCESS)
+ mdims[0] = 0;
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ goto error;
+ }
+ }
+
+ /* Collective write: all ranks call H5Dwrite even though only
+ * rank 0 has a non-empty selection. */
+ BEGIN_INDEPENDENT_OP(dset_write)
+ {
+ if (H5Dwrite(dset_id, DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DSET_DTYPE, mspace_id, fspace_id,
+ H5P_DEFAULT, write_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n", DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DSET_NAME);
+ INDEPENDENT_OP_ERROR(dset_write);
+ }
+ }
+ END_INDEPENDENT_OP(dset_write);
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+ if (mspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(mspace_id);
+ }
+ H5E_END_TRY;
+ mspace_id = H5I_INVALID_HID;
+ }
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ /*
+ * Close and re-open the file to ensure that the data gets written.
+ */
+ if (H5Gclose(group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close test's container group\n");
+ goto error;
+ }
+ if (H5Gclose(container_group) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close container group\n");
+ goto error;
+ }
+ if (H5Fclose(file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close file for data flushing\n");
+ goto error;
+ }
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+ goto error;
+ }
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+ if ((group_id = H5Gopen2(container_group, DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_GROUP_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container sub-group '%s'\n",
+ DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DSET_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ goto error;
+ }
+
+ if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataspace num points\n");
+ goto error;
+ }
+
+ if (NULL ==
+ (read_buf = HDmalloc((hsize_t)space_npoints * DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DTYPE_SIZE))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ goto error;
+ }
+
+ /* Every rank reads the whole dataset back (H5S_ALL) and checks
+ * that rank 0's write covered every element with value == index. */
+ if (H5Dread(dset_id, DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DSET_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < data_size / DATASET_WRITE_ONE_PROC_ALL_SEL_TEST_DTYPE_SIZE; i++) {
+ if (((int *)read_buf)[i] != (int)i) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ goto error;
+ }
+ }
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup of everything potentially still open/allocated */
+ H5E_BEGIN_TRY
+ {
+ if (read_buf)
+ HDfree(read_buf);
+ if (write_buf)
+ HDfree(write_buf);
+ if (dims)
+ HDfree(dims);
+ H5Sclose(mspace_id);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Pclose(fapl_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be written to by having
+ * a hyperslab selection in the file dataspace and an all selection
+ * in the memory dataspace.
+ *
+ * XXX: Currently pulls from invalid memory locations.
+ *
+ * NOTE: the whole body is compiled out unless BROKEN is defined;
+ * when BROKEN is not defined the test simply reports SKIPPED.
+ */
+#define DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_SPACE_RANK 2
+#define DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_GROUP_NAME "hyper_sel_file_all_sel_mem_write_test"
+#define DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DSET_NAME "hyper_sel_file_all_sel_mem_dset"
+static int
+test_write_dataset_hyper_file_all_mem(void)
+{
+#ifdef BROKEN
+ hssize_t space_npoints;
+ hsize_t *dims = NULL;
+ hsize_t start[DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_SPACE_RANK];
+ hsize_t stride[DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_SPACE_RANK];
+ hsize_t count[DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_SPACE_RANK];
+ hsize_t block[DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_SPACE_RANK];
+ size_t i, data_size;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ void *write_buf = NULL;
+ void *read_buf = NULL;
+#endif
+
+ TESTING("write to dataset with hyperslab sel. for file space; all sel. for memory");
+
+#ifdef BROKEN
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_parallel_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n",
+ DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if (generate_random_parallel_dimensions(DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_SPACE_RANK, &dims) < 0)
+ TEST_ERROR;
+
+ if ((fspace_id = H5Screate_simple(DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DSET_NAME,
+ DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DSET_NAME);
+ goto error;
+ }
+
+ /* data_size = bytes for ONE rank's row slab: the loop deliberately
+ * starts at i = 1, excluding dim 0 (which is split across ranks). */
+ for (i = 1, data_size = 1; i < DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DTYPE_SIZE;
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ goto error;
+ }
+
+ /* Each rank tags its slab with its own rank number for verification */
+ for (i = 0; i < data_size / DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DTYPE_SIZE; i++)
+ ((int *)write_buf)[i] = mpi_rank;
+
+ /* Hyperslab: one row (dim 0 index == mpi_rank), full extent in
+ * the remaining dimensions. */
+ for (i = 0; i < DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_SPACE_RANK; i++) {
+ if (i == 0) {
+ start[i] = mpi_rank;
+ block[i] = 1;
+ }
+ else {
+ start[i] = 0;
+ block[i] = dims[i];
+ }
+
+ stride[i] = 1;
+ count[i] = 1;
+ }
+
+ if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't select hyperslab for dataset write\n");
+ goto error;
+ }
+
+ /* H5S_ALL for memory: memory selection is taken to match the file
+ * selection. NOTE(review): this appears to be the source of the
+ * "pulls from invalid memory" XXX above -- confirm before enabling. */
+ if (H5Dwrite(dset_id, DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DSET_DTYPE, H5S_ALL, fspace_id, H5P_DEFAULT,
+ write_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n", DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DSET_NAME);
+ goto error;
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ /*
+ * Close and re-open the file to ensure that the data gets written.
+ */
+ if (H5Gclose(group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close test's container group\n");
+ goto error;
+ }
+ if (H5Gclose(container_group) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close container group\n");
+ goto error;
+ }
+ if (H5Fclose(file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close file for data flushing\n");
+ goto error;
+ }
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+ goto error;
+ }
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+ if ((group_id =
+ H5Gopen2(container_group, DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container sub-group '%s'\n",
+ DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DSET_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ goto error;
+ }
+
+ if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataspace num points\n");
+ goto error;
+ }
+
+ if (NULL ==
+ (read_buf = HDmalloc((hsize_t)space_npoints * DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DTYPE_SIZE))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ goto error;
+ }
+
+ if (H5Dread(dset_id, DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DSET_NAME);
+ goto error;
+ }
+
+ /* Row i of the dataset must contain rank i's tag value everywhere */
+ for (i = 0; i < (size_t)mpi_size; i++) {
+ size_t j;
+
+ for (j = 0; j < data_size / DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DTYPE_SIZE; j++) {
+ if (((int *)read_buf)[j + (i * (data_size / DATASET_WRITE_HYPER_FILE_ALL_MEM_TEST_DTYPE_SIZE))] !=
+ (int)i) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ goto error;
+ }
+ }
+ }
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+#else
+ SKIPPED();
+#endif
+
+ return 0;
+
+#ifdef BROKEN
+error:
+ /* Best-effort cleanup of everything potentially still open/allocated */
+ H5E_BEGIN_TRY
+ {
+ if (read_buf)
+ HDfree(read_buf);
+ if (write_buf)
+ HDfree(write_buf);
+ if (dims)
+ HDfree(dims);
+ H5Sclose(mspace_id);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Pclose(fapl_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+#endif
+}
+
+/*
+ * A test to ensure that a dataset can be written to by having
+ * an all selection in the file dataspace and a hyperslab
+ * selection in the memory dataspace.
+ *
+ * Rank 0 writes the whole dataset from a double-sized memory
+ * buffer in which only every other element holds real data;
+ * the stride-2 memory hyperslab must pick out exactly those
+ * elements. All other ranks participate in the collective
+ * write with no data.
+ */
+#define DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_SPACE_RANK 2
+#define DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_GROUP_NAME "all_sel_file_hyper_sel_mem_write_test"
+#define DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DSET_NAME "all_sel_file_hyper_sel_mem_dset"
+static int
+test_write_dataset_all_file_hyper_mem(void)
+{
+ hssize_t space_npoints;
+ hsize_t *dims = NULL;
+ size_t i, data_size;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ void *write_buf = NULL;
+ void *read_buf = NULL;
+
+ TESTING("write to dataset with all sel. for file space; hyperslab sel. for memory");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_parallel_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n",
+ DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if (generate_random_parallel_dimensions(DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_SPACE_RANK, &dims) < 0)
+ TEST_ERROR;
+
+ if ((fspace_id = H5Screate_simple(DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DSET_NAME,
+ DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DSET_NAME);
+ goto error;
+ }
+
+ /* data_size = total bytes for the WHOLE dataset (all dims) */
+ for (i = 0, data_size = 1; i < DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DTYPE_SIZE;
+
+ BEGIN_INDEPENDENT_OP(write_buf_alloc)
+ {
+ if (MAINPROCESS) {
+ /*
+ * Allocate twice the amount of memory needed and leave "holes" in the memory
+ * buffer in order to prove that the mapping from hyperslab selection <-> all
+ * selection works correctly.
+ */
+ if (NULL == (write_buf = HDmalloc(2 * data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ INDEPENDENT_OP_ERROR(write_buf_alloc);
+ }
+
+ for (i = 0; i < 2 * (data_size / DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DTYPE_SIZE); i++) {
+ /* Write actual data to even indices */
+ /* NOTE(review): on the even branch (i % 2) == 0, so the value
+ * reduces to (int)(i / 2); the "+ (i % 2)" term is a no-op kept
+ * for symmetry with the sibling point-selection test. */
+ if (i % 2 == 0)
+ ((int *)write_buf)[i] = (int)((i / 2) + (i % 2));
+ else
+ ((int *)write_buf)[i] = 0;
+ }
+ }
+ }
+ END_INDEPENDENT_OP(write_buf_alloc);
+
+ /*
+ * Only have rank 0 perform the dataset write, as writing the entire dataset on all ranks
+ * might be stressful on system resources. There's also no guarantee as to what the outcome
+ * would be, since the writes would be overlapping with each other.
+ */
+ BEGIN_INDEPENDENT_OP(dset_write)
+ {
+ if (MAINPROCESS) {
+ /* Stride-2 memory hyperslab: selects exactly the even-indexed
+ * (real-data) elements of the double-sized buffer */
+ hsize_t start[1] = {0};
+ hsize_t stride[1] = {2};
+ hsize_t count[1] = {data_size / DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DTYPE_SIZE};
+ hsize_t block[1] = {1};
+ hsize_t mdims[] = {2 * (data_size / DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DTYPE_SIZE)};
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ INDEPENDENT_OP_ERROR(dset_write);
+ }
+
+ if (H5Sselect_hyperslab(mspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't select hyperslab for dataset write\n");
+ INDEPENDENT_OP_ERROR(dset_write);
+ }
+
+ if (H5Dwrite(dset_id, DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DSET_DTYPE, mspace_id, H5S_ALL,
+ H5P_DEFAULT, write_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n",
+ DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DSET_NAME);
+ INDEPENDENT_OP_ERROR(dset_write);
+ }
+ }
+ }
+ END_INDEPENDENT_OP(dset_write);
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+ if (mspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(mspace_id);
+ }
+ H5E_END_TRY;
+ mspace_id = H5I_INVALID_HID;
+ }
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ /*
+ * Close and re-open the file to ensure that the data gets written.
+ */
+ if (H5Gclose(group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close test's container group\n");
+ goto error;
+ }
+ if (H5Gclose(container_group) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close container group\n");
+ goto error;
+ }
+ if (H5Fclose(file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close file for data flushing\n");
+ goto error;
+ }
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+ goto error;
+ }
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+ if ((group_id =
+ H5Gopen2(container_group, DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container sub-group '%s'\n",
+ DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DSET_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ goto error;
+ }
+
+ if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataspace num points\n");
+ goto error;
+ }
+
+ if (NULL ==
+ (read_buf = HDmalloc((hsize_t)space_npoints * DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DTYPE_SIZE))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ goto error;
+ }
+
+ if (H5Dread(dset_id, DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DSET_NAME);
+ goto error;
+ }
+
+ /* The hole elements must have been skipped: element i of the
+ * dataset equals i (the value stored at even index 2*i in memory) */
+ for (i = 0; i < data_size / DATASET_WRITE_ALL_FILE_HYPER_MEM_TEST_DTYPE_SIZE; i++) {
+ if (((int *)read_buf)[i] != (int)i) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ goto error;
+ }
+ }
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ /* Best-effort cleanup of everything potentially still open/allocated */
+ H5E_BEGIN_TRY
+ {
+ if (read_buf)
+ HDfree(read_buf);
+ if (write_buf)
+ HDfree(write_buf);
+ if (dims)
+ HDfree(dims);
+ H5Sclose(mspace_id);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Pclose(fapl_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be written to by having
+ * a point selection in the file dataspace and an all selection
+ * in the memory dataspace.
+ *
+ * Not yet implemented: this is a placeholder that always
+ * reports SKIPPED so the test appears in the run summary.
+ */
+static int
+test_write_dataset_point_file_all_mem(void)
+{
+ TESTING("write to dataset with point sel. for file space; all sel. for memory");
+
+ SKIPPED();
+
+ return 0;
+}
+
+/*
+ * A test to ensure that a dataset can be written to by having
+ * an all selection in the file dataspace and a point selection
+ * in the memory dataspace.
+ */
+#define DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_SPACE_RANK 2
+#define DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_GROUP_NAME "all_sel_file_point_sel_mem_write_test"
+#define DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DSET_NAME "all_sel_file_point_sel_mem_dset"
+static int
+test_write_dataset_all_file_point_mem(void)
+{
+ hssize_t space_npoints;
+ hsize_t *points = NULL;
+ hsize_t *dims = NULL;
+ size_t i, data_size;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ void *write_buf = NULL;
+ void *read_buf = NULL;
+
+ TESTING("write to dataset with all sel. for file space; point sel. for memory");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+ TEST_ERROR;
+
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_parallel_filename);
+ goto error;
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_GROUP_NAME, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n",
+ DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if (generate_random_parallel_dimensions(DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_SPACE_RANK, &dims) < 0)
+ TEST_ERROR;
+
+ if ((fspace_id = H5Screate_simple(DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_SPACE_RANK, dims, NULL)) < 0)
+ TEST_ERROR;
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DSET_NAME,
+ DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n", DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DSET_NAME);
+ goto error;
+ }
+
+ for (i = 0, data_size = 1; i < DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE;
+
+ BEGIN_INDEPENDENT_OP(write_buf_alloc)
+ {
+ if (MAINPROCESS) {
+ /*
+ * Allocate twice the amount of memory needed and leave "holes" in the memory
+ * buffer in order to prove that the mapping from point selection <-> all
+ * selection works correctly.
+ */
+ if (NULL == (write_buf = HDmalloc(2 * data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ INDEPENDENT_OP_ERROR(write_buf_alloc);
+ }
+
+ for (i = 0; i < 2 * (data_size / DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE); i++) {
+ /* Write actual data to even indices */
+ if (i % 2 == 0)
+ ((int *)write_buf)[i] = (int)((i / 2) + (i % 2));
+ else
+ ((int *)write_buf)[i] = 0;
+ }
+ }
+ }
+ END_INDEPENDENT_OP(write_buf_alloc);
+
+ /*
+ * Only have rank 0 perform the dataset write, as writing the entire dataset on all ranks
+ * might be stressful on system resources. There's also no guarantee as to what the outcome
+ * would be, since the writes would be overlapping with each other.
+ */
+ BEGIN_INDEPENDENT_OP(dset_write)
+ {
+ if (MAINPROCESS) {
+ hsize_t mdims[] = {2 * (data_size / DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE)};
+ int j;
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ INDEPENDENT_OP_ERROR(dset_write);
+ }
+
+ if (NULL == (points = HDmalloc((data_size / DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE) *
+ sizeof(hsize_t)))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for point selection\n");
+ INDEPENDENT_OP_ERROR(dset_write);
+ }
+
+ /* Select every other point in the 1-dimensional memory dataspace */
+ for (i = 0, j = 0; i < 2 * (data_size / DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE); i++) {
+ if (i % 2 == 0)
+ points[j++] = (hsize_t)i;
+ }
+
+ if (H5Sselect_elements(mspace_id, H5S_SELECT_SET,
+ data_size / DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE,
+ points) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set point selection for dataset write\n");
+ INDEPENDENT_OP_ERROR(dset_write);
+ }
+
+ if (H5Dwrite(dset_id, DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DSET_DTYPE, mspace_id, H5S_ALL,
+ H5P_DEFAULT, write_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n",
+ DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DSET_NAME);
+ INDEPENDENT_OP_ERROR(dset_write);
+ }
+ }
+ }
+ END_INDEPENDENT_OP(dset_write);
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+ if (points) {
+ HDfree(points);
+ points = NULL;
+ }
+ if (mspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(mspace_id);
+ }
+ H5E_END_TRY;
+ mspace_id = H5I_INVALID_HID;
+ }
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ /*
+ * Close and re-open the file to ensure that the data gets written.
+ */
+ if (H5Gclose(group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close test's container group\n");
+ goto error;
+ }
+ if (H5Gclose(container_group) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close container group\n");
+ goto error;
+ }
+ if (H5Fclose(file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close file for data flushing\n");
+ goto error;
+ }
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+ goto error;
+ }
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+ if ((group_id =
+ H5Gopen2(container_group, DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container sub-group '%s'\n",
+ DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DSET_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ goto error;
+ }
+
+ if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataspace num points\n");
+ goto error;
+ }
+
+ if (NULL ==
+ (read_buf = HDmalloc((hsize_t)space_npoints * DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ goto error;
+ }
+
+ if (H5Dread(dset_id, DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n", DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DSET_NAME);
+ goto error;
+ }
+
+ for (i = 0; i < data_size / DATASET_WRITE_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE; i++) {
+ if (((int *)read_buf)[i] != (int)i) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ goto error;
+ }
+ }
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (read_buf)
+ HDfree(read_buf);
+ if (write_buf)
+ HDfree(write_buf);
+ if (points)
+ HDfree(points);
+ if (dims)
+ HDfree(dims);
+ H5Sclose(mspace_id);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Pclose(fapl_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be written to by having
+ * a hyperslab selection in the file dataspace and a point
+ * selection in the memory dataspace.
+ */
+#define DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK 2
+#define DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_GROUP_NAME "hyper_sel_file_point_sel_mem_write_test"
+#define DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DSET_NAME "hyper_sel_file_point_sel_mem_dset"
+static int
+test_write_dataset_hyper_file_point_mem(void)
+{
+    hssize_t space_npoints;
+    hsize_t *dims = NULL;
+    hsize_t *points = NULL;
+    hsize_t start[DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK];
+    hsize_t stride[DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK];
+    hsize_t count[DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK];
+    hsize_t block[DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK];
+    size_t i, data_size;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t fapl_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+    hid_t mspace_id = H5I_INVALID_HID;
+    void *write_buf = NULL;
+    void *read_buf = NULL;
+
+    TESTING("write to dataset with hyperslab sel. for file space; point sel. for memory");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if (generate_random_parallel_dimensions(DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    if ((fspace_id = H5Screate_simple(DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DSET_NAME,
+                              DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Each rank writes exactly one "row" along dimension 0, so the per-rank
+     * data size is the product of the remaining dimensions only (note the
+     * loop intentionally starts at i = 1, skipping dims[0]).
+     */
+    for (i = 1, data_size = 1; i < DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE;
+
+    /*
+     * Allocate twice the amount of memory needed and leave "holes" in the memory
+     * buffer in order to prove that the mapping from point selection <-> hyperslab
+     * selection works correctly.
+     */
+    if (NULL == (write_buf = HDmalloc(2 * data_size))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset write\n");
+        goto error;
+    }
+
+    for (i = 0; i < 2 * (data_size / DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE); i++) {
+        /* Write actual data to even indices */
+        if (i % 2 == 0)
+            ((int *)write_buf)[i] = mpi_rank;
+        else
+            ((int *)write_buf)[i] = 0;
+    }
+
+    /* Each rank selects its own row of the file dataspace: dimension 0 is
+     * pinned to the rank number with block size 1; every other dimension
+     * covers its full extent.
+     */
+    for (i = 0; i < DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK; i++) {
+        if (i == 0) {
+            start[i] = (hsize_t)mpi_rank;
+            block[i] = 1;
+        }
+        else {
+            start[i] = 0;
+            block[i] = dims[i];
+        }
+
+        stride[i] = 1;
+        count[i] = 1;
+    }
+
+    if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't select hyperslab for dataset write\n");
+        goto error;
+    }
+
+    {
+        hsize_t mdims[] = {2 * (data_size / DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE)};
+        int j;
+
+        if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create memory dataspace\n");
+            goto error;
+        }
+
+        /* Exactly half of the doubled memory buffer is selected, so the
+         * point list holds data_size / DTYPE_SIZE coordinates.
+         */
+        if (NULL == (points = HDmalloc((data_size / DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE) *
+                                       sizeof(hsize_t)))) {
+            H5_FAILED();
+            HDprintf("    couldn't allocate buffer for point selection\n");
+            goto error;
+        }
+
+        /* Select every other point in the 1-dimensional memory dataspace */
+        for (i = 0, j = 0; i < 2 * (data_size / DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE); i++) {
+            if (i % 2 == 0)
+                points[j++] = (hsize_t)i;
+        }
+
+        if (H5Sselect_elements(mspace_id, H5S_SELECT_SET,
+                               data_size / DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE, points) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't set point selection for dataset write\n");
+            goto error;
+        }
+    }
+
+    if (H5Dwrite(dset_id, DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DSET_DTYPE, mspace_id, fspace_id,
+                 H5P_DEFAULT, write_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't write to dataset '%s'\n", DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+    if (points) {
+        HDfree(points);
+        points = NULL;
+    }
+    if (mspace_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Sclose(mspace_id);
+        }
+        H5E_END_TRY;
+        mspace_id = H5I_INVALID_HID;
+    }
+    if (fspace_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Sclose(fspace_id);
+        }
+        H5E_END_TRY;
+        fspace_id = H5I_INVALID_HID;
+    }
+    if (dset_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Dclose(dset_id);
+        }
+        H5E_END_TRY;
+        dset_id = H5I_INVALID_HID;
+    }
+
+    /*
+     * Close and re-open the file to ensure that the data gets written.
+     */
+    if (H5Gclose(group_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close test's container group\n");
+        goto error;
+    }
+    if (H5Gclose(container_group) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close container group\n");
+        goto error;
+    }
+    if (H5Fclose(file_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close file for data flushing\n");
+        goto error;
+    }
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+    if ((group_id = H5Gopen2(container_group, DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_GROUP_NAME,
+                             H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container sub-group '%s'\n",
+                 DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n", DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataspace num points\n");
+        goto error;
+    }
+
+    if (NULL ==
+        (read_buf = HDmalloc((hsize_t)space_npoints * DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset read\n");
+        goto error;
+    }
+
+    /* Every rank reads back the entire dataset to verify all rows */
+    if (H5Dread(dset_id, DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                read_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't read from dataset '%s'\n", DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Row i of the dataset was written by rank i, so every element of that
+     * row must equal i; the "holes" in the memory buffer must not appear.
+     */
+    for (i = 0; i < (size_t)mpi_size; i++) {
+        size_t j;
+
+        for (j = 0; j < data_size / DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE; j++) {
+            if (((int *)
+                     read_buf)[j + (i * (data_size / DATASET_WRITE_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE))] !=
+                (int)i) {
+                H5_FAILED();
+                HDprintf("    data verification failed\n");
+                goto error;
+            }
+        }
+    }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        if (write_buf)
+            HDfree(write_buf);
+        if (points)
+            HDfree(points);
+        if (dims)
+            HDfree(dims);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be written to by having
+ * a point selection in the file dataspace and a hyperslab
+ * selection in the memory dataspace.
+ */
+#define DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK 2
+#define DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_GROUP_NAME "point_sel_file_hyper_sel_mem_write_test"
+#define DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DSET_NAME "point_sel_file_hyper_sel_mem_dset"
+static int
+test_write_dataset_point_file_hyper_mem(void)
+{
+    hssize_t space_npoints;
+    hsize_t *dims = NULL;
+    hsize_t *points = NULL;
+    size_t i, data_size;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t fapl_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+    hid_t mspace_id = H5I_INVALID_HID;
+    void *write_buf = NULL;
+    void *read_buf = NULL;
+
+    TESTING("write to dataset with point sel. for file space; hyperslab sel. for memory");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((group_id = H5Gcreate2(container_group, DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_GROUP_NAME,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create container sub-group '%s'\n",
+                 DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if (generate_random_parallel_dimensions(DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    if ((fspace_id = H5Screate_simple(DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK, dims, NULL)) < 0)
+        TEST_ERROR;
+
+    if ((dset_id = H5Dcreate2(group_id, DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DSET_NAME,
+                              DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+                              H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create dataset '%s'\n", DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Per-rank data size: each rank covers one "row" along dimension 0, so
+     * only dims[1..rank-1] contribute (the loop intentionally starts at 1).
+     */
+    for (i = 1, data_size = 1; i < DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE;
+
+    /*
+     * Allocate twice the amount of memory needed and leave "holes" in the memory
+     * buffer in order to prove that the mapping from hyperslab selection <-> point
+     * selection works correctly.
+     */
+    if (NULL == (write_buf = HDmalloc(2 * data_size))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset write\n");
+        goto error;
+    }
+
+    for (i = 0; i < 2 * (data_size / DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE); i++) {
+        /* Write actual data to even indices */
+        if (i % 2 == 0)
+            ((int *)write_buf)[i] = mpi_rank;
+        else
+            ((int *)write_buf)[i] = 0;
+    }
+
+    /* One rank-dimensional coordinate per selected element */
+    if (NULL == (points = HDmalloc((data_size / DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE) *
+                                   DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK * sizeof(hsize_t)))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for point selection\n");
+        goto error;
+    }
+
+    /* Build the point list covering this rank's entire row: coordinate 0 is
+     * the rank number; the last coordinate walks the row linearly.
+     * NOTE(review): the middle-dimension formula (i / dims[j + 1]) is only
+     * exercised when SPACE_RANK > 2; with the configured rank of 2 the loop
+     * reduces to (mpi_rank, i) — verify the formula before raising the rank.
+     */
+    for (i = 0; i < data_size / DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE; i++) {
+        size_t j;
+
+        for (j = 0; j < DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK; j++) {
+            size_t idx = (i * (size_t)DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK) + j;
+
+            if (j == 0)
+                points[idx] = (hsize_t)mpi_rank;
+            else if (j != (size_t)DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK - 1)
+                points[idx] = i / dims[j + 1];
+            else
+                points[idx] = i % dims[j];
+        }
+    }
+
+    if (H5Sselect_elements(fspace_id, H5S_SELECT_SET,
+                           data_size / DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE, points) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set point selection for dataset write\n");
+        goto error;
+    }
+
+    /* Select every other element (stride 2) of the doubled 1-D memory buffer,
+     * matching the even indices that hold real data.
+     */
+    {
+        hsize_t start[1] = {0};
+        hsize_t stride[1] = {2};
+        hsize_t count[1] = {data_size / DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE};
+        hsize_t block[1] = {1};
+        hsize_t mdims[] = {2 * (data_size / DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE)};
+
+        if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create memory dataspace\n");
+            goto error;
+        }
+
+        if (H5Sselect_hyperslab(mspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't set hyperslab selection for dataset write\n");
+            goto error;
+        }
+    }
+
+    if (H5Dwrite(dset_id, DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DSET_DTYPE, mspace_id, fspace_id,
+                 H5P_DEFAULT, write_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't write to dataset '%s'\n", DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+    if (points) {
+        HDfree(points);
+        points = NULL;
+    }
+    if (mspace_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Sclose(mspace_id);
+        }
+        H5E_END_TRY;
+        mspace_id = H5I_INVALID_HID;
+    }
+    if (fspace_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Sclose(fspace_id);
+        }
+        H5E_END_TRY;
+        fspace_id = H5I_INVALID_HID;
+    }
+    if (dset_id >= 0) {
+        H5E_BEGIN_TRY
+        {
+            H5Dclose(dset_id);
+        }
+        H5E_END_TRY;
+        dset_id = H5I_INVALID_HID;
+    }
+
+    /*
+     * Close and re-open the file to ensure that the data gets written.
+     */
+    if (H5Gclose(group_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close test's container group\n");
+        goto error;
+    }
+    if (H5Gclose(container_group) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close container group\n");
+        goto error;
+    }
+    if (H5Fclose(file_id) < 0) {
+        H5_FAILED();
+        HDprintf("    failed to close file for data flushing\n");
+        goto error;
+    }
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+    if ((group_id = H5Gopen2(container_group, DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_GROUP_NAME,
+                             H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container sub-group '%s'\n",
+                 DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n", DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataspace num points\n");
+        goto error;
+    }
+
+    if (NULL ==
+        (read_buf = HDmalloc((hsize_t)space_npoints * DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset read\n");
+        goto error;
+    }
+
+    /* Every rank reads back the full dataset for verification */
+    if (H5Dread(dset_id, DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                read_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't read from dataset '%s'\n", DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Row i was populated by rank i; every element of that row must equal i */
+    for (i = 0; i < (size_t)mpi_size; i++) {
+        size_t j;
+
+        for (j = 0; j < data_size / DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE; j++) {
+            if (((int *)
+                     read_buf)[j + (i * (data_size / DATASET_WRITE_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE))] !=
+                (int)i) {
+                H5_FAILED();
+                HDprintf("    data verification failed\n");
+                goto error;
+            }
+        }
+    }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        if (write_buf)
+            HDfree(write_buf);
+        if (points)
+            HDfree(points);
+        if (dims)
+            HDfree(dims);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be read from by having
+ * one of the MPI ranks select 0 rows in a hyperslab selection.
+ */
+#define DATASET_READ_ONE_PROC_0_SEL_TEST_SPACE_RANK 2
+#define DATASET_READ_ONE_PROC_0_SEL_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_READ_ONE_PROC_0_SEL_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_READ_ONE_PROC_0_SEL_TEST_GROUP_NAME "one_rank_0_sel_read_test"
+#define DATASET_READ_ONE_PROC_0_SEL_TEST_DSET_NAME "one_rank_0_sel_dset"
+static int
+test_read_dataset_one_proc_0_selection(void)
+{
+    hssize_t space_npoints;
+    hsize_t *dims = NULL;
+    hsize_t start[DATASET_READ_ONE_PROC_0_SEL_TEST_SPACE_RANK];
+    hsize_t stride[DATASET_READ_ONE_PROC_0_SEL_TEST_SPACE_RANK];
+    hsize_t count[DATASET_READ_ONE_PROC_0_SEL_TEST_SPACE_RANK];
+    hsize_t block[DATASET_READ_ONE_PROC_0_SEL_TEST_SPACE_RANK];
+    size_t i, data_size, read_buf_size;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t fapl_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+    hid_t mspace_id = H5I_INVALID_HID;
+    void *write_buf = NULL;
+    void *read_buf = NULL;
+
+    TESTING("read from dataset with one rank selecting 0 rows");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if (generate_random_parallel_dimensions(DATASET_READ_ONE_PROC_0_SEL_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    /*
+     * Have rank 0 create the dataset and completely fill it with data.
+     */
+    BEGIN_INDEPENDENT_OP(dset_create)
+    {
+        if (MAINPROCESS) {
+            /* Note: serial (H5P_DEFAULT) file access here — only rank 0 touches
+             * the file during dataset creation.
+             */
+            if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open file '%s'\n", H5_api_test_parallel_filename);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((group_id = H5Gcreate2(container_group, DATASET_READ_ONE_PROC_0_SEL_TEST_GROUP_NAME,
+                                       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create container sub-group '%s'\n",
+                         DATASET_READ_ONE_PROC_0_SEL_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((fspace_id = H5Screate_simple(DATASET_READ_ONE_PROC_0_SEL_TEST_SPACE_RANK, dims, NULL)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create file dataspace for dataset\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((dset_id = H5Dcreate2(group_id, DATASET_READ_ONE_PROC_0_SEL_TEST_DSET_NAME,
+                                      DATASET_READ_ONE_PROC_0_SEL_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+                                      H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create dataset '%s'\n", DATASET_READ_ONE_PROC_0_SEL_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /* Full dataset size: product of ALL dimensions (loop starts at 0,
+             * unlike the per-rank sizing in the write tests).
+             */
+            for (i = 0, data_size = 1; i < DATASET_READ_ONE_PROC_0_SEL_TEST_SPACE_RANK; i++)
+                data_size *= dims[i];
+            data_size *= DATASET_READ_ONE_PROC_0_SEL_TEST_DTYPE_SIZE;
+
+            if (NULL == (write_buf = HDmalloc(data_size))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset write\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /* Fill row i (one row per rank along dim 0) with the value i so
+             * each reader can later check it got its own rank's data.
+             */
+            for (i = 0; i < (size_t)mpi_size; i++) {
+                size_t j;
+                size_t elem_per_proc = (data_size / DATASET_READ_ONE_PROC_0_SEL_TEST_DTYPE_SIZE) / dims[0];
+
+                for (j = 0; j < elem_per_proc; j++) {
+                    size_t idx = (i * elem_per_proc) + j;
+
+                    ((int *)write_buf)[idx] = (int)i;
+                }
+            }
+
+            {
+                hsize_t mdims[] = {data_size / DATASET_READ_ONE_PROC_0_SEL_TEST_DTYPE_SIZE};
+
+                if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't create memory dataspace\n");
+                    INDEPENDENT_OP_ERROR(dset_create);
+                }
+            }
+
+            if (H5Dwrite(dset_id, DATASET_READ_ONE_PROC_0_SEL_TEST_DSET_DTYPE, mspace_id, H5S_ALL,
+                         H5P_DEFAULT, write_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't write to dataset '%s'\n", DATASET_READ_ONE_PROC_0_SEL_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if (write_buf) {
+                HDfree(write_buf);
+                write_buf = NULL;
+            }
+            if (mspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(mspace_id);
+                }
+                H5E_END_TRY;
+                mspace_id = H5I_INVALID_HID;
+            }
+            if (fspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(fspace_id);
+                }
+                H5E_END_TRY;
+                fspace_id = H5I_INVALID_HID;
+            }
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            /*
+             * Close and re-open the file to ensure that the data gets written.
+             */
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close test's container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Gclose(container_group) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close file for data flushing\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(dset_create);
+
+    /*
+     * Re-open file on all ranks.
+     */
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+    if ((group_id = H5Gopen2(container_group, DATASET_READ_ONE_PROC_0_SEL_TEST_GROUP_NAME, H5P_DEFAULT)) <
+        0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container sub-group '%s'\n", DATASET_READ_ONE_PROC_0_SEL_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_READ_ONE_PROC_0_SEL_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n", DATASET_READ_ONE_PROC_0_SEL_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataspace num points\n");
+        goto error;
+    }
+
+    /* Rank 0 deliberately reads nothing, so only the other ranks need a
+     * receive buffer (rank 0's read_buf stays NULL).
+     */
+    BEGIN_INDEPENDENT_OP(read_buf_alloc)
+    {
+        if (!MAINPROCESS) {
+            read_buf_size =
+                ((size_t)(space_npoints / mpi_size) * DATASET_READ_ONE_PROC_0_SEL_TEST_DTYPE_SIZE);
+
+            if (NULL == (read_buf = HDmalloc(read_buf_size))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset read\n");
+                INDEPENDENT_OP_ERROR(read_buf_alloc);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(read_buf_alloc);
+
+    {
+        hsize_t mdims[] = {(hsize_t)space_npoints / (hsize_t)mpi_size};
+
+        /* Zero-sized memory dataspace on rank 0 to match its empty selection */
+        if (MAINPROCESS)
+            mdims[0] = 0;
+
+        if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create memory dataspace\n");
+            goto error;
+        }
+    }
+
+    /* Each non-zero rank selects its own row; rank 0 selects 0 rows by using
+     * zero block/count values — this is the behavior under test.
+     */
+    for (i = 0; i < DATASET_READ_ONE_PROC_0_SEL_TEST_SPACE_RANK; i++) {
+        if (i == 0) {
+            start[i] = (hsize_t)mpi_rank;
+            block[i] = MAINPROCESS ? 0 : 1;
+        }
+        else {
+            start[i] = 0;
+            block[i] = MAINPROCESS ? 0 : dims[i];
+        }
+
+        stride[i] = 1;
+        count[i] = MAINPROCESS ? 0 : 1;
+    }
+
+    if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't select hyperslab for dataset read\n");
+        goto error;
+    }
+
+    /* All ranks participate in the read, including rank 0 with its empty
+     * selection and NULL buffer.
+     */
+    BEGIN_INDEPENDENT_OP(dset_read)
+    {
+        if (H5Dread(dset_id, DATASET_READ_ONE_PROC_0_SEL_TEST_DSET_DTYPE, mspace_id, fspace_id, H5P_DEFAULT,
+                    read_buf) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't read from dataset '%s'\n", DATASET_READ_ONE_PROC_0_SEL_TEST_DSET_NAME);
+            INDEPENDENT_OP_ERROR(dset_read);
+        }
+    }
+    END_INDEPENDENT_OP(dset_read);
+
+    /* Each reading rank's row was filled with its rank number by rank 0 */
+    BEGIN_INDEPENDENT_OP(data_verify)
+    {
+        if (!MAINPROCESS) {
+            for (i = 0; i < (size_t)space_npoints / (size_t)mpi_size; i++) {
+                if (((int *)read_buf)[i] != mpi_rank) {
+                    H5_FAILED();
+                    HDprintf("    data verification failed\n");
+                    INDEPENDENT_OP_ERROR(data_verify);
+                }
+            }
+        }
+    }
+    END_INDEPENDENT_OP(data_verify);
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        if (write_buf)
+            HDfree(write_buf);
+        if (dims)
+            HDfree(dims);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be read from by having
+ * one of the MPI ranks call H5Sselect_none.
+ */
+#define DATASET_READ_ONE_PROC_NONE_SEL_TEST_SPACE_RANK 2
+#define DATASET_READ_ONE_PROC_NONE_SEL_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_READ_ONE_PROC_NONE_SEL_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_READ_ONE_PROC_NONE_SEL_TEST_GROUP_NAME "one_rank_none_sel_read_test"
+#define DATASET_READ_ONE_PROC_NONE_SEL_TEST_DSET_NAME "one_rank_none_sel_dset"
+static int
+test_read_dataset_one_proc_none_selection(void)
+{
+    hssize_t space_npoints;
+    hsize_t *dims = NULL;
+    hsize_t  start[DATASET_READ_ONE_PROC_NONE_SEL_TEST_SPACE_RANK];
+    hsize_t  stride[DATASET_READ_ONE_PROC_NONE_SEL_TEST_SPACE_RANK];
+    hsize_t  count[DATASET_READ_ONE_PROC_NONE_SEL_TEST_SPACE_RANK];
+    hsize_t  block[DATASET_READ_ONE_PROC_NONE_SEL_TEST_SPACE_RANK];
+    size_t   i, data_size, read_buf_size;
+    hid_t    file_id = H5I_INVALID_HID;
+    hid_t    fapl_id = H5I_INVALID_HID;
+    hid_t    container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t    dset_id   = H5I_INVALID_HID;
+    hid_t    fspace_id = H5I_INVALID_HID;
+    hid_t    mspace_id = H5I_INVALID_HID;
+    void    *write_buf = NULL;
+    void    *read_buf  = NULL;
+
+    TESTING("read from dataset with one rank using 'none' selection");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf(
+            " API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* dims is allocated by the helper and freed by this function on all paths */
+    if (generate_random_parallel_dimensions(DATASET_READ_ONE_PROC_NONE_SEL_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    /*
+     * Have rank 0 create the dataset and completely fill it with data.
+     */
+    BEGIN_INDEPENDENT_OP(dset_create)
+    {
+        if (MAINPROCESS) {
+            /* Serial (H5P_DEFAULT) access: only rank 0 touches the file here */
+            if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open file '%s'\n", H5_api_test_parallel_filename);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((group_id = H5Gcreate2(container_group, DATASET_READ_ONE_PROC_NONE_SEL_TEST_GROUP_NAME,
+                                       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't create container sub-group '%s'\n",
+                         DATASET_READ_ONE_PROC_NONE_SEL_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((fspace_id = H5Screate_simple(DATASET_READ_ONE_PROC_NONE_SEL_TEST_SPACE_RANK, dims, NULL)) <
+                0) {
+                H5_FAILED();
+                HDprintf(" failed to create file dataspace for dataset\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((dset_id = H5Dcreate2(group_id, DATASET_READ_ONE_PROC_NONE_SEL_TEST_DSET_NAME,
+                                      DATASET_READ_ONE_PROC_NONE_SEL_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+                                      H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't create dataset '%s'\n", DATASET_READ_ONE_PROC_NONE_SEL_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /* Total byte size of the dataset: product of all dimensions times the element size */
+            for (i = 0, data_size = 1; i < DATASET_READ_ONE_PROC_NONE_SEL_TEST_SPACE_RANK; i++)
+                data_size *= dims[i];
+            data_size *= DATASET_READ_ONE_PROC_NONE_SEL_TEST_DTYPE_SIZE;
+
+            if (NULL == (write_buf = HDmalloc(data_size))) {
+                H5_FAILED();
+                HDprintf(" couldn't allocate buffer for dataset write\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /*
+             * Fill the buffer so that the region belonging to MPI rank i contains
+             * the value i; the per-rank verification below relies on this layout.
+             * NOTE(review): assumes dims[0] is a multiple of mpi_size (presumably
+             * guaranteed by generate_random_parallel_dimensions) -- confirm.
+             */
+            for (i = 0; i < (size_t)mpi_size; i++) {
+                size_t j;
+                size_t elem_per_proc = (data_size / DATASET_READ_ONE_PROC_NONE_SEL_TEST_DTYPE_SIZE) / dims[0];
+
+                for (j = 0; j < elem_per_proc; j++) {
+                    size_t idx = (i * elem_per_proc) + j;
+
+                    ((int *)write_buf)[idx] = (int)i;
+                }
+            }
+
+            {
+                /* 1-D memory dataspace covering the whole (flattened) dataset */
+                hsize_t mdims[] = {data_size / DATASET_READ_ONE_PROC_NONE_SEL_TEST_DTYPE_SIZE};
+
+                if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+                    H5_FAILED();
+                    HDprintf(" couldn't create memory dataspace\n");
+                    INDEPENDENT_OP_ERROR(dset_create);
+                }
+            }
+
+            if (H5Dwrite(dset_id, DATASET_READ_ONE_PROC_NONE_SEL_TEST_DSET_DTYPE, mspace_id, H5S_ALL,
+                         H5P_DEFAULT, write_buf) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't write to dataset '%s'\n",
+                         DATASET_READ_ONE_PROC_NONE_SEL_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /* Release write-phase resources; IDs are reset so the shared error
+             * path below doesn't try to close them twice. */
+            if (write_buf) {
+                HDfree(write_buf);
+                write_buf = NULL;
+            }
+            if (mspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(mspace_id);
+                }
+                H5E_END_TRY;
+                mspace_id = H5I_INVALID_HID;
+            }
+            if (fspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(fspace_id);
+                }
+                H5E_END_TRY;
+                fspace_id = H5I_INVALID_HID;
+            }
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            /*
+             * Close and re-open the file to ensure that the data gets written.
+             */
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to close test's container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Gclose(container_group) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to close container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to close file for data flushing\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(dset_create);
+
+    /*
+     * Re-open file on all ranks.
+     */
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+    if ((group_id = H5Gopen2(container_group, DATASET_READ_ONE_PROC_NONE_SEL_TEST_GROUP_NAME, H5P_DEFAULT)) <
+        0) {
+        H5_FAILED();
+        HDprintf(" couldn't open container sub-group '%s'\n",
+                 DATASET_READ_ONE_PROC_NONE_SEL_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_READ_ONE_PROC_NONE_SEL_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open dataset '%s'\n", DATASET_READ_ONE_PROC_NONE_SEL_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't get dataspace num points\n");
+        goto error;
+    }
+
+    /* Rank 0 reads nothing in this test, so only the other ranks allocate a read buffer */
+    BEGIN_INDEPENDENT_OP(read_buf_alloc)
+    {
+        if (!MAINPROCESS) {
+            read_buf_size =
+                ((size_t)(space_npoints / mpi_size) * DATASET_READ_ONE_PROC_NONE_SEL_TEST_DTYPE_SIZE);
+
+            if (NULL == (read_buf = HDmalloc(read_buf_size))) {
+                H5_FAILED();
+                HDprintf(" couldn't allocate buffer for dataset read\n");
+                INDEPENDENT_OP_ERROR(read_buf_alloc);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(read_buf_alloc);
+
+    {
+        hsize_t mdims[] = {(hsize_t)space_npoints / (hsize_t)mpi_size};
+
+        /* Rank 0's memory dataspace is zero-sized to match its 'none' file selection */
+        if (MAINPROCESS)
+            mdims[0] = 0;
+
+        if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf(" couldn't create memory dataspace\n");
+            goto error;
+        }
+    }
+
+    /* Each non-zero rank selects its own "row" (dimension 0 slice) of the dataset */
+    for (i = 0; i < DATASET_READ_ONE_PROC_NONE_SEL_TEST_SPACE_RANK; i++) {
+        if (i == 0) {
+            start[i] = (hsize_t)mpi_rank;
+            block[i] = 1;
+        }
+        else {
+            start[i] = 0;
+            block[i] = dims[i];
+        }
+
+        stride[i] = 1;
+        count[i]  = 1;
+    }
+
+    /* Rank 0 uses a 'none' selection (the behavior under test); all other
+     * ranks use their hyperslab computed above */
+    BEGIN_INDEPENDENT_OP(set_space_sel)
+    {
+        if (MAINPROCESS) {
+            if (H5Sselect_none(fspace_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't set 'none' selection for dataset read\n");
+                INDEPENDENT_OP_ERROR(set_space_sel);
+            }
+        }
+        else {
+            if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't select hyperslab for dataset read\n");
+                INDEPENDENT_OP_ERROR(set_space_sel);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(set_space_sel);
+
+    /* All ranks participate in the read; rank 0 passes a NULL buffer, which is
+     * acceptable because its selection is empty */
+    BEGIN_INDEPENDENT_OP(dset_read)
+    {
+        if (H5Dread(dset_id, DATASET_READ_ONE_PROC_NONE_SEL_TEST_DSET_DTYPE, mspace_id, fspace_id,
+                    H5P_DEFAULT, read_buf) < 0) {
+            H5_FAILED();
+            HDprintf(" couldn't read from dataset '%s'\n", DATASET_READ_ONE_PROC_NONE_SEL_TEST_DSET_NAME);
+            INDEPENDENT_OP_ERROR(dset_read);
+        }
+    }
+    END_INDEPENDENT_OP(dset_read);
+
+    /* Each non-zero rank checks that it read back its own rank value everywhere */
+    BEGIN_INDEPENDENT_OP(data_verify)
+    {
+        if (!MAINPROCESS) {
+            for (i = 0; i < (size_t)space_npoints / (size_t)mpi_size; i++) {
+                if (((int *)read_buf)[i] != mpi_rank) {
+                    H5_FAILED();
+                    HDprintf(" data verification failed\n");
+                    INDEPENDENT_OP_ERROR(data_verify);
+                }
+            }
+        }
+    }
+    END_INDEPENDENT_OP(data_verify);
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup: suppress HDF5 error output while closing whatever
+     * IDs may still be open */
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        if (write_buf)
+            HDfree(write_buf);
+        if (dims)
+            HDfree(dims);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be read from by having
+ * one of the MPI ranks use an ALL selection, while the other
+ * ranks read nothing.
+ */
+#define DATASET_READ_ONE_PROC_ALL_SEL_TEST_SPACE_RANK 2
+#define DATASET_READ_ONE_PROC_ALL_SEL_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_READ_ONE_PROC_ALL_SEL_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_READ_ONE_PROC_ALL_SEL_TEST_GROUP_NAME "one_rank_all_sel_read_test"
+#define DATASET_READ_ONE_PROC_ALL_SEL_TEST_DSET_NAME "one_rank_all_sel_dset"
+static int
+test_read_dataset_one_proc_all_selection(void)
+{
+    hssize_t space_npoints;
+    hsize_t *dims = NULL;
+    size_t   i, data_size, read_buf_size;
+    hid_t    file_id = H5I_INVALID_HID;
+    hid_t    fapl_id = H5I_INVALID_HID;
+    hid_t    container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t    dset_id   = H5I_INVALID_HID;
+    hid_t    fspace_id = H5I_INVALID_HID;
+    hid_t    mspace_id = H5I_INVALID_HID;
+    void    *write_buf = NULL;
+    void    *read_buf  = NULL;
+
+    TESTING("read from dataset with one rank using all selection; others none selection");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf(
+            " API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* dims is allocated by the helper and freed by this function on all paths */
+    if (generate_random_parallel_dimensions(DATASET_READ_ONE_PROC_ALL_SEL_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    /*
+     * Have rank 0 create the dataset and completely fill it with data.
+     */
+    BEGIN_INDEPENDENT_OP(dset_create)
+    {
+        if (MAINPROCESS) {
+            /* Serial (H5P_DEFAULT) access: only rank 0 touches the file here */
+            if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open file '%s'\n", H5_api_test_parallel_filename);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((group_id = H5Gcreate2(container_group, DATASET_READ_ONE_PROC_ALL_SEL_TEST_GROUP_NAME,
+                                       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't create container sub-group '%s'\n",
+                         DATASET_READ_ONE_PROC_ALL_SEL_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((fspace_id = H5Screate_simple(DATASET_READ_ONE_PROC_ALL_SEL_TEST_SPACE_RANK, dims, NULL)) <
+                0) {
+                H5_FAILED();
+                HDprintf(" failed to create file dataspace for dataset\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((dset_id = H5Dcreate2(group_id, DATASET_READ_ONE_PROC_ALL_SEL_TEST_DSET_NAME,
+                                      DATASET_READ_ONE_PROC_ALL_SEL_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+                                      H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't create dataset '%s'\n", DATASET_READ_ONE_PROC_ALL_SEL_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /* Total byte size of the dataset: product of all dimensions times the element size */
+            for (i = 0, data_size = 1; i < DATASET_READ_ONE_PROC_ALL_SEL_TEST_SPACE_RANK; i++)
+                data_size *= dims[i];
+            data_size *= DATASET_READ_ONE_PROC_ALL_SEL_TEST_DTYPE_SIZE;
+
+            if (NULL == (write_buf = HDmalloc(data_size))) {
+                H5_FAILED();
+                HDprintf(" couldn't allocate buffer for dataset write\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /*
+             * Fill the buffer so that the region belonging to MPI rank i contains
+             * the value i; rank 0's full-dataset verification below relies on this.
+             */
+            for (i = 0; i < (size_t)mpi_size; i++) {
+                size_t j;
+                size_t elem_per_proc = (data_size / DATASET_READ_ONE_PROC_ALL_SEL_TEST_DTYPE_SIZE) / dims[0];
+
+                for (j = 0; j < elem_per_proc; j++) {
+                    size_t idx = (i * elem_per_proc) + j;
+
+                    ((int *)write_buf)[idx] = (int)i;
+                }
+            }
+
+            {
+                /* 1-D memory dataspace covering the whole (flattened) dataset */
+                hsize_t mdims[] = {data_size / DATASET_READ_ONE_PROC_ALL_SEL_TEST_DTYPE_SIZE};
+
+                if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+                    H5_FAILED();
+                    HDprintf(" couldn't create memory dataspace\n");
+                    INDEPENDENT_OP_ERROR(dset_create);
+                }
+            }
+
+            if (H5Dwrite(dset_id, DATASET_READ_ONE_PROC_ALL_SEL_TEST_DSET_DTYPE, mspace_id, H5S_ALL,
+                         H5P_DEFAULT, write_buf) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't write to dataset '%s'\n",
+                         DATASET_READ_ONE_PROC_ALL_SEL_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /* Release write-phase resources; IDs are reset so the shared error
+             * path below doesn't try to close them twice. */
+            if (write_buf) {
+                HDfree(write_buf);
+                write_buf = NULL;
+            }
+            if (mspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(mspace_id);
+                }
+                H5E_END_TRY;
+                mspace_id = H5I_INVALID_HID;
+            }
+            if (fspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(fspace_id);
+                }
+                H5E_END_TRY;
+                fspace_id = H5I_INVALID_HID;
+            }
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            /*
+             * Close and re-open the file to ensure that the data gets written.
+             */
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to close test's container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Gclose(container_group) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to close container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to close file for data flushing\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(dset_create);
+
+    /*
+     * Re-open file on all ranks.
+     */
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+    if ((group_id = H5Gopen2(container_group, DATASET_READ_ONE_PROC_ALL_SEL_TEST_GROUP_NAME, H5P_DEFAULT)) <
+        0) {
+        H5_FAILED();
+        HDprintf(" couldn't open container sub-group '%s'\n",
+                 DATASET_READ_ONE_PROC_ALL_SEL_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_READ_ONE_PROC_ALL_SEL_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open dataset '%s'\n", DATASET_READ_ONE_PROC_ALL_SEL_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't get dataspace num points\n");
+        goto error;
+    }
+
+    /* Only rank 0 reads (the whole dataset), so only it allocates a read buffer */
+    BEGIN_INDEPENDENT_OP(read_buf_alloc)
+    {
+        if (MAINPROCESS) {
+            read_buf_size = (size_t)space_npoints * DATASET_READ_ONE_PROC_ALL_SEL_TEST_DTYPE_SIZE;
+
+            if (NULL == (read_buf = HDmalloc(read_buf_size))) {
+                H5_FAILED();
+                HDprintf(" couldn't allocate buffer for dataset read\n");
+                INDEPENDENT_OP_ERROR(read_buf_alloc);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(read_buf_alloc);
+
+    {
+        hsize_t mdims[] = {(hsize_t)space_npoints};
+
+        /* Non-zero ranks read nothing, so their memory dataspace is zero-sized */
+        if (!MAINPROCESS)
+            mdims[0] = 0;
+
+        if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf(" couldn't create memory dataspace\n");
+            goto error;
+        }
+    }
+
+    /* Rank 0 uses an 'all' selection (the behavior under test); every other
+     * rank uses a 'none' selection */
+    BEGIN_INDEPENDENT_OP(set_space_sel)
+    {
+        if (MAINPROCESS) {
+            if (H5Sselect_all(fspace_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't set 'all' selection for dataset read\n");
+                INDEPENDENT_OP_ERROR(set_space_sel);
+            }
+        }
+        else {
+            if (H5Sselect_none(fspace_id) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't set 'none' selection for dataset read\n");
+                INDEPENDENT_OP_ERROR(set_space_sel);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(set_space_sel);
+
+    /* All ranks participate in the read; read_buf is NULL on non-zero ranks,
+     * which is acceptable because their selections are empty */
+    BEGIN_INDEPENDENT_OP(dset_read)
+    {
+        if (H5Dread(dset_id, DATASET_READ_ONE_PROC_ALL_SEL_TEST_DSET_DTYPE, mspace_id, fspace_id, H5P_DEFAULT,
+                    read_buf) < 0) {
+            H5_FAILED();
+            HDprintf(" couldn't read from dataset '%s'\n", DATASET_READ_ONE_PROC_ALL_SEL_TEST_DSET_NAME);
+            INDEPENDENT_OP_ERROR(dset_read);
+        }
+    }
+    END_INDEPENDENT_OP(dset_read);
+
+    /* Rank 0 checks the whole dataset: region i must contain the value i,
+     * matching the fill pattern written above */
+    BEGIN_INDEPENDENT_OP(data_verify)
+    {
+        if (MAINPROCESS) {
+            for (i = 0; i < (size_t)mpi_size; i++) {
+                size_t j;
+                size_t elem_per_proc = (size_t)(space_npoints / mpi_size);
+
+                for (j = 0; j < elem_per_proc; j++) {
+                    int idx = (int)((i * elem_per_proc) + j);
+
+                    if (((int *)read_buf)[idx] != (int)i) {
+                        H5_FAILED();
+                        HDprintf(" data verification failed\n");
+                        INDEPENDENT_OP_ERROR(data_verify);
+                    }
+                }
+            }
+        }
+    }
+    END_INDEPENDENT_OP(data_verify);
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup: suppress HDF5 error output while closing whatever
+     * IDs may still be open */
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        if (write_buf)
+            HDfree(write_buf);
+        if (dims)
+            HDfree(dims);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be read from by having
+ * a hyperslab selection in the file dataspace and an all
+ * selection in the memory dataspace.
+ */
+static int
+test_read_dataset_hyper_file_all_mem(void)
+{
+    TESTING("read from dataset with hyperslab sel. for file space; all sel. for memory");
+
+    /* Placeholder: this test is not implemented yet, so it always reports SKIPPED */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to ensure that a dataset can be read from by having
+ * an all selection in the file dataspace and a hyperslab
+ * selection in the memory dataspace.
+ */
+#define DATASET_READ_ALL_FILE_HYPER_MEM_TEST_SPACE_RANK 2
+#define DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_READ_ALL_FILE_HYPER_MEM_TEST_GROUP_NAME "all_sel_file_hyper_sel_mem_read_test"
+#define DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DSET_NAME "all_sel_file_hyper_sel_mem_dset"
+static int
+test_read_dataset_all_file_hyper_mem(void)
+{
+    hssize_t space_npoints;
+    hsize_t *dims = NULL;
+    size_t   i, data_size, read_buf_size;
+    hid_t    file_id = H5I_INVALID_HID;
+    hid_t    fapl_id = H5I_INVALID_HID;
+    hid_t    container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t    dset_id   = H5I_INVALID_HID;
+    hid_t    fspace_id = H5I_INVALID_HID;
+    hid_t    mspace_id = H5I_INVALID_HID;
+    void    *write_buf = NULL;
+    void    *read_buf  = NULL;
+
+    TESTING("read from dataset with all sel. for file space; hyperslab sel. for memory");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf(
+            " API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* dims is allocated by the helper and freed by this function on all paths */
+    if (generate_random_parallel_dimensions(DATASET_READ_ALL_FILE_HYPER_MEM_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    /*
+     * Have rank 0 create the dataset and completely fill it with data.
+     */
+    BEGIN_INDEPENDENT_OP(dset_create)
+    {
+        if (MAINPROCESS) {
+            /* Serial (H5P_DEFAULT) access: only rank 0 touches the file here */
+            if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open file '%s'\n", H5_api_test_parallel_filename);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((group_id = H5Gcreate2(container_group, DATASET_READ_ALL_FILE_HYPER_MEM_TEST_GROUP_NAME,
+                                       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't create container sub-group '%s'\n",
+                         DATASET_READ_ALL_FILE_HYPER_MEM_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((fspace_id = H5Screate_simple(DATASET_READ_ALL_FILE_HYPER_MEM_TEST_SPACE_RANK, dims, NULL)) <
+                0) {
+                H5_FAILED();
+                HDprintf(" failed to create file dataspace for dataset\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((dset_id = H5Dcreate2(group_id, DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DSET_NAME,
+                                      DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+                                      H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't create dataset '%s'\n",
+                         DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /* Total byte size of the dataset: product of all dimensions times the element size */
+            for (i = 0, data_size = 1; i < DATASET_READ_ALL_FILE_HYPER_MEM_TEST_SPACE_RANK; i++)
+                data_size *= dims[i];
+            data_size *= DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DTYPE_SIZE;
+
+            if (NULL == (write_buf = HDmalloc(data_size))) {
+                H5_FAILED();
+                HDprintf(" couldn't allocate buffer for dataset write\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /*
+             * Fill the buffer so that the region belonging to MPI rank i contains
+             * the value i; rank 0's strided verification below relies on this.
+             */
+            for (i = 0; i < (size_t)mpi_size; i++) {
+                size_t j;
+                size_t elem_per_proc =
+                    (data_size / DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DTYPE_SIZE) / dims[0];
+
+                for (j = 0; j < elem_per_proc; j++) {
+                    size_t idx = (i * elem_per_proc) + j;
+
+                    ((int *)write_buf)[idx] = (int)i;
+                }
+            }
+
+            {
+                /* 1-D memory dataspace covering the whole (flattened) dataset */
+                hsize_t mdims[] = {data_size / DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DTYPE_SIZE};
+
+                if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+                    H5_FAILED();
+                    HDprintf(" couldn't create memory dataspace\n");
+                    INDEPENDENT_OP_ERROR(dset_create);
+                }
+            }
+
+            if (H5Dwrite(dset_id, DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DSET_DTYPE, mspace_id, H5S_ALL,
+                         H5P_DEFAULT, write_buf) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't write to dataset '%s'\n",
+                         DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /* Release write-phase resources; IDs are reset so the shared error
+             * path below doesn't try to close them twice. */
+            if (write_buf) {
+                HDfree(write_buf);
+                write_buf = NULL;
+            }
+            if (mspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(mspace_id);
+                }
+                H5E_END_TRY;
+                mspace_id = H5I_INVALID_HID;
+            }
+            if (fspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(fspace_id);
+                }
+                H5E_END_TRY;
+                fspace_id = H5I_INVALID_HID;
+            }
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            /*
+             * Close and re-open the file to ensure that the data gets written.
+             */
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to close test's container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Gclose(container_group) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to close container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf(" failed to close file for data flushing\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(dset_create);
+
+    /*
+     * Re-open file on all ranks.
+     */
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+    if ((group_id = H5Gopen2(container_group, DATASET_READ_ALL_FILE_HYPER_MEM_TEST_GROUP_NAME, H5P_DEFAULT)) <
+        0) {
+        H5_FAILED();
+        HDprintf(" couldn't open container sub-group '%s'\n",
+                 DATASET_READ_ALL_FILE_HYPER_MEM_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't open dataset '%s'\n", DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+        H5_FAILED();
+        HDprintf(" couldn't get dataspace num points\n");
+        goto error;
+    }
+
+    /*
+     * Only have rank 0 perform the dataset read, as reading the entire dataset on all ranks
+     * might be stressful on system resources.
+     */
+    BEGIN_INDEPENDENT_OP(dset_read)
+    {
+        if (MAINPROCESS) {
+            hsize_t start[1]  = {0};
+            hsize_t stride[1] = {2};
+            hsize_t count[1]  = {(hsize_t)space_npoints};
+            hsize_t block[1]  = {1};
+            hsize_t mdims[]   = {(hsize_t)(2 * space_npoints)};
+
+            /*
+             * Allocate twice the amount of memory needed and leave "holes" in the memory
+             * buffer in order to prove that the mapping from all selection <-> hyperslab
+             * selection works correctly.
+             */
+            read_buf_size = (size_t)(2 * space_npoints) * DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DTYPE_SIZE;
+            /* calloc so the untouched "hole" elements are known to be zero for verification */
+            if (NULL == (read_buf = HDcalloc(1, read_buf_size))) {
+                H5_FAILED();
+                HDprintf(" couldn't allocate buffer for dataset read\n");
+                INDEPENDENT_OP_ERROR(dset_read);
+            }
+
+            if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't create memory dataspace\n");
+                INDEPENDENT_OP_ERROR(dset_read);
+            }
+
+            /* Select every other element in memory: space_npoints blocks of 1, stride 2 */
+            if (H5Sselect_hyperslab(mspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't select hyperslab for dataset read\n");
+                INDEPENDENT_OP_ERROR(dset_read);
+            }
+
+            if (H5Dread(dset_id, DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DSET_DTYPE, mspace_id, H5S_ALL,
+                        H5P_DEFAULT, read_buf) < 0) {
+                H5_FAILED();
+                HDprintf(" couldn't read from dataset '%s'\n",
+                         DATASET_READ_ALL_FILE_HYPER_MEM_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_read);
+            }
+
+            /* Even-indexed elements must hold the owning rank's value; odd-indexed
+             * "hole" elements must still hold the zero left by HDcalloc */
+            for (i = 0; i < (size_t)mpi_size; i++) {
+                size_t j;
+                size_t elem_per_proc = (size_t)(space_npoints / mpi_size);
+
+                for (j = 0; j < 2 * elem_per_proc; j++) {
+                    size_t idx = (i * 2 * elem_per_proc) + j;
+
+                    if (j % 2 == 0) {
+                        if (((int *)read_buf)[idx] != (int)i) {
+                            H5_FAILED();
+                            HDprintf(" data verification failed\n");
+                            INDEPENDENT_OP_ERROR(dset_read);
+                        }
+                    }
+                    else {
+                        if (((int *)read_buf)[idx] != 0) {
+                            H5_FAILED();
+                            HDprintf(" data verification failed\n");
+                            INDEPENDENT_OP_ERROR(dset_read);
+                        }
+                    }
+                }
+            }
+        }
+    }
+    END_INDEPENDENT_OP(dset_read);
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    /* Only rank 0 created a memory dataspace for the read; close it here so the
+     * ID doesn't leak on the success path (previously only the error path closed
+     * it). Guarded because mspace_id is still H5I_INVALID_HID on other ranks. */
+    if (mspace_id >= 0) {
+        if (H5Sclose(mspace_id) < 0)
+            TEST_ERROR;
+        mspace_id = H5I_INVALID_HID;
+    }
+
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup: suppress HDF5 error output while closing whatever
+     * IDs may still be open */
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        if (write_buf)
+            HDfree(write_buf);
+        if (dims)
+            HDfree(dims);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be read from by having
+ * a point selection in the file dataspace and an all selection
+ * in the memory dataspace.
+ */
+static int
+test_read_dataset_point_file_all_mem(void)
+{
+    TESTING("read from dataset with point sel. for file space; all sel. for memory");
+
+    /* Placeholder: this test is not implemented yet, so it always reports SKIPPED */
+    SKIPPED();
+
+    return 0;
+}
+
+/*
+ * A test to ensure that a dataset can be read from by having
+ * an all selection in the file dataspace and a point selection
+ * in the memory dataspace.
+ */
+#define DATASET_READ_ALL_FILE_POINT_MEM_TEST_SPACE_RANK 2
+#define DATASET_READ_ALL_FILE_POINT_MEM_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_READ_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_READ_ALL_FILE_POINT_MEM_TEST_GROUP_NAME "all_sel_file_point_sel_mem_read_test"
+#define DATASET_READ_ALL_FILE_POINT_MEM_TEST_DSET_NAME "all_sel_file_point_sel_mem_dset"
+static int
+test_read_dataset_all_file_point_mem(void)
+{
+ hssize_t space_npoints;
+ hsize_t *points = NULL;
+ hsize_t *dims = NULL;
+ size_t i, data_size, read_buf_size;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ void *write_buf = NULL;
+ void *read_buf = NULL;
+
+ TESTING("read from dataset with all sel. for file space; point sel. for memory");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ SKIPPED();
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ return 0;
+ }
+
+ if (generate_random_parallel_dimensions(DATASET_READ_ALL_FILE_POINT_MEM_TEST_SPACE_RANK, &dims) < 0)
+ TEST_ERROR;
+
+ /*
+ * Have rank 0 create the dataset and completely fill it with data.
+ */
+ BEGIN_INDEPENDENT_OP(dset_create)
+ {
+ if (MAINPROCESS) {
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_parallel_filename);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((group_id = H5Gcreate2(container_group, DATASET_READ_ALL_FILE_POINT_MEM_TEST_GROUP_NAME,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n",
+ DATASET_READ_ALL_FILE_POINT_MEM_TEST_GROUP_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((fspace_id = H5Screate_simple(DATASET_READ_ALL_FILE_POINT_MEM_TEST_SPACE_RANK, dims, NULL)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" failed to create file dataspace for dataset\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_READ_ALL_FILE_POINT_MEM_TEST_DSET_NAME,
+ DATASET_READ_ALL_FILE_POINT_MEM_TEST_DSET_DTYPE, fspace_id, H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n",
+ DATASET_READ_ALL_FILE_POINT_MEM_TEST_DSET_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ for (i = 0, data_size = 1; i < DATASET_READ_ALL_FILE_POINT_MEM_TEST_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_READ_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE;
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ for (i = 0; i < (size_t)mpi_size; i++) {
+ size_t j;
+ size_t elem_per_proc =
+ (data_size / DATASET_READ_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE) / dims[0];
+
+ for (j = 0; j < elem_per_proc; j++) {
+ size_t idx = (i * elem_per_proc) + j;
+
+ ((int *)write_buf)[idx] = (int)i;
+ }
+ }
+
+ {
+ hsize_t mdims[] = {data_size / DATASET_READ_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE};
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ }
+
+ if (H5Dwrite(dset_id, DATASET_READ_ALL_FILE_POINT_MEM_TEST_DSET_DTYPE, mspace_id, H5S_ALL,
+ H5P_DEFAULT, write_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n",
+ DATASET_READ_ALL_FILE_POINT_MEM_TEST_DSET_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+ if (mspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(mspace_id);
+ }
+ H5E_END_TRY;
+ mspace_id = H5I_INVALID_HID;
+ }
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ /*
+ * Close and re-open the file to ensure that the data gets written.
+ */
+ if (H5Gclose(group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close test's container group\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ if (H5Gclose(container_group) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close container group\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ if (H5Fclose(file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close file for data flushing\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ }
+ }
+ END_INDEPENDENT_OP(dset_create);
+
+ /*
+ * Re-open file on all ranks.
+ */
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+ TEST_ERROR;
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+ goto error;
+ }
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+ if ((group_id = H5Gopen2(container_group, DATASET_READ_ALL_FILE_POINT_MEM_TEST_GROUP_NAME, H5P_DEFAULT)) <
+ 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container sub-group '%s'\n",
+ DATASET_READ_ALL_FILE_POINT_MEM_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_READ_ALL_FILE_POINT_MEM_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n", DATASET_READ_ALL_FILE_POINT_MEM_TEST_DSET_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ goto error;
+ }
+
+ if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataspace num points\n");
+ goto error;
+ }
+
+ /*
+ * Only have rank 0 perform the dataset read, as reading the entire dataset on all ranks
+ * might be stressful on system resources.
+ */
+ BEGIN_INDEPENDENT_OP(dset_read)
+ {
+ if (MAINPROCESS) {
+ hsize_t mdims[] = {(hsize_t)(2 * space_npoints)};
+ size_t j;
+
+ /*
+ * Allocate twice the amount of memory needed and leave "holes" in the memory
+ * buffer in order to prove that the mapping from all selection <-> point
+ * selection works correctly.
+ */
+ read_buf_size = (size_t)(2 * space_npoints) * DATASET_READ_ALL_FILE_POINT_MEM_TEST_DTYPE_SIZE;
+ if (NULL == (read_buf = HDcalloc(1, read_buf_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset read\n");
+ INDEPENDENT_OP_ERROR(dset_read);
+ }
+
+ if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create memory dataspace\n");
+ INDEPENDENT_OP_ERROR(dset_read);
+ }
+
+ if (NULL == (points = HDmalloc((size_t)space_npoints * sizeof(hsize_t)))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for point selection\n");
+ INDEPENDENT_OP_ERROR(dset_read);
+ }
+
+ /* Select every other point in the 1-dimensional memory dataspace */
+ for (i = 0, j = 0; i < 2 * (size_t)space_npoints; i++) {
+ if (i % 2 == 0)
+ points[j++] = (hsize_t)i;
+ }
+
+ if (H5Sselect_elements(mspace_id, H5S_SELECT_SET, (size_t)space_npoints, points) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't set point selection for dataset read\n");
+ INDEPENDENT_OP_ERROR(dset_read);
+ }
+
+ if (H5Dread(dset_id, DATASET_READ_ALL_FILE_POINT_MEM_TEST_DSET_DTYPE, mspace_id, H5S_ALL,
+ H5P_DEFAULT, read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n",
+ DATASET_READ_ALL_FILE_POINT_MEM_TEST_DSET_NAME);
+ INDEPENDENT_OP_ERROR(dset_read);
+ }
+
+ for (i = 0; i < (size_t)mpi_size; i++) {
+ size_t elem_per_proc = (size_t)(space_npoints / mpi_size);
+
+ for (j = 0; j < 2 * elem_per_proc; j++) {
+ size_t idx = (i * 2 * elem_per_proc) + j;
+
+ if (j % 2 == 0) {
+ if (((int *)read_buf)[idx] != (int)i) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ INDEPENDENT_OP_ERROR(dset_read);
+ }
+ }
+ else {
+ if (((int *)read_buf)[idx] != 0) {
+ H5_FAILED();
+ HDprintf(" data verification failed\n");
+ INDEPENDENT_OP_ERROR(dset_read);
+ }
+ }
+ }
+ }
+ }
+ }
+ END_INDEPENDENT_OP(dset_read);
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ if (points) {
+ HDfree(points);
+ points = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (read_buf)
+ HDfree(read_buf);
+ if (write_buf)
+ HDfree(write_buf);
+ if (points)
+ HDfree(points);
+ if (dims)
+ HDfree(dims);
+ H5Sclose(mspace_id);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Pclose(fapl_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be read from by having
+ * a hyperslab selection in the file dataspace and a point
+ * selection in the memory dataspace.
+ */
+#define DATASET_READ_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK 2
+#define DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_READ_HYPER_FILE_POINT_MEM_TEST_GROUP_NAME "hyper_sel_file_point_sel_mem_read_test"
+#define DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DSET_NAME "hyper_sel_file_point_sel_mem_dset"
+static int
+test_read_dataset_hyper_file_point_mem(void)
+{
+    hssize_t space_npoints;
+    hsize_t *dims = NULL;
+    hsize_t *points = NULL;
+    hsize_t start[DATASET_READ_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK];
+    hsize_t stride[DATASET_READ_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK];
+    hsize_t count[DATASET_READ_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK];
+    hsize_t block[DATASET_READ_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK];
+    size_t i, data_size, read_buf_size;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t fapl_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+    hid_t mspace_id = H5I_INVALID_HID;
+    void *write_buf = NULL;
+    void *read_buf = NULL;
+
+    TESTING("read from dataset with hyperslab sel. for file space; point sel. for memory");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    if (generate_random_parallel_dimensions(DATASET_READ_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    /*
+     * Have rank 0 create the dataset and completely fill it with data.
+     * Each "row" (first-dimension slice) is filled with the owning rank's
+     * number so the parallel read below can be verified per-rank.
+     */
+    BEGIN_INDEPENDENT_OP(dset_create)
+    {
+        if (MAINPROCESS) {
+            if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open file '%s'\n", H5_api_test_parallel_filename);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((group_id = H5Gcreate2(container_group, DATASET_READ_HYPER_FILE_POINT_MEM_TEST_GROUP_NAME,
+                                       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create container sub-group '%s'\n",
+                         DATASET_READ_HYPER_FILE_POINT_MEM_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((fspace_id =
+                     H5Screate_simple(DATASET_READ_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK, dims, NULL)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create file dataspace for dataset\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((dset_id = H5Dcreate2(group_id, DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DSET_NAME,
+                                      DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DSET_DTYPE, fspace_id,
+                                      H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create dataset '%s'\n",
+                         DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            for (i = 0, data_size = 1; i < DATASET_READ_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK; i++)
+                data_size *= dims[i];
+            data_size *= DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE;
+
+            if (NULL == (write_buf = HDmalloc(data_size))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset write\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /* Fill each rank's portion of the dataset with that rank's number */
+            for (i = 0; i < (size_t)mpi_size; i++) {
+                size_t j;
+                size_t elem_per_proc =
+                    (data_size / DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE) / dims[0];
+
+                for (j = 0; j < elem_per_proc; j++) {
+                    size_t idx = (i * elem_per_proc) + j;
+
+                    ((int *)write_buf)[idx] = (int)i;
+                }
+            }
+
+            {
+                hsize_t mdims[] = {data_size / DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE};
+
+                if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't create memory dataspace\n");
+                    INDEPENDENT_OP_ERROR(dset_create);
+                }
+            }
+
+            if (H5Dwrite(dset_id, DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DSET_DTYPE, mspace_id, H5S_ALL,
+                         H5P_DEFAULT, write_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't write to dataset '%s'\n",
+                         DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if (write_buf) {
+                HDfree(write_buf);
+                write_buf = NULL;
+            }
+            if (mspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(mspace_id);
+                }
+                H5E_END_TRY;
+                mspace_id = H5I_INVALID_HID;
+            }
+            if (fspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(fspace_id);
+                }
+                H5E_END_TRY;
+                fspace_id = H5I_INVALID_HID;
+            }
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            /*
+             * Close and re-open the file to ensure that the data gets written.
+             */
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close test's container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Gclose(container_group) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close file for data flushing\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(dset_create);
+
+    /*
+     * Re-open file on all ranks.
+     */
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+    if ((group_id =
+             H5Gopen2(container_group, DATASET_READ_HYPER_FILE_POINT_MEM_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container sub-group '%s'\n",
+                 DATASET_READ_HYPER_FILE_POINT_MEM_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n", DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataspace num points\n");
+        goto error;
+    }
+
+    /*
+     * Allocate twice the amount of memory needed and leave "holes" in the memory
+     * buffer in order to prove that the mapping from hyperslab selection <-> point
+     * selection works correctly.
+     *
+     * Note: use this test's own datatype-size macro here; a previous version
+     * mistakenly used DATASET_READ_ONE_PROC_NONE_SEL_TEST_DTYPE_SIZE, which
+     * belongs to an unrelated test (both happen to be sizeof(int)).
+     */
+    read_buf_size =
+        (2 * (size_t)(space_npoints / mpi_size) * DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DTYPE_SIZE);
+    if (NULL == (read_buf = HDcalloc(1, read_buf_size))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset read\n");
+        goto error;
+    }
+
+    /* Each rank selects its own "row" of the dataset in the file dataspace */
+    for (i = 0; i < DATASET_READ_HYPER_FILE_POINT_MEM_TEST_SPACE_RANK; i++) {
+        if (i == 0) {
+            start[i] = (hsize_t)mpi_rank;
+            block[i] = 1;
+        }
+        else {
+            start[i] = 0;
+            block[i] = dims[i];
+        }
+
+        stride[i] = 1;
+        count[i] = 1;
+    }
+
+    if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't select hyperslab for dataset read\n");
+        goto error;
+    }
+
+    {
+        hsize_t mdims[] = {(hsize_t)(2 * (space_npoints / mpi_size))};
+        size_t j;
+
+        if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create memory dataspace\n");
+            goto error;
+        }
+
+        if (NULL == (points = HDmalloc((size_t)(space_npoints / mpi_size) * sizeof(hsize_t)))) {
+            H5_FAILED();
+            HDprintf("    couldn't allocate buffer for point selection\n");
+            goto error;
+        }
+
+        /* Select every other point in the 1-dimensional memory dataspace */
+        for (i = 0, j = 0; i < (size_t)(2 * (space_npoints / mpi_size)); i++) {
+            if (i % 2 == 0)
+                points[j++] = (hsize_t)i;
+        }
+
+        if (H5Sselect_elements(mspace_id, H5S_SELECT_SET, (size_t)(space_npoints / mpi_size), points) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't set point selection for dataset read\n");
+            goto error;
+        }
+    }
+
+    if (H5Dread(dset_id, DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DSET_DTYPE, mspace_id, fspace_id, H5P_DEFAULT,
+                read_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't read from dataset '%s'\n", DATASET_READ_HYPER_FILE_POINT_MEM_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Even indices received data (rank number); odd indices are untouched "holes" */
+    for (i = 0; i < (size_t)(2 * (space_npoints / mpi_size)); i++) {
+        if (i % 2 == 0) {
+            if (((int *)read_buf)[i] != (int)mpi_rank) {
+                H5_FAILED();
+                HDprintf("    data verification failed\n");
+                goto error;
+            }
+        }
+        else {
+            if (((int *)read_buf)[i] != 0) {
+                H5_FAILED();
+                HDprintf("    data verification failed\n");
+                goto error;
+            }
+        }
+    }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (points) {
+        HDfree(points);
+        points = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    /* Close the memory dataspace too - it was previously leaked on the success path */
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        if (write_buf)
+            HDfree(write_buf);
+        if (points)
+            HDfree(points);
+        if (dims)
+            HDfree(dims);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that a dataset can be read from by having
+ * a point selection in the file dataspace and a hyperslab
+ * selection in the memory dataspace.
+ */
+#define DATASET_READ_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK 2
+#define DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE sizeof(int)
+#define DATASET_READ_POINT_FILE_HYPER_MEM_TEST_GROUP_NAME "point_sel_file_hyper_sel_mem_read_test"
+#define DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DSET_NAME "point_sel_file_hyper_sel_mem_dset"
+static int
+test_read_dataset_point_file_hyper_mem(void)
+{
+    hssize_t space_npoints;
+    hsize_t *dims = NULL;
+    hsize_t *points = NULL;
+    size_t i, data_size, read_buf_size;
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t fapl_id = H5I_INVALID_HID;
+    hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t dset_id = H5I_INVALID_HID;
+    hid_t fspace_id = H5I_INVALID_HID;
+    hid_t mspace_id = H5I_INVALID_HID;
+    void *write_buf = NULL;
+    void *read_buf = NULL;
+
+    TESTING("read from dataset with point sel. for file space; hyperslab sel. for memory");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        SKIPPED();
+        HDprintf(
+            "    API functions for basic file, group, or dataset aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* NOTE(review): the point-selection math below presumes dims[0] == mpi_size
+     * (each rank owns one first-dimension "row") -- assumed to be guaranteed by
+     * generate_random_parallel_dimensions(); confirm against its implementation. */
+    if (generate_random_parallel_dimensions(DATASET_READ_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK, &dims) < 0)
+        TEST_ERROR;
+
+    /*
+     * Have rank 0 create the dataset and completely fill it with data.
+     */
+    BEGIN_INDEPENDENT_OP(dset_create)
+    {
+        if (MAINPROCESS) {
+            if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open file '%s'\n", H5_api_test_parallel_filename);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((group_id = H5Gcreate2(container_group, DATASET_READ_POINT_FILE_HYPER_MEM_TEST_GROUP_NAME,
+                                       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create container sub-group '%s'\n",
+                         DATASET_READ_POINT_FILE_HYPER_MEM_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((fspace_id =
+                     H5Screate_simple(DATASET_READ_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK, dims, NULL)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create file dataspace for dataset\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((dset_id = H5Dcreate2(group_id, DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DSET_NAME,
+                                      DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DSET_DTYPE, fspace_id,
+                                      H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create dataset '%s'\n",
+                         DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /* Total dataset size in bytes: product of all dimensions times the element size */
+            for (i = 0, data_size = 1; i < DATASET_READ_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK; i++)
+                data_size *= dims[i];
+            data_size *= DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE;
+
+            if (NULL == (write_buf = HDmalloc(data_size))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset write\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /* Fill each rank's portion of the dataset with that rank's number so the
+             * per-rank read below can verify the file <-> memory selection mapping */
+            for (i = 0; i < (size_t)mpi_size; i++) {
+                size_t j;
+                size_t elem_per_proc =
+                    (data_size / DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE) / dims[0];
+
+                for (j = 0; j < elem_per_proc; j++) {
+                    size_t idx = (i * elem_per_proc) + j;
+
+                    ((int *)write_buf)[idx] = (int)i;
+                }
+            }
+
+            {
+                /* Flat 1-D memory dataspace covering every element for the full write */
+                hsize_t mdims[] = {data_size / DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE};
+
+                if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't create memory dataspace\n");
+                    INDEPENDENT_OP_ERROR(dset_create);
+                }
+            }
+
+            if (H5Dwrite(dset_id, DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DSET_DTYPE, mspace_id, H5S_ALL,
+                         H5P_DEFAULT, write_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't write to dataset '%s'\n",
+                         DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /* Tear down rank 0's write-phase resources; errors are suppressed since
+             * these IDs are re-created during the read phase below */
+            if (write_buf) {
+                HDfree(write_buf);
+                write_buf = NULL;
+            }
+            if (mspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(mspace_id);
+                }
+                H5E_END_TRY;
+                mspace_id = H5I_INVALID_HID;
+            }
+            if (fspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(fspace_id);
+                }
+                H5E_END_TRY;
+                fspace_id = H5I_INVALID_HID;
+            }
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            /*
+             * Close and re-open the file to ensure that the data gets written.
+             */
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close test's container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Gclose(container_group) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close file for data flushing\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(dset_create);
+
+    /*
+     * Re-open file on all ranks.
+     */
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+    if ((group_id =
+             H5Gopen2(container_group, DATASET_READ_POINT_FILE_HYPER_MEM_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container sub-group '%s'\n",
+                 DATASET_READ_POINT_FILE_HYPER_MEM_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    if ((dset_id = H5Dopen2(group_id, DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open dataset '%s'\n", DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DSET_NAME);
+        goto error;
+    }
+
+    if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataset dataspace\n");
+        goto error;
+    }
+
+    if ((space_npoints = H5Sget_simple_extent_npoints(fspace_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't get dataspace num points\n");
+        goto error;
+    }
+
+    /*
+     * Allocate twice the amount of memory needed and leave "holes" in the memory
+     * buffer in order to prove that the mapping from point selection <-> hyperslab
+     * selection works correctly.
+     */
+    read_buf_size =
+        (2 * (size_t)(space_npoints / mpi_size) * DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DTYPE_SIZE);
+    if (NULL == (read_buf = HDcalloc(1, read_buf_size))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset read\n");
+        goto error;
+    }
+
+    /* H5Sselect_elements takes a flat array of (npoints * rank) coordinates */
+    if (NULL == (points = HDmalloc((size_t)((space_npoints / mpi_size) *
+                                            DATASET_READ_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK) *
+                                   sizeof(hsize_t)))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for point selection\n");
+        goto error;
+    }
+
+    /* Build the coordinate list covering this rank's row: the first coordinate of
+     * every point is the rank number; the remaining coordinates step through the
+     * row in C order. (For the rank-2 case used here, the last coordinate is
+     * simply i % dims[1].) */
+    for (i = 0; i < (size_t)(space_npoints / mpi_size); i++) {
+        size_t j;
+
+        for (j = 0; j < DATASET_READ_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK; j++) {
+            size_t idx = (i * DATASET_READ_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK) + j;
+
+            if (j == 0)
+                points[idx] = (hsize_t)mpi_rank;
+            else if (j != DATASET_READ_POINT_FILE_HYPER_MEM_TEST_SPACE_RANK - 1)
+                points[idx] = i / dims[j + 1];
+            else
+                points[idx] = i % dims[j];
+        }
+    }
+
+    if (H5Sselect_elements(fspace_id, H5S_SELECT_SET, (size_t)(space_npoints / mpi_size), points) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't set point selection for dataset read\n");
+        goto error;
+    }
+
+    {
+        /* Memory side: hyperslab selecting every other element (stride 2) of a
+         * double-sized 1-D buffer, leaving odd indices as zeroed "holes" */
+        hsize_t start[1] = {0};
+        hsize_t stride[1] = {2};
+        hsize_t count[1] = {(hsize_t)(space_npoints / mpi_size)};
+        hsize_t block[1] = {1};
+        hsize_t mdims[] = {(hsize_t)(2 * (space_npoints / mpi_size))};
+
+        if ((mspace_id = H5Screate_simple(1, mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create memory dataspace\n");
+            goto error;
+        }
+
+        if (H5Sselect_hyperslab(mspace_id, H5S_SELECT_SET, start, stride, count, block) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't set hyperslab selection for dataset write\n");
+            goto error;
+        }
+    }
+
+    if (H5Dread(dset_id, DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DSET_DTYPE, mspace_id, fspace_id, H5P_DEFAULT,
+                read_buf) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't read from dataset '%s'\n", DATASET_READ_POINT_FILE_HYPER_MEM_TEST_DSET_NAME);
+        goto error;
+    }
+
+    /* Even indices received data (rank number); odd indices must remain zero */
+    for (i = 0; i < (size_t)(2 * (space_npoints / mpi_size)); i++) {
+        if (i % 2 == 0) {
+            if (((int *)read_buf)[i] != (int)mpi_rank) {
+                H5_FAILED();
+                HDprintf("    data verification failed\n");
+                goto error;
+            }
+        }
+        else {
+            if (((int *)read_buf)[i] != 0) {
+                H5_FAILED();
+                HDprintf("    data verification failed\n");
+                goto error;
+            }
+        }
+    }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    if (points) {
+        HDfree(points);
+        points = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Sclose(fspace_id) < 0)
+        TEST_ERROR;
+    if (H5Dclose(dset_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup of anything still allocated/open; errors suppressed */
+    H5E_BEGIN_TRY
+    {
+        if (read_buf)
+            HDfree(read_buf);
+        if (write_buf)
+            HDfree(write_buf);
+        if (points)
+            HDfree(points);
+        if (dims)
+            HDfree(dims);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to check that a dataset composed of multiple chunks
+ * can be written and read correctly. When reading back the
+ * chunks of the dataset, the file dataspace and memory dataspace
+ * used are the same shape. The dataset's first dimension grows
+ * with the number of MPI ranks, while the other dimensions are fixed.
+ */
+#define DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_FIXED_CHUNK_DIMSIZE \
+ 100 /* Should be an even divisor of fixed dimension size */
+#define DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_FIXED_DIMSIZE 1000
+#define DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK 2
+#define DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE sizeof(int)
+#define DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_GROUP_NAME \
+ "multi_chunk_dataset_write_same_space_read_test"
+#define DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME "multi_chunk_dataset"
+static int
+test_write_multi_chunk_dataset_same_shape_read(void)
+{
+ hsize_t *dims = NULL;
+ hsize_t *chunk_dims = NULL;
+ hsize_t retrieved_chunk_dims[DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK];
+ hsize_t start[DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK];
+ hsize_t count[DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK];
+ size_t i, data_size, chunk_size, n_chunks_per_rank;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dcpl_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ void *write_buf = NULL;
+ int read_buf[1][DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_FIXED_CHUNK_DIMSIZE];
+
+ TESTING("write to dataset with multiple chunks using same shaped dataspaces");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, dataset, or getting property list aren't "
+ "supported with this connector\n");
+ return 0;
+ }
+
+ if (NULL ==
+ (dims = HDmalloc(DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK * sizeof(hsize_t)))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset dimensionality\n");
+ goto error;
+ }
+
+ if (NULL == (chunk_dims = HDmalloc(DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK *
+ sizeof(hsize_t)))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset dimensionality\n");
+ goto error;
+ }
+
+ for (i = 0; i < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+ if (i == 0) {
+ dims[i] = (hsize_t)mpi_size;
+ chunk_dims[i] = 1;
+ }
+ else {
+ dims[i] = DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_FIXED_DIMSIZE;
+ chunk_dims[i] = DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_FIXED_CHUNK_DIMSIZE;
+ }
+ }
+
+ for (i = 0, chunk_size = 1; i < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+ chunk_size *= chunk_dims[i];
+ chunk_size *= DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+ for (i = 0, data_size = 1; i < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+ /*
+ * Have rank 0 create the dataset and completely fill it with data.
+ */
+ BEGIN_INDEPENDENT_OP(dset_create)
+ {
+ if (MAINPROCESS) {
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_parallel_filename);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((group_id =
+ H5Gcreate2(container_group, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_GROUP_NAME,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n",
+ DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_GROUP_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((fspace_id = H5Screate_simple(DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK,
+ dims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create file dataspace for dataset\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create DCPL\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if (H5Pset_chunk(dcpl_id, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK,
+ chunk_dims) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to set chunking on DCPL\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME,
+ DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPE, fspace_id,
+ H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n",
+ DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ /*
+ * See if a copy of the DCPL reports the correct chunking.
+ */
+ if (H5Pclose(dcpl_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close DCPL\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve copy of DCPL\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+ if (H5Pget_chunk(dcpl_id, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK,
+ retrieved_chunk_dims) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve chunking info\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ for (i = 0; i < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+ if (chunk_dims[i] != retrieved_chunk_dims[i]) {
+ H5_FAILED();
+ HDprintf(" chunk dimensionality retrieved from DCPL didn't match originally specified "
+ "dimensionality\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ }
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ /*
+ * Ensure that each underlying chunk contains the values
+ *
+ * chunk_index .. (chunk_nelemts - 1) + chunk_index.
+ *
+ * That is to say, for a chunk size of 10 x 10, chunk 0
+ * contains the values
+ *
+ * 0 .. 99
+ *
+ * while the next chunk contains the values
+ *
+ * 1 .. 100
+ *
+ * and so on.
+ */
+ for (i = 0; i < data_size / DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE; i++) {
+ size_t j;
+ size_t base;
+ size_t tot_adjust;
+
+ /*
+ * Calculate a starting base value by taking the index value mod
+ * the size of a chunk in each dimension.
+ */
+ for (j = 0, base = i; j < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; j++)
+ if (chunk_dims[j] > 1 && base >= chunk_dims[j])
+ base %= chunk_dims[j];
+
+ /*
+ * Calculate the adjustment in each dimension.
+ */
+ for (j = 0, tot_adjust = 0;
+ j < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+ if (j == (DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+ tot_adjust += (i % dims[j]) / chunk_dims[j];
+ else {
+ size_t k;
+ size_t n_faster_elemts;
+
+ /*
+ * Calculate the number of elements in faster dimensions.
+ */
+ for (k = j + 1, n_faster_elemts = 1;
+ k < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; k++)
+ n_faster_elemts *= dims[k];
+
+ tot_adjust +=
+ (((i / n_faster_elemts) / chunk_dims[j]) * (dims[j + 1] / chunk_dims[j + 1])) +
+ (((i / n_faster_elemts) % chunk_dims[j]) * chunk_dims[j + 1]);
+ }
+ }
+
+ ((int *)write_buf)[i] = (int)(base + tot_adjust);
+ }
+
+ /*
+ * Write every chunk in the dataset.
+ */
+ if (H5Dwrite(dset_id, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL,
+ H5P_DEFAULT, write_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n",
+ DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dcpl_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(dcpl_id);
+ }
+ H5E_END_TRY;
+ dcpl_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ /*
+ * Close and re-open the file to ensure that the data gets written.
+ */
+ if (H5Gclose(group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close test's container group\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ if (H5Gclose(container_group) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close container group\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ if (H5Fclose(file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close file for data flushing\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ }
+ }
+ END_INDEPENDENT_OP(dset_create);
+
+ /*
+ * Re-open file on all ranks.
+ */
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+ TEST_ERROR;
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+ goto error;
+ }
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+ if ((group_id = H5Gopen2(container_group, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_GROUP_NAME,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container sub-group '%s'\n",
+ DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((dset_id =
+ H5Dopen2(group_id, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n",
+ DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ goto error;
+ }
+
+ /*
+ * Create 2-dimensional memory dataspace for read buffer.
+ */
+ {
+ hsize_t mdims[] = {chunk_dims[0], chunk_dims[1]};
+
+ if ((mspace_id = H5Screate_simple(2, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create memory dataspace\n");
+ goto error;
+ }
+ }
+
+ for (i = 0; i < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+ count[i] = chunk_dims[i];
+ }
+
+ /*
+ * Each rank reads their respective chunks in the dataset, checking the data for each one.
+ */
+ if (MAINPROCESS)
+ HDprintf("\n");
+ for (i = 0, n_chunks_per_rank = (data_size / (size_t)mpi_size) / chunk_size; i < n_chunks_per_rank; i++) {
+ size_t j, k;
+
+ if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+ H5_FAILED();
+ HDprintf(" MPI_Barrier failed\n");
+ goto error;
+ }
+
+ if (MAINPROCESS)
+ HDprintf("\r All ranks reading chunk %zu", i);
+
+ for (j = 0; j < DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+ if (j == 0)
+ start[j] = (hsize_t)mpi_rank;
+ else if (j == (DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+ /* Fastest changing dimension */
+ start[j] = (i * chunk_dims[j]) % dims[j];
+ else
+ start[j] = ((i * chunk_dims[j + 1]) / dims[j + 1]) * (chunk_dims[j]);
+ }
+
+ /*
+ * Adjust file dataspace selection for next chunk.
+ */
+ if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, NULL, count, NULL) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to set hyperslab selection\n");
+ goto error;
+ }
+
+ for (j = 0; j < chunk_dims[0]; j++)
+ for (k = 0; k < chunk_dims[1]; k++)
+ read_buf[j][k] = 0;
+
+ if (H5Dread(dset_id, DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_DTYPE, mspace_id, fspace_id,
+ H5P_DEFAULT, read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n",
+ DATASET_MULTI_CHUNK_WRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+ goto error;
+ }
+
+ for (j = 0; j < chunk_dims[0]; j++) {
+ for (k = 0; k < chunk_dims[1]; k++) {
+ size_t val =
+ ((j * chunk_dims[0]) + k + i) +
+ ((hsize_t)mpi_rank * n_chunks_per_rank); /* Additional value offset for each rank */
+ if (read_buf[j][k] != (int)val) {
+ H5_FAILED();
+ HDprintf(" data verification failed for chunk %lld\n", (long long)i);
+ goto error;
+ }
+ }
+ }
+ }
+
+ if (chunk_dims) {
+ HDfree(chunk_dims);
+ chunk_dims = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5Sclose(mspace_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (write_buf)
+ HDfree(write_buf);
+ if (chunk_dims)
+ HDfree(chunk_dims);
+ if (dims)
+ HDfree(dims);
+ H5Pclose(dcpl_id);
+ H5Sclose(mspace_id);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Pclose(fapl_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a dataset composed of multiple chunks
+ * can be written and read correctly. When reading back the
+ * chunks of the dataset, the file dataspace and memory dataspace
+ * used are differently shaped. The dataset's first dimension grows
+ * with the number of MPI ranks, while the other dimensions are fixed.
+ */
+#define DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_FIXED_CHUNK_DIMSIZE \
+ 100 /* Should be an even divisor of fixed dimension size */
+#define DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE \
+ (DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_FIXED_CHUNK_DIMSIZE / 10)
+#define DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_FIXED_DIMSIZE 1000
+#define DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK 2
+#define DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE sizeof(int)
+#define DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_GROUP_NAME \
+ "multi_chunk_dataset_write_diff_space_read_test"
+#define DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME "multi_chunk_dataset"
+static int
+test_write_multi_chunk_dataset_diff_shape_read(void)
+{
+ hsize_t *dims = NULL;
+ hsize_t *chunk_dims = NULL;
+ hsize_t retrieved_chunk_dims[DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK];
+ hsize_t start[DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK];
+ hsize_t count[DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK];
+ size_t i, data_size, chunk_size, n_chunks_per_rank;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dcpl_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ void *write_buf = NULL;
+ int read_buf[DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE]
+ [DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE];
+
+ TESTING("write to dataset with multiple chunks using differently shaped dataspaces");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, dataset, or getting property list aren't "
+ "supported with this connector\n");
+ return 0;
+ }
+
+ if (NULL ==
+ (dims = HDmalloc(DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK * sizeof(hsize_t)))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset dimensionality\n");
+ goto error;
+ }
+
+ if (NULL == (chunk_dims = HDmalloc(DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK *
+ sizeof(hsize_t)))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset dimensionality\n");
+ goto error;
+ }
+
+ for (i = 0; i < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+ if (i == 0) {
+ dims[i] = (hsize_t)mpi_size;
+ chunk_dims[i] = 1;
+ }
+ else {
+ dims[i] = DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_FIXED_DIMSIZE;
+ chunk_dims[i] = DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_FIXED_CHUNK_DIMSIZE;
+ }
+ }
+
+ for (i = 0, chunk_size = 1; i < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+ chunk_size *= chunk_dims[i];
+ chunk_size *= DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+ for (i = 0, data_size = 1; i < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+ /*
+ * Have rank 0 create the dataset and completely fill it with data.
+ */
+ BEGIN_INDEPENDENT_OP(dset_create)
+ {
+ if (MAINPROCESS) {
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_parallel_filename);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((group_id =
+ H5Gcreate2(container_group, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_GROUP_NAME,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n",
+ DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_GROUP_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((fspace_id = H5Screate_simple(DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK,
+ dims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create file dataspace for dataset\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create DCPL\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if (H5Pset_chunk(dcpl_id, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK,
+ chunk_dims) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to set chunking on DCPL\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME,
+ DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE, fspace_id,
+ H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n",
+ DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ /*
+ * See if a copy of the DCPL reports the correct chunking.
+ */
+ if (H5Pclose(dcpl_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close DCPL\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve copy of DCPL\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+ if (H5Pget_chunk(dcpl_id, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK,
+ retrieved_chunk_dims) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve chunking info\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ for (i = 0; i < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+ if (chunk_dims[i] != retrieved_chunk_dims[i]) {
+ H5_FAILED();
+ HDprintf(" chunk dimensionality retrieved from DCPL didn't match originally specified "
+ "dimensionality\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ }
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ /*
+ * Ensure that each underlying chunk contains the values
+ *
+ * chunk_index .. (chunk_nelemts - 1) + chunk_index.
+ *
+ * That is to say, for a chunk size of 10 x 10, chunk 0
+ * contains the values
+ *
+ * 0 .. 99
+ *
+ * while the next chunk contains the values
+ *
+ * 1 .. 100
+ *
+ * and so on.
+ */
+ for (i = 0; i < data_size / DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE; i++) {
+ size_t j;
+ size_t base;
+ size_t tot_adjust;
+
+ /*
+ * Calculate a starting base value by taking the index value mod
+ * the size of a chunk in each dimension.
+ */
+ for (j = 0, base = i; j < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; j++)
+ if (chunk_dims[j] > 1 && base >= chunk_dims[j])
+ base %= chunk_dims[j];
+
+ /*
+ * Calculate the adjustment in each dimension.
+ */
+ for (j = 0, tot_adjust = 0;
+ j < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+ if (j == (DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+ tot_adjust += (i % dims[j]) / chunk_dims[j];
+ else {
+ size_t k;
+ size_t n_faster_elemts;
+
+ /*
+ * Calculate the number of elements in faster dimensions.
+ */
+ for (k = j + 1, n_faster_elemts = 1;
+ k < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; k++)
+ n_faster_elemts *= dims[k];
+
+ tot_adjust +=
+ (((i / n_faster_elemts) / chunk_dims[j]) * (dims[j + 1] / chunk_dims[j + 1])) +
+ (((i / n_faster_elemts) % chunk_dims[j]) * chunk_dims[j + 1]);
+ }
+ }
+
+ ((int *)write_buf)[i] = (int)(base + tot_adjust);
+ }
+
+ /*
+ * Write every chunk in the dataset.
+ */
+ if (H5Dwrite(dset_id, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE, H5S_ALL, H5S_ALL,
+ H5P_DEFAULT, write_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n",
+ DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dcpl_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(dcpl_id);
+ }
+ H5E_END_TRY;
+ dcpl_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ /*
+ * Close and re-open the file to ensure that the data gets written.
+ */
+ if (H5Gclose(group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close test's container group\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ if (H5Gclose(container_group) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close container group\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ if (H5Fclose(file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close file for data flushing\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ }
+ }
+ END_INDEPENDENT_OP(dset_create);
+
+ /*
+ * Re-open file on all ranks.
+ */
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+ TEST_ERROR;
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+ goto error;
+ }
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+ if ((group_id = H5Gopen2(container_group, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_GROUP_NAME,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container sub-group '%s'\n",
+ DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ if ((dset_id =
+ H5Dopen2(group_id, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n",
+ DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ goto error;
+ }
+
+ /*
+ * Create memory dataspace for read buffer.
+ */
+ {
+ hsize_t mdims[] = {DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE,
+ DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE};
+
+ if ((mspace_id = H5Screate_simple(DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK,
+ mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create memory dataspace\n");
+ goto error;
+ }
+ }
+
+ for (i = 0; i < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+ count[i] = chunk_dims[i];
+ }
+
+ /*
+ * Each rank reads their respective chunks in the dataset, checking the data for each one.
+ */
+ if (MAINPROCESS)
+ HDprintf("\n");
+ for (i = 0, n_chunks_per_rank = (data_size / (size_t)mpi_size) / chunk_size; i < n_chunks_per_rank; i++) {
+ size_t j, k;
+
+ if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+ H5_FAILED();
+ HDprintf(" MPI_Barrier failed\n");
+ goto error;
+ }
+
+ if (MAINPROCESS)
+ HDprintf("\r All ranks reading chunk %zu", i);
+
+ for (j = 0; j < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+ if (j == 0)
+ start[j] = (hsize_t)mpi_rank;
+ else if (j == (DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+ /* Fastest changing dimension */
+ start[j] = (i * chunk_dims[j]) % dims[j];
+ else
+ start[j] = ((i * chunk_dims[j + 1]) / dims[j + 1]) * (chunk_dims[j]);
+ }
+
+ /*
+ * Adjust file dataspace selection for next chunk.
+ */
+ if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, NULL, count, NULL) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to set hyperslab selection\n");
+ goto error;
+ }
+
+ for (j = 0; j < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE; j++)
+ for (k = 0; k < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE; k++)
+ read_buf[j][k] = 0;
+
+ if (H5Dread(dset_id, DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE, mspace_id, fspace_id,
+ H5P_DEFAULT, read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n",
+ DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+ goto error;
+ }
+
+ for (j = 0; j < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE; j++) {
+ for (k = 0; k < DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE; k++) {
+ size_t val = ((j * DATASET_MULTI_CHUNK_WRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE) + k + i) +
+ ((hsize_t)mpi_rank * n_chunks_per_rank);
+
+ if (read_buf[j][k] != (int)val) {
+ H5_FAILED();
+ HDprintf(" data verification failed for chunk %lld\n", (long long)i);
+ goto error;
+ }
+ }
+ }
+ }
+
+ if (chunk_dims) {
+ HDfree(chunk_dims);
+ chunk_dims = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (H5Sclose(mspace_id) < 0)
+ TEST_ERROR;
+ if (H5Sclose(fspace_id) < 0)
+ TEST_ERROR;
+ if (H5Dclose(dset_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (write_buf)
+ HDfree(write_buf);
+ if (chunk_dims)
+ HDfree(chunk_dims);
+ if (dims)
+ HDfree(dims);
+ H5Pclose(dcpl_id);
+ H5Sclose(mspace_id);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Pclose(fapl_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a dataset composed of multiple chunks
+ * can be written and read correctly several times in a row.
+ * When reading back the chunks of the dataset, the file
+ * dataspace and memory dataspace used are the same shape.
+ * The dataset's first dimension grows with the number of MPI
+ * ranks, while the other dimensions are fixed.
+ */
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_FIXED_CHUNK_DIMSIZE \
+ 100 /* Should be an even divisor of fixed dimension size */
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_FIXED_DIMSIZE 1000
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK 2
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE sizeof(int)
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPE H5T_NATIVE_INT
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_GROUP_NAME \
+ "multi_chunk_dataset_same_space_overwrite_test"
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME "multi_chunk_dataset"
+#define DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_NITERS 10
+static int
+test_overwrite_multi_chunk_dataset_same_shape_read(void)
+{
+ hsize_t *dims = NULL;
+ hsize_t *chunk_dims = NULL;
+ hsize_t retrieved_chunk_dims[DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK];
+ hsize_t start[DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK];
+ hsize_t count[DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK];
+ size_t i, data_size, chunk_size, n_chunks_per_rank;
+ size_t niter;
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dcpl_id = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ hid_t mspace_id = H5I_INVALID_HID;
+ void *write_buf = NULL;
+ int read_buf[1][DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_FIXED_CHUNK_DIMSIZE];
+
+ TESTING("several overwrites to dataset with multiple chunks using same shaped dataspaces");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+ SKIPPED();
+ HDprintf(" API functions for basic file, group, dataset, or getting property list aren't "
+ "supported with this connector\n");
+ return 0;
+ }
+
+ if (NULL == (dims = HDmalloc(DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK *
+ sizeof(hsize_t)))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset dimensionality\n");
+ goto error;
+ }
+
+ if (NULL == (chunk_dims = HDmalloc(DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK *
+ sizeof(hsize_t)))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset dimensionality\n");
+ goto error;
+ }
+
+ for (i = 0; i < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+ if (i == 0) {
+ dims[i] = (hsize_t)mpi_size;
+ chunk_dims[i] = 1;
+ }
+ else {
+ dims[i] = DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_FIXED_DIMSIZE;
+ chunk_dims[i] = DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_FIXED_CHUNK_DIMSIZE;
+ }
+ }
+
+ for (i = 0, chunk_size = 1; i < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+ chunk_size *= chunk_dims[i];
+ chunk_size *= DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+ for (i = 0, data_size = 1; i < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+ data_size *= dims[i];
+ data_size *= DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+ /*
+ * Have rank 0 create the dataset, but don't fill it with data yet.
+ */
+ BEGIN_INDEPENDENT_OP(dset_create)
+ {
+ if (MAINPROCESS) {
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open file '%s'\n", H5_api_test_parallel_filename);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((group_id = H5Gcreate2(container_group,
+ DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_GROUP_NAME,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create container sub-group '%s'\n",
+ DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_GROUP_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((fspace_id = H5Screate_simple(
+ DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK, dims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create file dataspace for dataset\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create DCPL\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if (H5Pset_chunk(dcpl_id, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK,
+ chunk_dims) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to set chunking on DCPL\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ /* Set dataset space allocation time to Early to ensure all chunk-related metadata is available to
+ * all other processes when they open the dataset */
+ if (H5Pset_alloc_time(dcpl_id, H5D_ALLOC_TIME_EARLY) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to set allocation time on DCPL\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((dset_id = H5Dcreate2(group_id, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME,
+ DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPE,
+ fspace_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't create dataset '%s'\n",
+ DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ /*
+ * See if a copy of the DCPL reports the correct chunking.
+ */
+ if (H5Pclose(dcpl_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close DCPL\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve copy of DCPL\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+ if (H5Pget_chunk(dcpl_id, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK,
+ retrieved_chunk_dims) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to retrieve chunking info\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ for (i = 0; i < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+ if (chunk_dims[i] != retrieved_chunk_dims[i]) {
+ H5_FAILED();
+ HDprintf(" chunk dimensionality retrieved from DCPL didn't match originally specified "
+ "dimensionality\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ }
+
+ if (NULL == (write_buf = HDmalloc(data_size))) {
+ H5_FAILED();
+ HDprintf(" couldn't allocate buffer for dataset write\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dcpl_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Pclose(dcpl_id);
+ }
+ H5E_END_TRY;
+ dcpl_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ /*
+ * Close and re-open the file on all ranks.
+ */
+ if (H5Gclose(group_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close test's container group\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ if (H5Gclose(container_group) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close container group\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ if (H5Fclose(file_id) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to close file for data flushing\n");
+ INDEPENDENT_OP_ERROR(dset_create);
+ }
+ }
+ }
+ END_INDEPENDENT_OP(dset_create);
+
+ /*
+ * Re-open file on all ranks.
+ */
+ if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+ TEST_ERROR;
+ if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+ goto error;
+ }
+ if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+ goto error;
+ }
+ if ((group_id = H5Gopen2(container_group, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_GROUP_NAME,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open container sub-group '%s'\n",
+ DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_GROUP_NAME);
+ goto error;
+ }
+
+ /*
+ * Create 2-dimensional memory dataspace for read buffer.
+ */
+ {
+ hsize_t mdims[] = {chunk_dims[0], chunk_dims[1]};
+
+ if ((mspace_id = H5Screate_simple(2, mdims, NULL)) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to create memory dataspace\n");
+ goto error;
+ }
+ }
+
+ for (i = 0; i < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+ count[i] = chunk_dims[i];
+ }
+
+ if (MAINPROCESS)
+ HDprintf("\n");
+ for (niter = 0; niter < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_NITERS; niter++) {
+ if ((dset_id = H5Dopen2(group_id, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n",
+ DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+ goto error;
+ }
+
+ BEGIN_INDEPENDENT_OP(dset_write)
+ {
+ if (MAINPROCESS) {
+ memset(write_buf, 0, data_size);
+
+ /*
+ * Ensure that each underlying chunk contains the values
+ *
+ * chunk_index .. (chunk_nelemts - 1) + chunk_index.
+ *
+ * That is to say, for a chunk size of 10 x 10, chunk 0
+ * contains the values
+ *
+ * 0 .. 99
+ *
+ * while the next chunk contains the values
+ *
+ * 1 .. 100
+ *
+ * and so on. On each iteration, we add 1 to the previous
+ * values.
+ */
+ for (i = 0; i < data_size / DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPESIZE;
+ i++) {
+ size_t j;
+ size_t base;
+ size_t tot_adjust;
+
+ /*
+ * Calculate a starting base value by taking the index value mod
+ * the size of a chunk in each dimension.
+ */
+ for (j = 0, base = i;
+ j < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; j++)
+ if (chunk_dims[j] > 1 && base >= chunk_dims[j])
+ base %= chunk_dims[j];
+
+ /*
+ * Calculate the adjustment in each dimension.
+ */
+ for (j = 0, tot_adjust = 0;
+ j < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+ if (j == (DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+ tot_adjust += (i % dims[j]) / chunk_dims[j];
+ else {
+ size_t k;
+ size_t n_faster_elemts;
+
+ /*
+ * Calculate the number of elements in faster dimensions.
+ */
+ for (k = j + 1, n_faster_elemts = 1;
+ k < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; k++)
+ n_faster_elemts *= dims[k];
+
+ tot_adjust += (((i / n_faster_elemts) / chunk_dims[j]) *
+ (dims[j + 1] / chunk_dims[j + 1])) +
+ (((i / n_faster_elemts) % chunk_dims[j]) * chunk_dims[j + 1]);
+ }
+ }
+
+ ((int *)write_buf)[i] = (int)(base + tot_adjust + niter);
+ }
+
+ /*
+ * Write every chunk in the dataset.
+ */
+ if (H5Dwrite(dset_id, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPE, H5S_ALL,
+ H5S_ALL, H5P_DEFAULT, write_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't write to dataset '%s'\n",
+ DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+ INDEPENDENT_OP_ERROR(dset_write);
+ }
+ }
+ }
+ END_INDEPENDENT_OP(dset_write);
+
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+ H5_FAILED();
+ HDprintf(" MPI_Barrier failed\n");
+ goto error;
+ }
+
+ if ((dset_id = H5Dopen2(group_id, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME,
+ H5P_DEFAULT)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't open dataset '%s'\n",
+ DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+ goto error;
+ }
+
+ if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't get dataset dataspace\n");
+ goto error;
+ }
+
+ /*
+ * Each rank reads their respective chunks in the dataset, checking the data for each one.
+ */
+ for (i = 0, n_chunks_per_rank = (data_size / (size_t)mpi_size) / chunk_size; i < n_chunks_per_rank;
+ i++) {
+ size_t j, k;
+
+ if (MAINPROCESS)
+ HDprintf("\r All ranks reading chunk %zu", i);
+
+ for (j = 0; j < DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+ if (j == 0)
+ start[j] = (hsize_t)mpi_rank;
+ else if (j == (DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+ /* Fastest changing dimension */
+ start[j] = (i * chunk_dims[j]) % dims[j];
+ else
+ start[j] = ((i * chunk_dims[j + 1]) / dims[j + 1]) * (chunk_dims[j]);
+ }
+
+ /*
+ * Adjust file dataspace selection for next chunk.
+ */
+ if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, NULL, count, NULL) < 0) {
+ H5_FAILED();
+ HDprintf(" failed to set hyperslab selection\n");
+ goto error;
+ }
+
+ for (j = 0; j < chunk_dims[0]; j++)
+ for (k = 0; k < chunk_dims[1]; k++)
+ read_buf[j][k] = 0;
+
+ if (H5Dread(dset_id, DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_DTYPE, mspace_id,
+ fspace_id, H5P_DEFAULT, read_buf) < 0) {
+ H5_FAILED();
+ HDprintf(" couldn't read from dataset '%s'\n",
+ DATASET_MULTI_CHUNK_OVERWRITE_SAME_SPACE_READ_TEST_DSET_NAME);
+ goto error;
+ }
+
+ for (j = 0; j < chunk_dims[0]; j++) {
+ for (k = 0; k < chunk_dims[1]; k++) {
+ size_t val =
+ ((j * chunk_dims[0]) + k + i) +
+ ((hsize_t)mpi_rank * n_chunks_per_rank) /* Additional value offset for each rank */
+ + niter;
+ if (read_buf[j][k] != (int)val) {
+ H5_FAILED();
+ HDprintf(" data verification failed for chunk %lld\n", (long long)i);
+ goto error;
+ }
+ }
+ }
+ }
+
+ if (fspace_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Sclose(fspace_id);
+ }
+ H5E_END_TRY;
+ fspace_id = H5I_INVALID_HID;
+ }
+ if (dset_id >= 0) {
+ H5E_BEGIN_TRY
+ {
+ H5Dclose(dset_id);
+ }
+ H5E_END_TRY;
+ dset_id = H5I_INVALID_HID;
+ }
+
+ if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+ H5_FAILED();
+ HDprintf(" MPI_Barrier failed\n");
+ goto error;
+ }
+ }
+
+ if (chunk_dims) {
+ HDfree(chunk_dims);
+ chunk_dims = NULL;
+ }
+
+ if (dims) {
+ HDfree(dims);
+ dims = NULL;
+ }
+
+ if (write_buf) {
+ HDfree(write_buf);
+ write_buf = NULL;
+ }
+
+ if (H5Sclose(mspace_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(group_id) < 0)
+ TEST_ERROR;
+ if (H5Gclose(container_group) < 0)
+ TEST_ERROR;
+ if (H5Pclose(fapl_id) < 0)
+ TEST_ERROR;
+ if (H5Fclose(file_id) < 0)
+ TEST_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY
+ {
+ if (write_buf)
+ HDfree(write_buf);
+ if (chunk_dims)
+ HDfree(chunk_dims);
+ if (dims)
+ HDfree(dims);
+ H5Pclose(dcpl_id);
+ H5Sclose(mspace_id);
+ H5Sclose(fspace_id);
+ H5Dclose(dset_id);
+ H5Gclose(group_id);
+ H5Gclose(container_group);
+ H5Pclose(fapl_id);
+ H5Fclose(file_id);
+ }
+ H5E_END_TRY;
+
+ return 1;
+}
+
+/*
+ * A test to check that a dataset composed of multiple chunks
+ * can be written and read correctly several times in a row.
+ * When reading back the chunks of the dataset, the file
+ * dataspace and memory dataspace used are differently shaped.
+ * The dataset's first dimension grows with the number of MPI
+ * ranks, while the other dimensions are fixed.
+ */
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_FIXED_CHUNK_DIMSIZE                               \
+    100 /* Should be an even divisor of fixed dimension size */
+/* Edge length of the square read buffer. One whole chunk (1 x 100 elements)
+ * is read into this differently-shaped 10 x 10 memory buffer, so this value
+ * squared must equal the number of elements in a chunk. */
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE                                  \
+    (DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_FIXED_CHUNK_DIMSIZE / 10)
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_FIXED_DIMSIZE    1000
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK  2
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE   sizeof(int)
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE       H5T_NATIVE_INT
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_GROUP_NAME                                        \
+    "multi_chunk_dataset_diff_space_overwrite_test"
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME "multi_chunk_dataset"
+/* Number of overwrite/read-back rounds performed by the test */
+#define DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_NITERS 10
+static int
+test_overwrite_multi_chunk_dataset_diff_shape_read(void)
+{
+    hsize_t *dims       = NULL;
+    hsize_t *chunk_dims = NULL;
+    /* Fix: size the local arrays with this test's DIFF_SPACE rank macro.
+     * The previous (SAME_SPACE) test's macro was used here by copy-paste;
+     * both expand to 2 today, but the mismatch is a latent bug if either
+     * test's rank ever changes. */
+    hsize_t retrieved_chunk_dims[DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK];
+    hsize_t start[DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK];
+    hsize_t count[DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK];
+    size_t  i, data_size, chunk_size, n_chunks_per_rank;
+    size_t  niter;
+    hid_t   file_id         = H5I_INVALID_HID;
+    hid_t   fapl_id         = H5I_INVALID_HID;
+    hid_t   container_group = H5I_INVALID_HID, group_id = H5I_INVALID_HID;
+    hid_t   dset_id  = H5I_INVALID_HID;
+    hid_t   dcpl_id  = H5I_INVALID_HID;
+    hid_t   fspace_id = H5I_INVALID_HID;
+    hid_t   mspace_id = H5I_INVALID_HID;
+    void   *write_buf = NULL;
+    int     read_buf[DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE]
+                    [DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE];
+
+    TESTING("several overwrites to dataset with multiple chunks using differently shaped dataspaces");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_GET_PLIST)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file, group, dataset, or getting property list aren't "
+                 "supported with this connector\n");
+        return 0;
+    }
+
+    if (NULL == (dims = HDmalloc(DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK *
+                                 sizeof(hsize_t)))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset dimensionality\n");
+        goto error;
+    }
+
+    if (NULL == (chunk_dims = HDmalloc(DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK *
+                                       sizeof(hsize_t)))) {
+        H5_FAILED();
+        HDprintf("    couldn't allocate buffer for dataset dimensionality\n");
+        goto error;
+    }
+
+    /* First dimension scales with the number of ranks (1 chunk slot per rank);
+     * remaining dimensions are fixed-size. */
+    for (i = 0; i < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+        if (i == 0) {
+            dims[i]       = (hsize_t)mpi_size;
+            chunk_dims[i] = 1;
+        }
+        else {
+            dims[i]       = DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_FIXED_DIMSIZE;
+            chunk_dims[i] = DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_FIXED_CHUNK_DIMSIZE;
+        }
+    }
+
+    for (i = 0, chunk_size = 1; i < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+        chunk_size *= chunk_dims[i];
+    chunk_size *= DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+    for (i = 0, data_size = 1; i < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++)
+        data_size *= dims[i];
+    data_size *= DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE;
+
+    /*
+     * Have rank 0 create the dataset, but don't fill it with data yet.
+     */
+    BEGIN_INDEPENDENT_OP(dset_create)
+    {
+        if (MAINPROCESS) {
+            if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open file '%s'\n", H5_api_test_parallel_filename);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((group_id = H5Gcreate2(container_group,
+                                       DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_GROUP_NAME,
+                                       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create container sub-group '%s'\n",
+                         DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_GROUP_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((fspace_id = H5Screate_simple(
+                     DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK, dims, NULL)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create file dataspace for dataset\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to create DCPL\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if (H5Pset_chunk(dcpl_id, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK,
+                             chunk_dims) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to set chunking on DCPL\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /* Set dataset space allocation time to Early to ensure all chunk-related metadata is available to
+             * all other processes when they open the dataset */
+            if (H5Pset_alloc_time(dcpl_id, H5D_ALLOC_TIME_EARLY) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to set allocation time on DCPL\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((dset_id = H5Dcreate2(group_id, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME,
+                                      DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE,
+                                      fspace_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT)) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't create dataset '%s'\n",
+                         DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            /*
+             * See if a copy of the DCPL reports the correct chunking.
+             */
+            if (H5Pclose(dcpl_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close DCPL\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if ((dcpl_id = H5Dget_create_plist(dset_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve copy of DCPL\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            memset(retrieved_chunk_dims, 0, sizeof(retrieved_chunk_dims));
+            if (H5Pget_chunk(dcpl_id, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK,
+                             retrieved_chunk_dims) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to retrieve chunking info\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            for (i = 0; i < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+                if (chunk_dims[i] != retrieved_chunk_dims[i]) {
+                    H5_FAILED();
+                    HDprintf("    chunk dimensionality retrieved from DCPL didn't match originally specified "
+                             "dimensionality\n");
+                    INDEPENDENT_OP_ERROR(dset_create);
+                }
+            }
+
+            if (NULL == (write_buf = HDmalloc(data_size))) {
+                H5_FAILED();
+                HDprintf("    couldn't allocate buffer for dataset write\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+
+            if (fspace_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Sclose(fspace_id);
+                }
+                H5E_END_TRY;
+                fspace_id = H5I_INVALID_HID;
+            }
+            if (dcpl_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Pclose(dcpl_id);
+                }
+                H5E_END_TRY;
+                dcpl_id = H5I_INVALID_HID;
+            }
+            if (dset_id >= 0) {
+                H5E_BEGIN_TRY
+                {
+                    H5Dclose(dset_id);
+                }
+                H5E_END_TRY;
+                dset_id = H5I_INVALID_HID;
+            }
+
+            /*
+             * Close and re-open the file on all ranks.
+             */
+            if (H5Gclose(group_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close test's container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Gclose(container_group) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close container group\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+            if (H5Fclose(file_id) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to close file for data flushing\n");
+                INDEPENDENT_OP_ERROR(dset_create);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(dset_create);
+
+    /*
+     * Re-open file on all ranks.
+     */
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+    if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't re-open file '%s'\n", H5_api_test_parallel_filename);
+        goto error;
+    }
+    if ((container_group = H5Gopen2(file_id, DATASET_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container group '%s'\n", DATASET_TEST_GROUP_NAME);
+        goto error;
+    }
+    if ((group_id = H5Gopen2(container_group, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_GROUP_NAME,
+                             H5P_DEFAULT)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't open container sub-group '%s'\n",
+                 DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_GROUP_NAME);
+        goto error;
+    }
+
+    /*
+     * Create memory dataspace for read buffer. Note that this dataspace is
+     * deliberately shaped differently (square) from the file selection
+     * (one 1 x 100 chunk) while covering the same number of elements.
+     */
+    {
+        hsize_t mdims[] = {DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE,
+                           DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE};
+
+        if ((mspace_id = H5Screate_simple(DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK,
+                                          mdims, NULL)) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to create memory dataspace\n");
+            goto error;
+        }
+    }
+
+    /* Each file-space selection covers exactly one chunk */
+    for (i = 0; i < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; i++) {
+        count[i] = chunk_dims[i];
+    }
+
+    if (MAINPROCESS)
+        HDprintf("\n");
+    for (niter = 0; niter < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_NITERS; niter++) {
+        if ((dset_id = H5Dopen2(group_id, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME,
+                                H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't open dataset '%s'\n",
+                     DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+            goto error;
+        }
+
+        BEGIN_INDEPENDENT_OP(dset_write)
+        {
+            if (MAINPROCESS) {
+                memset(write_buf, 0, data_size);
+
+                /*
+                 * Ensure that each underlying chunk contains the values
+                 *
+                 * chunk_index .. (chunk_nelemts - 1) + chunk_index.
+                 *
+                 * That is to say, for a chunk size of 10 x 10, chunk 0
+                 * contains the values
+                 *
+                 * 0 .. 99
+                 *
+                 * while the next chunk contains the values
+                 *
+                 * 1 .. 100
+                 *
+                 * and so on. On each iteration, we add 1 to the previous
+                 * values.
+                 */
+                for (i = 0; i < data_size / DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPESIZE;
+                     i++) {
+                    size_t j;
+                    size_t base;
+                    size_t tot_adjust;
+
+                    /*
+                     * Calculate a starting base value by taking the index value mod
+                     * the size of a chunk in each dimension.
+                     */
+                    for (j = 0, base = i;
+                         j < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; j++)
+                        if (chunk_dims[j] > 1 && base >= chunk_dims[j])
+                            base %= chunk_dims[j];
+
+                    /*
+                     * Calculate the adjustment in each dimension.
+                     */
+                    for (j = 0, tot_adjust = 0;
+                         j < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+                        if (j == (DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+                            tot_adjust += (i % dims[j]) / chunk_dims[j];
+                        else {
+                            size_t k;
+                            size_t n_faster_elemts;
+
+                            /*
+                             * Calculate the number of elements in faster dimensions.
+                             */
+                            for (k = j + 1, n_faster_elemts = 1;
+                                 k < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; k++)
+                                n_faster_elemts *= dims[k];
+
+                            tot_adjust += (((i / n_faster_elemts) / chunk_dims[j]) *
+                                           (dims[j + 1] / chunk_dims[j + 1])) +
+                                          (((i / n_faster_elemts) % chunk_dims[j]) * chunk_dims[j + 1]);
+                        }
+                    }
+
+                    ((int *)write_buf)[i] = (int)(base + tot_adjust + niter);
+                }
+
+                /*
+                 * Write every chunk in the dataset.
+                 */
+                if (H5Dwrite(dset_id, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE, H5S_ALL,
+                             H5S_ALL, H5P_DEFAULT, write_buf) < 0) {
+                    H5_FAILED();
+                    HDprintf("    couldn't write to dataset '%s'\n",
+                             DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+                    INDEPENDENT_OP_ERROR(dset_write);
+                }
+            }
+        }
+        END_INDEPENDENT_OP(dset_write);
+
+        if (dset_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Dclose(dset_id);
+            }
+            H5E_END_TRY;
+            dset_id = H5I_INVALID_HID;
+        }
+
+        if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+            H5_FAILED();
+            HDprintf("    MPI_Barrier failed\n");
+            goto error;
+        }
+
+        if ((dset_id = H5Dopen2(group_id, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME,
+                                H5P_DEFAULT)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't open dataset '%s'\n",
+                     DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+            goto error;
+        }
+
+        if ((fspace_id = H5Dget_space(dset_id)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't get dataset dataspace\n");
+            goto error;
+        }
+
+        /*
+         * Each rank reads their respective chunks in the dataset, checking the data for each one.
+         */
+        for (i = 0, n_chunks_per_rank = (data_size / (size_t)mpi_size) / chunk_size; i < n_chunks_per_rank;
+             i++) {
+            size_t j, k;
+
+            if (MAINPROCESS)
+                HDprintf("\r All ranks reading chunk %zu", i);
+
+            for (j = 0; j < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK; j++) {
+                if (j == 0)
+                    start[j] = (hsize_t)mpi_rank;
+                else if (j == (DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_SPACE_RANK - 1))
+                    /* Fastest changing dimension */
+                    start[j] = (i * chunk_dims[j]) % dims[j];
+                else
+                    start[j] = ((i * chunk_dims[j + 1]) / dims[j + 1]) * (chunk_dims[j]);
+            }
+
+            /*
+             * Adjust file dataspace selection for next chunk.
+             */
+            if (H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, NULL, count, NULL) < 0) {
+                H5_FAILED();
+                HDprintf("    failed to set hyperslab selection\n");
+                goto error;
+            }
+
+            for (j = 0; j < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE; j++)
+                for (k = 0; k < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE; k++)
+                    read_buf[j][k] = 0;
+
+            if (H5Dread(dset_id, DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_DTYPE, mspace_id,
+                        fspace_id, H5P_DEFAULT, read_buf) < 0) {
+                H5_FAILED();
+                HDprintf("    couldn't read from dataset '%s'\n",
+                         DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_DSET_NAME);
+                goto error;
+            }
+
+            /* Expected value: linearized position within the chunk, plus the
+             * chunk index, plus a per-rank offset, plus the iteration count. */
+            for (j = 0; j < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE; j++) {
+                for (k = 0; k < DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE; k++) {
+                    size_t val =
+                        ((j * DATASET_MULTI_CHUNK_OVERWRITE_DIFF_SPACE_READ_TEST_READ_BUF_DIMSIZE) + k + i) +
+                        ((hsize_t)mpi_rank * n_chunks_per_rank) + niter;
+
+                    if (read_buf[j][k] != (int)val) {
+                        H5_FAILED();
+                        HDprintf("    data verification failed for chunk %lld\n", (long long)i);
+                        goto error;
+                    }
+                }
+            }
+        }
+
+        if (fspace_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Sclose(fspace_id);
+            }
+            H5E_END_TRY;
+            fspace_id = H5I_INVALID_HID;
+        }
+        if (dset_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Dclose(dset_id);
+            }
+            H5E_END_TRY;
+            dset_id = H5I_INVALID_HID;
+        }
+
+        if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+            H5_FAILED();
+            HDprintf("    MPI_Barrier failed\n");
+            goto error;
+        }
+    }
+
+    if (chunk_dims) {
+        HDfree(chunk_dims);
+        chunk_dims = NULL;
+    }
+
+    if (dims) {
+        HDfree(dims);
+        dims = NULL;
+    }
+
+    if (write_buf) {
+        HDfree(write_buf);
+        write_buf = NULL;
+    }
+
+    if (H5Sclose(mspace_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(group_id) < 0)
+        TEST_ERROR;
+    if (H5Gclose(container_group) < 0)
+        TEST_ERROR;
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        if (write_buf)
+            HDfree(write_buf);
+        if (chunk_dims)
+            HDfree(chunk_dims);
+        if (dims)
+            HDfree(dims);
+        H5Pclose(dcpl_id);
+        H5Sclose(mspace_id);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Gclose(group_id);
+        H5Gclose(container_group);
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+int
+H5_api_dataset_test_parallel(void)
+{
+    int    nerrors = 0;
+    size_t idx;
+
+    /* Print the test-suite banner once, from rank 0 only */
+    if (MAINPROCESS) {
+        HDprintf("**********************************************\n");
+        HDprintf("* *\n");
+        HDprintf("* API Parallel Dataset Tests *\n");
+        HDprintf("* *\n");
+        HDprintf("**********************************************\n\n");
+    }
+
+    /* Run each registered parallel dataset test, counting failures.
+     * A barrier after every test keeps the ranks in lockstep. */
+    for (idx = 0; idx < ARRAY_LENGTH(par_dataset_tests); idx++) {
+        if (par_dataset_tests[idx]())
+            nerrors++;
+
+        if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+            if (MAINPROCESS)
+                HDprintf("    MPI_Barrier() failed!\n");
+        }
+    }
+
+    if (MAINPROCESS)
+        HDprintf("\n");
+
+    return nerrors;
+}
diff --git a/testpar/API/H5_api_dataset_test_parallel.h b/testpar/API/H5_api_dataset_test_parallel.h
new file mode 100644
index 0000000..1e2cbd0
--- /dev/null
+++ b/testpar/API/H5_api_dataset_test_parallel.h
@@ -0,0 +1,20 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_DATASET_TEST_PARALLEL_H_
+#define H5_API_DATASET_TEST_PARALLEL_H_
+
+#include "H5_api_test_parallel.h"
+
+int H5_api_dataset_test_parallel(void);
+
+#endif /* H5_API_DATASET_TEST_PARALLEL_H_ */
diff --git a/testpar/API/H5_api_datatype_test_parallel.c b/testpar/API/H5_api_datatype_test_parallel.c
new file mode 100644
index 0000000..7d090c0
--- /dev/null
+++ b/testpar/API/H5_api_datatype_test_parallel.c
@@ -0,0 +1,47 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_datatype_test_parallel.h"
+
+/*
+ * The array of parallel datatype tests to be performed.
+ * Currently a placeholder: no parallel-specific datatype tests exist yet,
+ * so the array holds a single NULL sentinel (ARRAY_LENGTH is still 1).
+ */
+static int (*par_datatype_tests[])(void) = {NULL};
+
+int
+H5_api_datatype_test_parallel(void)
+{
+    size_t i;
+    int    nerrors;
+
+    /* Print the test-suite banner once, from rank 0 only */
+    if (MAINPROCESS) {
+        HDprintf("**********************************************\n");
+        HDprintf("* *\n");
+        HDprintf("* API Parallel Datatype Tests *\n");
+        HDprintf("* *\n");
+        HDprintf("**********************************************\n\n");
+    }
+
+    for (i = 0, nerrors = 0; i < ARRAY_LENGTH(par_datatype_tests); i++) {
+        /* Test invocation intentionally disabled: par_datatype_tests contains
+         * only a NULL placeholder, so the loop body only performs the barrier.
+         * Re-enable the call below once real tests are registered. */
+        /* nerrors += (*par_datatype_tests[i])() ? 1 : 0; */
+
+        if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+            if (MAINPROCESS)
+                HDprintf("    MPI_Barrier() failed!\n");
+        }
+    }
+
+    if (MAINPROCESS)
+        HDprintf("\n");
+
+    /* Always 0 while no tests are enabled */
+    return nerrors;
+}
diff --git a/testpar/API/H5_api_datatype_test_parallel.h b/testpar/API/H5_api_datatype_test_parallel.h
new file mode 100644
index 0000000..0a2ba50
--- /dev/null
+++ b/testpar/API/H5_api_datatype_test_parallel.h
@@ -0,0 +1,20 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_DATATYPE_TEST_PARALLEL_H_
+#define H5_API_DATATYPE_TEST_PARALLEL_H_
+
+#include "H5_api_test_parallel.h"
+
+int H5_api_datatype_test_parallel(void);
+
+#endif /* H5_API_DATATYPE_TEST_PARALLEL_H_ */
diff --git a/testpar/API/H5_api_file_test_parallel.c b/testpar/API/H5_api_file_test_parallel.c
new file mode 100644
index 0000000..20fb2ba
--- /dev/null
+++ b/testpar/API/H5_api_file_test_parallel.c
@@ -0,0 +1,367 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_file_test_parallel.h"
+
+/* Forward declarations for the parallel file tests defined below */
+static int test_create_file(void);
+static int test_open_file(void);
+static int test_split_comm_file_access(void);
+
+/*
+ * The array of parallel file tests to be performed.
+ * Run in order by H5_api_file_test_parallel().
+ */
+static int (*par_file_tests[])(void) = {
+    test_create_file,
+    test_open_file,
+    test_split_comm_file_access,
+};
+
+/*
+ * A test to ensure that a file can be created in parallel.
+ * All ranks collectively create (truncate) the same file through an
+ * MPI-IO FAPL on MPI_COMM_WORLD, then close it. The file is removed
+ * later by cleanup_files(). Returns 0 on success, 1 on failure.
+ */
+#define FILE_CREATE_TEST_FILENAME "test_file_parallel.h5"
+static int
+test_create_file(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t fapl_id = H5I_INVALID_HID;
+
+    TESTING("H5Fcreate");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* Collective FAPL on the world communicator (TRUE = use collective metadata ops) */
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    if ((file_id = H5Fcreate(FILE_CREATE_TEST_FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id)) < 0) {
+        H5_FAILED();
+        HDprintf("    couldn't create file '%s'\n", FILE_CREATE_TEST_FILENAME);
+        goto error;
+    }
+
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+    if (H5Fclose(file_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    /* Best-effort cleanup; errors suppressed inside the TRY block */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * A test to ensure that a file can be opened in parallel.
+ * Opens the shared test file (H5_api_test_parallel_filename) first in
+ * read-only mode, then in read-write mode, as two multipart sub-tests.
+ * Returns 0 on success, 1 on failure.
+ */
+static int
+test_open_file(void)
+{
+    hid_t file_id = H5I_INVALID_HID;
+    hid_t fapl_id = H5I_INVALID_HID;
+
+    TESTING_MULTIPART("H5Fopen");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file aren't supported with this connector\n");
+        return 0;
+    }
+
+    TESTING_2("test setup");
+
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    BEGIN_MULTIPART
+    {
+        PART_BEGIN(H5Fopen_rdonly)
+        {
+            TESTING_2("H5Fopen in read-only mode");
+
+            if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDONLY, fapl_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    unable to open file '%s' in read-only mode\n", H5_api_test_parallel_filename);
+                PART_ERROR(H5Fopen_rdonly);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Fopen_rdonly);
+
+        /* Close between parts so the next open starts from a clean state,
+         * even if the previous part failed partway through */
+        if (file_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Fclose(file_id);
+            }
+            H5E_END_TRY;
+            file_id = H5I_INVALID_HID;
+        }
+
+        PART_BEGIN(H5Fopen_rdwrite)
+        {
+            TESTING_2("H5Fopen in read-write mode");
+
+            if ((file_id = H5Fopen(H5_api_test_parallel_filename, H5F_ACC_RDWR, fapl_id)) < 0) {
+                H5_FAILED();
+                HDprintf("    unable to open file '%s' in read-write mode\n", H5_api_test_parallel_filename);
+                PART_ERROR(H5Fopen_rdwrite);
+            }
+
+            PASSED();
+        }
+        PART_END(H5Fopen_rdwrite);
+
+        if (file_id >= 0) {
+            H5E_BEGIN_TRY
+            {
+                H5Fclose(file_id);
+            }
+            H5E_END_TRY;
+            file_id = H5I_INVALID_HID;
+        }
+
+        /*
+         * XXX: SWMR open flags
+         */
+    }
+    END_MULTIPART;
+
+    TESTING_2("test cleanup");
+
+    if (H5Pclose(fapl_id) < 0)
+        TEST_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    return 1;
+}
+
+/*
+ * Tests file access by a communicator other than MPI_COMM_WORLD.
+ *
+ * Splits MPI_COMM_WORLD into two groups, where one (even_comm) contains
+ * the original processes of even ranks. The other (odd_comm) contains
+ * the original processes of odd ranks. Processes in even_comm create a
+ * file, then close it, using even_comm. Processes in odd_comm just do
+ * a barrier using odd_comm. Then they all do a barrier using MPI_COMM_WORLD.
+ * If the file creation and close does not do correct collective action
+ * according to the communicator argument, the processes will freeze up
+ * sooner or later due to MPI_Barrier calls being mixed up.
+ */
+#define SPLIT_FILE_COMM_TEST_FILE_NAME "split_comm_file.h5"
+static int
+test_split_comm_file_access(void)
+{
+    /* Initialize to MPI_COMM_NULL so the error path can tell whether the
+     * split succeeded and the communicator still needs to be freed */
+    MPI_Comm comm = MPI_COMM_NULL;
+    MPI_Info info = MPI_INFO_NULL;
+    hid_t    file_id = H5I_INVALID_HID;
+    hid_t    fapl_id = H5I_INVALID_HID;
+    int      is_old;
+    int      newrank;
+    int      err_occurred = 0;
+
+    TESTING("file access with a split communicator");
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
+        SKIPPED();
+        HDprintf("    API functions for basic file aren't supported with this connector\n");
+        return 0;
+    }
+
+    /* set up MPI parameters; odd ranks go into one sub-communicator,
+     * even ranks into the other */
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+    is_old = mpi_rank % 2;
+    if (MPI_SUCCESS != MPI_Comm_split(MPI_COMM_WORLD, is_old, mpi_rank, &comm)) {
+        H5_FAILED();
+        HDprintf("    failed to split communicator!\n");
+        goto error;
+    }
+    MPI_Comm_rank(comm, &newrank);
+
+    if (is_old) {
+        /* odd-rank processes: just participate in a sub-communicator barrier */
+        if (MPI_SUCCESS != MPI_Barrier(comm)) {
+            err_occurred = 1;
+            goto access_end;
+        }
+    }
+    else {
+        /* even-rank processes: collectively create, close and delete a file
+         * using only the even sub-communicator */
+        int sub_mpi_rank; /* rank in the sub-comm */
+
+        MPI_Comm_rank(comm, &sub_mpi_rank);
+
+        /* setup file access template */
+        if ((fapl_id = create_mpi_fapl(comm, info, TRUE)) < 0) {
+            err_occurred = 1;
+            goto access_end;
+        }
+
+        /* create the file collectively */
+        if ((file_id = H5Fcreate(SPLIT_FILE_COMM_TEST_FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id)) < 0) {
+            H5_FAILED();
+            HDprintf("    couldn't create file '%s'\n", SPLIT_FILE_COMM_TEST_FILE_NAME);
+            err_occurred = 1;
+            goto access_end;
+        }
+
+        /* close the file */
+        if (H5Fclose(file_id) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to close file '%s'\n", SPLIT_FILE_COMM_TEST_FILE_NAME);
+            err_occurred = 1;
+            goto access_end;
+        }
+
+        /* delete the test file */
+        if (H5Fdelete(SPLIT_FILE_COMM_TEST_FILE_NAME, fapl_id) < 0) {
+            H5_FAILED();
+            HDprintf("    failed to delete file '%s'\n", SPLIT_FILE_COMM_TEST_FILE_NAME);
+            err_occurred = 1;
+            goto access_end;
+        }
+
+        /* Release file-access template */
+        if (H5Pclose(fapl_id) < 0) {
+            err_occurred = 1;
+            goto access_end;
+        }
+    }
+access_end:
+
+    /* Get the collective results about whether an error occurred */
+    if (MPI_SUCCESS != MPI_Allreduce(MPI_IN_PLACE, &err_occurred, 1, MPI_INT, MPI_LOR, MPI_COMM_WORLD)) {
+        H5_FAILED();
+        HDprintf("    MPI_Allreduce failed\n");
+        goto error;
+    }
+
+    if (err_occurred) {
+        H5_FAILED();
+        HDprintf("    an error occurred on only some ranks during split-communicator file access! - "
+                 "collectively failing\n");
+        goto error;
+    }
+
+    /* MPI_Comm_free resets comm to MPI_COMM_NULL, so the error path below
+     * won't double-free it if a later step fails */
+    if (MPI_SUCCESS != MPI_Comm_free(&comm)) {
+        H5_FAILED();
+        HDprintf("    MPI_Comm_free failed\n");
+        goto error;
+    }
+
+    if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+        H5_FAILED();
+        HDprintf("    MPI_Barrier on MPI_COMM_WORLD failed\n");
+        goto error;
+    }
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(fapl_id);
+        H5Fclose(file_id);
+    }
+    H5E_END_TRY;
+
+    /* Fix: previously the split communicator was leaked whenever the error
+     * path was taken after a successful MPI_Comm_split */
+    if (comm != MPI_COMM_NULL)
+        MPI_Comm_free(&comm);
+
+    return 1;
+}
+
+/*
+ * Cleanup temporary test files.
+ * Deletes the file created by test_create_file() through a collective
+ * MPI-IO FAPL. Failures are reported (rank 0 only) but not fatal, since
+ * this runs after the tests have already been scored.
+ */
+static void
+cleanup_files(void)
+{
+    hid_t fapl_id = H5I_INVALID_HID;
+
+    if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, TRUE)) < 0) {
+        if (MAINPROCESS)
+            HDprintf("    failed to create FAPL for deleting test files\n");
+        return;
+    }
+
+    /* Return value intentionally ignored: the file may already be gone */
+    H5Fdelete(FILE_CREATE_TEST_FILENAME, fapl_id);
+
+    /* The below file is deleted as part of the test */
+    /* H5Fdelete(SPLIT_FILE_COMM_TEST_FILE_NAME, H5P_DEFAULT); */
+
+    if (H5Pclose(fapl_id) < 0) {
+        if (MAINPROCESS)
+            HDprintf("    failed to close FAPL used for deleting test files\n");
+        return;
+    }
+}
+
+int
+H5_api_file_test_parallel(void)
+{
+    int    nerrors = 0;
+    size_t idx;
+
+    /* Print the test-suite banner once, from rank 0 only */
+    if (MAINPROCESS) {
+        HDprintf("**********************************************\n");
+        HDprintf("* *\n");
+        HDprintf("* API Parallel File Tests *\n");
+        HDprintf("* *\n");
+        HDprintf("**********************************************\n\n");
+    }
+
+    /* Run each registered parallel file test, counting failures.
+     * A barrier after every test keeps the ranks in lockstep. */
+    for (idx = 0; idx < ARRAY_LENGTH(par_file_tests); idx++) {
+        if (par_file_tests[idx]())
+            nerrors++;
+
+        if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+            if (MAINPROCESS)
+                HDprintf("    MPI_Barrier() failed!\n");
+        }
+    }
+
+    if (MAINPROCESS) {
+        HDprintf("\n");
+        HDprintf("Cleaning up testing files\n");
+    }
+
+    cleanup_files();
+
+    return nerrors;
+}
diff --git a/testpar/API/H5_api_file_test_parallel.h b/testpar/API/H5_api_file_test_parallel.h
new file mode 100644
index 0000000..aac9800
--- /dev/null
+++ b/testpar/API/H5_api_file_test_parallel.h
@@ -0,0 +1,20 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_FILE_TEST_PARALLEL_H_
+#define H5_API_FILE_TEST_PARALLEL_H_
+
+#include "H5_api_test_parallel.h"
+
+int H5_api_file_test_parallel(void);
+
+#endif /* H5_API_FILE_TEST_PARALLEL_H_ */
diff --git a/testpar/API/H5_api_group_test_parallel.c b/testpar/API/H5_api_group_test_parallel.c
new file mode 100644
index 0000000..d6d8f18
--- /dev/null
+++ b/testpar/API/H5_api_group_test_parallel.c
@@ -0,0 +1,47 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_group_test_parallel.h"
+
+/*
+ * Table of parallel group API tests. Currently empty; a barrier is
+ * still issued per table slot so ranks stay synchronized once tests
+ * are added.
+ */
+static int (*par_group_tests[])(void) = {NULL};
+
+int
+H5_api_group_test_parallel(void)
+{
+    int    failures = 0;
+    size_t idx;
+
+    if (MAINPROCESS) {
+        HDprintf("**********************************************\n");
+        HDprintf("* *\n");
+        HDprintf("* API Parallel Group Tests *\n");
+        HDprintf("* *\n");
+        HDprintf("**********************************************\n\n");
+    }
+
+    for (idx = 0; idx < ARRAY_LENGTH(par_group_tests); idx++) {
+        /* failures += (*par_group_tests[idx])() ? 1 : 0; */
+
+        if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+            if (MAINPROCESS)
+                HDprintf(" MPI_Barrier() failed!\n");
+        }
+    }
+
+    if (MAINPROCESS)
+        HDprintf("\n");
+
+    return failures;
+}
diff --git a/testpar/API/H5_api_group_test_parallel.h b/testpar/API/H5_api_group_test_parallel.h
new file mode 100644
index 0000000..87dd24f
--- /dev/null
+++ b/testpar/API/H5_api_group_test_parallel.h
@@ -0,0 +1,20 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_GROUP_TEST_PARALLEL_H_
+#define H5_API_GROUP_TEST_PARALLEL_H_
+
+#include "H5_api_test_parallel.h"
+
+int H5_api_group_test_parallel(void);
+
+#endif /* H5_API_GROUP_TEST_PARALLEL_H_ */
diff --git a/testpar/API/H5_api_link_test_parallel.c b/testpar/API/H5_api_link_test_parallel.c
new file mode 100644
index 0000000..fb865a0
--- /dev/null
+++ b/testpar/API/H5_api_link_test_parallel.c
@@ -0,0 +1,47 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_link_test_parallel.h"
+
+/*
+ * Table of parallel link API tests (empty for now). The driver below
+ * mirrors the other suite drivers: banner on rank 0, one barrier per
+ * table slot, error count returned to the caller.
+ */
+static int (*par_link_tests[])(void) = {NULL};
+
+int
+H5_api_link_test_parallel(void)
+{
+    size_t test_idx;
+    int    n_failed = 0;
+
+    if (MAINPROCESS) {
+        HDprintf("**********************************************\n");
+        HDprintf("* *\n");
+        HDprintf("* API Parallel Link Tests *\n");
+        HDprintf("* *\n");
+        HDprintf("**********************************************\n\n");
+    }
+
+    for (test_idx = 0; test_idx < ARRAY_LENGTH(par_link_tests); test_idx++) {
+        /* n_failed += (*par_link_tests[test_idx])() ? 1 : 0; */
+
+        if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+            if (MAINPROCESS)
+                HDprintf(" MPI_Barrier() failed!\n");
+        }
+    }
+
+    if (MAINPROCESS)
+        HDprintf("\n");
+
+    return n_failed;
+}
diff --git a/testpar/API/H5_api_link_test_parallel.h b/testpar/API/H5_api_link_test_parallel.h
new file mode 100644
index 0000000..dbf0fc7
--- /dev/null
+++ b/testpar/API/H5_api_link_test_parallel.h
@@ -0,0 +1,20 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_LINK_TEST_PARALLEL_H_
+#define H5_API_LINK_TEST_PARALLEL_H_
+
+#include "H5_api_test_parallel.h"
+
+int H5_api_link_test_parallel(void);
+
+#endif /* H5_API_LINK_TEST_PARALLEL_H_ */
diff --git a/testpar/API/H5_api_misc_test_parallel.c b/testpar/API/H5_api_misc_test_parallel.c
new file mode 100644
index 0000000..0dc85eb
--- /dev/null
+++ b/testpar/API/H5_api_misc_test_parallel.c
@@ -0,0 +1,47 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_misc_test_parallel.h"
+
+/*
+ * Table of parallel miscellaneous API tests (none registered yet).
+ * The per-slot barrier keeps ranks synchronized for future tests.
+ */
+static int (*par_misc_tests[])(void) = {NULL};
+
+int
+H5_api_misc_test_parallel(void)
+{
+    size_t t;
+    int    error_count = 0;
+
+    if (MAINPROCESS) {
+        HDprintf("**********************************************\n");
+        HDprintf("* *\n");
+        HDprintf("* API Parallel Miscellaneous Tests *\n");
+        HDprintf("* *\n");
+        HDprintf("**********************************************\n\n");
+    }
+
+    for (t = 0; t < ARRAY_LENGTH(par_misc_tests); t++) {
+        /* error_count += (*par_misc_tests[t])() ? 1 : 0; */
+
+        if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+            if (MAINPROCESS)
+                HDprintf(" MPI_Barrier() failed!\n");
+        }
+    }
+
+    if (MAINPROCESS)
+        HDprintf("\n");
+
+    return error_count;
+}
diff --git a/testpar/API/H5_api_misc_test_parallel.h b/testpar/API/H5_api_misc_test_parallel.h
new file mode 100644
index 0000000..84553a9
--- /dev/null
+++ b/testpar/API/H5_api_misc_test_parallel.h
@@ -0,0 +1,20 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_MISC_TEST_PARALLEL_H_
+#define H5_API_MISC_TEST_PARALLEL_H_
+
+#include "H5_api_test_parallel.h"
+
+int H5_api_misc_test_parallel(void);
+
+#endif /* H5_API_MISC_TEST_PARALLEL_H_ */
diff --git a/testpar/API/H5_api_object_test_parallel.c b/testpar/API/H5_api_object_test_parallel.c
new file mode 100644
index 0000000..a264eb2
--- /dev/null
+++ b/testpar/API/H5_api_object_test_parallel.c
@@ -0,0 +1,47 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_object_test_parallel.h"
+
+/*
+ * Table of parallel object API tests (no tests registered yet).
+ * Driver shape matches the other parallel suite drivers.
+ */
+static int (*par_object_tests[])(void) = {NULL};
+
+int
+H5_api_object_test_parallel(void)
+{
+    size_t slot;
+    int    failed = 0;
+
+    if (MAINPROCESS) {
+        HDprintf("**********************************************\n");
+        HDprintf("* *\n");
+        HDprintf("* API Parallel Object Tests *\n");
+        HDprintf("* *\n");
+        HDprintf("**********************************************\n\n");
+    }
+
+    for (slot = 0; slot < ARRAY_LENGTH(par_object_tests); slot++) {
+        /* failed += (*par_object_tests[slot])() ? 1 : 0; */
+
+        if (MPI_SUCCESS != MPI_Barrier(MPI_COMM_WORLD)) {
+            if (MAINPROCESS)
+                HDprintf(" MPI_Barrier() failed!\n");
+        }
+    }
+
+    if (MAINPROCESS)
+        HDprintf("\n");
+
+    return failed;
+}
diff --git a/testpar/API/H5_api_object_test_parallel.h b/testpar/API/H5_api_object_test_parallel.h
new file mode 100644
index 0000000..6a8569f
--- /dev/null
+++ b/testpar/API/H5_api_object_test_parallel.h
@@ -0,0 +1,20 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_OBJECT_TEST_PARALLEL_H_
+#define H5_API_OBJECT_TEST_PARALLEL_H_
+
+#include "H5_api_test_parallel.h"
+
+int H5_api_object_test_parallel(void);
+
+#endif /* H5_API_OBJECT_TEST_PARALLEL_H_ */
diff --git a/testpar/API/H5_api_test_parallel.c b/testpar/API/H5_api_test_parallel.c
new file mode 100644
index 0000000..45fa4ec
--- /dev/null
+++ b/testpar/API/H5_api_test_parallel.c
@@ -0,0 +1,338 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5_api_test_util.h"
+#include "H5_api_test_parallel.h"
+
+#include "H5_api_attribute_test_parallel.h"
+#include "H5_api_dataset_test_parallel.h"
+#include "H5_api_datatype_test_parallel.h"
+#include "H5_api_file_test_parallel.h"
+#include "H5_api_group_test_parallel.h"
+#include "H5_api_link_test_parallel.h"
+#include "H5_api_misc_test_parallel.h"
+#include "H5_api_object_test_parallel.h"
+#ifdef H5_API_TEST_HAVE_ASYNC
+#include "H5_api_async_test_parallel.h"
+#endif
+
+char H5_api_test_parallel_filename[H5_API_TEST_FILENAME_MAX_LENGTH];
+
+const char *test_path_prefix;
+
+size_t n_tests_run_g;
+size_t n_tests_passed_g;
+size_t n_tests_failed_g;
+size_t n_tests_skipped_g;
+
+int mpi_size;
+int mpi_rank;
+
+/* X-macro to define the following for each test:
+ * - enum type
+ * - name
+ * - test function
+ * - enabled by default
+ */
+#ifdef H5_API_TEST_HAVE_ASYNC
+#define H5_API_PARALLEL_TESTS \
+ X(H5_API_TEST_NULL, "", NULL, 0) \
+ X(H5_API_TEST_FILE, "file", H5_api_file_test_parallel, 1) \
+ X(H5_API_TEST_GROUP, "group", H5_api_group_test_parallel, 1) \
+ X(H5_API_TEST_DATASET, "dataset", H5_api_dataset_test_parallel, 1) \
+ X(H5_API_TEST_DATATYPE, "datatype", H5_api_datatype_test_parallel, 1) \
+ X(H5_API_TEST_ATTRIBUTE, "attribute", H5_api_attribute_test_parallel, 1) \
+ X(H5_API_TEST_LINK, "link", H5_api_link_test_parallel, 1) \
+ X(H5_API_TEST_OBJECT, "object", H5_api_object_test_parallel, 1) \
+ X(H5_API_TEST_MISC, "misc", H5_api_misc_test_parallel, 1) \
+ X(H5_API_TEST_ASYNC, "async", H5_api_async_test_parallel, 1) \
+ X(H5_API_TEST_MAX, "", NULL, 0)
+#else
+#define H5_API_PARALLEL_TESTS \
+ X(H5_API_TEST_NULL, "", NULL, 0) \
+ X(H5_API_TEST_FILE, "file", H5_api_file_test_parallel, 1) \
+ X(H5_API_TEST_GROUP, "group", H5_api_group_test_parallel, 1) \
+ X(H5_API_TEST_DATASET, "dataset", H5_api_dataset_test_parallel, 1) \
+ X(H5_API_TEST_DATATYPE, "datatype", H5_api_datatype_test_parallel, 1) \
+ X(H5_API_TEST_ATTRIBUTE, "attribute", H5_api_attribute_test_parallel, 1) \
+ X(H5_API_TEST_LINK, "link", H5_api_link_test_parallel, 1) \
+ X(H5_API_TEST_OBJECT, "object", H5_api_object_test_parallel, 1) \
+ X(H5_API_TEST_MISC, "misc", H5_api_misc_test_parallel, 1) \
+ X(H5_API_TEST_MAX, "", NULL, 0)
+#endif
+
+#define X(a, b, c, d) a,
+enum H5_api_test_type { H5_API_PARALLEL_TESTS };
+#undef X
+#define X(a, b, c, d) b,
+static const char *const H5_api_test_name[] = {H5_API_PARALLEL_TESTS};
+#undef X
+#define X(a, b, c, d) c,
+static int (*H5_api_test_func[])(void) = {H5_API_PARALLEL_TESTS};
+#undef X
+#define X(a, b, c, d) d,
+static int H5_api_test_enabled[] = {H5_API_PARALLEL_TESTS};
+#undef X
+
+/*
+ * Map a test-name string ("file", "group", ...) onto its
+ * H5_api_test_type enum value. An unrecognized name maps to
+ * H5_API_TEST_NULL.
+ */
+static enum H5_api_test_type
+H5_api_test_name_to_type(const char *test_name)
+{
+    enum H5_api_test_type type = 0;
+
+    for (; type != H5_API_TEST_MAX; type++)
+        if (0 == strcmp(H5_api_test_name[type], test_name))
+            break;
+
+    return (type == H5_API_TEST_MAX) ? H5_API_TEST_NULL : type;
+}
+
+/*
+ * Invoke every enabled API test suite in enum order. Individual suite
+ * return values are discarded; pass/fail totals are tracked through
+ * the global n_tests_* counters.
+ */
+static void
+H5_api_test_run(void)
+{
+    enum H5_api_test_type test;
+
+    for (test = H5_API_TEST_FILE; test < H5_API_TEST_MAX; test++) {
+        if (!H5_api_test_enabled[test])
+            continue;
+        (void)H5_api_test_func[test]();
+    }
+}
+
+/*
+ * Create a file access property list configured for MPI-IO access on
+ * `comm`/`info`. Collective metadata reads are set per `coll_md_read`;
+ * collective metadata writes are always enabled.
+ *
+ * Returns the FAPL id on success or H5I_INVALID_HID on failure.
+ */
+hid_t
+create_mpi_fapl(MPI_Comm comm, MPI_Info info, hbool_t coll_md_read)
+{
+    hid_t ret_pl = H5I_INVALID_HID;
+
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    if ((ret_pl = H5Pcreate(H5P_FILE_ACCESS)) < 0)
+        goto error;
+
+    if (H5Pset_fapl_mpio(ret_pl, comm, info) < 0)
+        goto error;
+    if (H5Pset_all_coll_metadata_ops(ret_pl, coll_md_read) < 0)
+        goto error;
+    if (H5Pset_coll_metadata_write(ret_pl, TRUE) < 0)
+        goto error;
+
+    return ret_pl;
+
+error:
+    /* Don't leak the property list when one of the H5Pset_* calls fails
+     * after H5Pcreate succeeded; H5Pclose on H5I_INVALID_HID is a no-op
+     * error suppressed by H5E_BEGIN_TRY. */
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(ret_pl);
+    }
+    H5E_END_TRY;
+
+    return H5I_INVALID_HID;
+} /* end create_mpi_fapl() */
+
+/*
+ * Generates random dimensions for a dataspace. The first dimension
+ * is always `mpi_size` to allow for convenient subsetting; the rest
+ * of the dimensions are randomized.
+ *
+ * Rank 0 draws the random sizes and broadcasts them so that every rank
+ * sees an identical dataspace. On success, *dims_out points to a
+ * malloc'ed array of `space_rank` entries which the caller must free;
+ * returns 0 on success, -1 on failure (dims freed internally).
+ *
+ * NOTE(review): the broadcast assumes hsize_t has the layout of
+ * MPI_UNSIGNED_LONG_LONG -- confirm on platforms where hsize_t is not
+ * a 64-bit unsigned integer.
+ */
+int
+generate_random_parallel_dimensions(int space_rank, hsize_t **dims_out)
+{
+    hsize_t *dims = NULL;
+    size_t   i;
+
+    if (space_rank <= 0)
+        goto error;
+
+    /* All ranks allocate; only rank 0 fills, the rest receive via Bcast */
+    if (NULL == (dims = HDmalloc((size_t)space_rank * sizeof(hsize_t))))
+        goto error;
+    if (MAINPROCESS) {
+        for (i = 0; i < (size_t)space_rank; i++) {
+            if (i == 0)
+                dims[i] = (hsize_t)mpi_size;
+            else
+                dims[i] = (hsize_t)((rand() % MAX_DIM_SIZE) + 1);
+        }
+    }
+
+    /*
+     * Ensure that the dataset dimensions are uniform across ranks.
+     */
+    if (MPI_SUCCESS != MPI_Bcast(dims, space_rank, MPI_UNSIGNED_LONG_LONG, 0, MPI_COMM_WORLD))
+        goto error;
+
+    *dims_out = dims;
+
+    return 0;
+
+error:
+    if (dims)
+        HDfree(dims);
+
+    return -1;
+}
+
+/*
+ * Driver for the parallel API tests.
+ *
+ * Sequence: initialize MPI, optionally restrict the run to the single
+ * suite named in argv[1], initialize HDF5 collectively, broadcast a
+ * shared random seed from rank 0, query the VOL connector capability
+ * flags, create the shared test container file (rank 0 only), run the
+ * enabled suites, then report min-across-ranks pass/fail statistics.
+ *
+ * Exits with EXIT_SUCCESS if the driver itself completed (individual
+ * test failures are reported via the statistics, not the exit code),
+ * EXIT_FAILURE on driver-level errors.
+ */
+int
+main(int argc, char **argv)
+{
+    const char *vol_connector_name;
+    unsigned    seed;
+    hid_t       fapl_id = H5I_INVALID_HID;
+
+    MPI_Init(&argc, &argv);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+    /* Simple argument checking, TODO can improve that later */
+    if (argc > 1) {
+        enum H5_api_test_type i = H5_api_test_name_to_type(argv[1]);
+        if (i != H5_API_TEST_NULL) {
+            /* Run only specific API test */
+            memset(H5_api_test_enabled, 0, sizeof(H5_api_test_enabled));
+            H5_api_test_enabled[i] = 1;
+        }
+    }
+
+    /*
+     * Make sure that HDF5 is initialized on all MPI ranks before proceeding.
+     * This is important for certain VOL connectors which may require a
+     * collective initialization.
+     */
+    H5open();
+
+    n_tests_run_g     = 0;
+    n_tests_passed_g  = 0;
+    n_tests_failed_g  = 0;
+    n_tests_skipped_g = 0;
+
+    /* Seed is generated on rank 0 only and broadcast below so that all
+     * ranks draw identical random sequences */
+    if (MAINPROCESS) {
+        seed = (unsigned)HDtime(NULL);
+    }
+
+    if (mpi_size > 1) {
+        if (MPI_SUCCESS != MPI_Bcast(&seed, 1, MPI_UNSIGNED, 0, MPI_COMM_WORLD)) {
+            if (MAINPROCESS)
+                HDprintf("Couldn't broadcast test seed\n");
+            goto error;
+        }
+    }
+
+    srand(seed);
+
+    if (NULL == (test_path_prefix = HDgetenv(HDF5_API_TEST_PATH_PREFIX)))
+        test_path_prefix = "";
+
+    HDsnprintf(H5_api_test_parallel_filename, H5_API_TEST_FILENAME_MAX_LENGTH, "%s%s", test_path_prefix,
+               PARALLEL_TEST_FILE_NAME);
+
+    if (NULL == (vol_connector_name = HDgetenv(HDF5_VOL_CONNECTOR))) {
+        if (MAINPROCESS)
+            HDprintf("No VOL connector selected; using native VOL connector\n");
+        vol_connector_name = "native";
+    }
+
+    if (MAINPROCESS) {
+        HDprintf("Running parallel API tests with VOL connector '%s'\n\n", vol_connector_name);
+        HDprintf("Test parameters:\n");
+        HDprintf(" - Test file name: '%s'\n", H5_api_test_parallel_filename);
+        HDprintf(" - Number of MPI ranks: %d\n", mpi_size);
+        HDprintf(" - Test seed: %u\n", seed);
+        HDprintf("\n\n");
+    }
+
+    /* Retrieve the VOL cap flags - work around an HDF5
+     * library issue by creating a FAPL
+     */
+    BEGIN_INDEPENDENT_OP(get_capability_flags)
+    {
+        if ((fapl_id = create_mpi_fapl(MPI_COMM_WORLD, MPI_INFO_NULL, FALSE)) < 0) {
+            if (MAINPROCESS)
+                HDfprintf(stderr, "Unable to create FAPL\n");
+            INDEPENDENT_OP_ERROR(get_capability_flags);
+        }
+
+        vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
+        if (H5Pget_vol_cap_flags(fapl_id, &vol_cap_flags_g) < 0) {
+            if (MAINPROCESS)
+                HDfprintf(stderr, "Unable to retrieve VOL connector capability flags\n");
+            INDEPENDENT_OP_ERROR(get_capability_flags);
+        }
+    }
+    END_INDEPENDENT_OP(get_capability_flags);
+
+    /*
+     * Create the file that will be used for all of the tests,
+     * except for those which test file creation.
+     */
+    BEGIN_INDEPENDENT_OP(create_test_container)
+    {
+        if (MAINPROCESS) {
+            if (create_test_container(H5_api_test_parallel_filename, vol_cap_flags_g) < 0) {
+                HDprintf("    failed to create testing container file '%s'\n", H5_api_test_parallel_filename);
+                INDEPENDENT_OP_ERROR(create_test_container);
+            }
+        }
+    }
+    END_INDEPENDENT_OP(create_test_container);
+
+    /* Run all the tests that are enabled */
+    H5_api_test_run();
+
+    if (MAINPROCESS)
+        HDprintf("Cleaning up testing files\n");
+    H5Fdelete(H5_api_test_parallel_filename, fapl_id);
+
+    if (n_tests_run_g > 0) {
+        /* Some ranks may fail a test that others pass, so reduce with
+         * MPI_MIN to report conservative (minimum) counts.
+         * NOTE(review): the reductions assume size_t matches
+         * MPI_UNSIGNED_LONG_LONG -- confirm on ILP32 platforms. */
+        if (MAINPROCESS)
+            HDprintf("The below statistics are minimum values due to the possibility of some ranks failing a "
+                     "test while others pass:\n");
+
+        if (MPI_SUCCESS != MPI_Allreduce(MPI_IN_PLACE, &n_tests_passed_g, 1, MPI_UNSIGNED_LONG_LONG, MPI_MIN,
+                                         MPI_COMM_WORLD)) {
+            if (MAINPROCESS)
+                HDprintf("    failed to collect consensus about the minimum number of tests that passed -- "
+                         "reporting rank 0's (possibly inaccurate) value\n");
+        }
+
+        if (MAINPROCESS)
+            HDprintf("%s%zu/%zu (%.2f%%) API tests passed across all ranks with VOL connector '%s'\n",
+                     n_tests_passed_g > 0 ? "At least " : "", n_tests_passed_g, n_tests_run_g,
+                     ((double)n_tests_passed_g / (double)n_tests_run_g * 100.0), vol_connector_name);
+
+        if (MPI_SUCCESS != MPI_Allreduce(MPI_IN_PLACE, &n_tests_failed_g, 1, MPI_UNSIGNED_LONG_LONG, MPI_MIN,
+                                         MPI_COMM_WORLD)) {
+            if (MAINPROCESS)
+                HDprintf("    failed to collect consensus about the minimum number of tests that failed -- "
+                         "reporting rank 0's (possibly inaccurate) value\n");
+        }
+
+        if (MAINPROCESS) {
+            HDprintf("%s%zu/%zu (%.2f%%) API tests did not pass across all ranks with VOL connector '%s'\n",
+                     n_tests_failed_g > 0 ? "At least " : "", n_tests_failed_g, n_tests_run_g,
+                     ((double)n_tests_failed_g / (double)n_tests_run_g * 100.0), vol_connector_name);
+
+            HDprintf("%zu/%zu (%.2f%%) API tests were skipped with VOL connector '%s'\n", n_tests_skipped_g,
+                     n_tests_run_g, ((double)n_tests_skipped_g / (double)n_tests_run_g * 100.0),
+                     vol_connector_name);
+        }
+    }
+
+    if (fapl_id >= 0 && H5Pclose(fapl_id) < 0) {
+        if (MAINPROCESS)
+            HDprintf("    failed to close MPI FAPL\n");
+    }
+
+    H5close();
+
+    MPI_Finalize();
+
+    HDexit(EXIT_SUCCESS);
+
+error:
+    H5E_BEGIN_TRY
+    {
+        H5Pclose(fapl_id);
+    }
+    H5E_END_TRY;
+
+    MPI_Finalize();
+
+    HDexit(EXIT_FAILURE);
+}
diff --git a/testpar/API/H5_api_test_parallel.h b/testpar/API/H5_api_test_parallel.h
new file mode 100644
index 0000000..6df83e8
--- /dev/null
+++ b/testpar/API/H5_api_test_parallel.h
@@ -0,0 +1,188 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5_API_TEST_PARALLEL_H
+#define H5_API_TEST_PARALLEL_H
+
+#include <mpi.h>
+
+#include "testpar.h"
+
+#include "H5_api_test.h"
+
+/* Define H5VL_VERSION if not already defined */
+#ifndef H5VL_VERSION
+#define H5VL_VERSION 0
+#endif
+
+/* Define macro to wait forever depending on version */
+#if H5VL_VERSION >= 2
+#define H5_API_TEST_WAIT_FOREVER H5ES_WAIT_FOREVER
+#else
+#define H5_API_TEST_WAIT_FOREVER UINT64_MAX
+#endif
+
+#define PARALLEL_TEST_FILE_NAME "H5_api_test_parallel.h5"
+extern char H5_api_test_parallel_filename[];
+
+#undef TESTING
+#undef TESTING_2
+#undef PASSED
+#undef H5_FAILED
+#undef H5_WARNING
+#undef SKIPPED
+#undef PUTS_ERROR
+#undef TEST_ERROR
+#undef STACK_ERROR
+#undef FAIL_STACK_ERROR
+#undef FAIL_PUTS_ERROR
+#undef TESTING_MULTIPART
+
+#define TESTING(WHAT) \
+ { \
+ if (MAINPROCESS) { \
+ printf("Testing %-62s", WHAT); \
+ fflush(stdout); \
+ } \
+ n_tests_run_g++; \
+ }
+#define TESTING_2(WHAT) \
+ { \
+ if (MAINPROCESS) { \
+ printf(" Testing %-60s", WHAT); \
+ fflush(stdout); \
+ } \
+ n_tests_run_g++; \
+ }
+#define PASSED() \
+ { \
+ if (MAINPROCESS) { \
+ puts(" PASSED"); \
+ fflush(stdout); \
+ } \
+ n_tests_passed_g++; \
+ }
+#define H5_FAILED() \
+ { \
+ if (MAINPROCESS) { \
+ puts("*FAILED*"); \
+ fflush(stdout); \
+ } \
+ n_tests_failed_g++; \
+ }
+#define H5_WARNING() \
+ { \
+ if (MAINPROCESS) { \
+ puts("*WARNING*"); \
+ fflush(stdout); \
+ } \
+ }
+#define SKIPPED() \
+ { \
+ if (MAINPROCESS) { \
+ puts(" -SKIP-"); \
+ fflush(stdout); \
+ } \
+ n_tests_skipped_g++; \
+ }
+#define PUTS_ERROR(s) \
+ { \
+ if (MAINPROCESS) { \
+ puts(s); \
+ AT(); \
+ } \
+ goto error; \
+ }
+#define TEST_ERROR \
+ { \
+ H5_FAILED(); \
+ if (MAINPROCESS) { \
+ AT(); \
+ } \
+ goto error; \
+ }
+#define STACK_ERROR \
+ { \
+ if (MAINPROCESS) { \
+ H5Eprint2(H5E_DEFAULT, stdout); \
+ } \
+ goto error; \
+ }
+#define FAIL_STACK_ERROR \
+ { \
+ H5_FAILED(); \
+ if (MAINPROCESS) { \
+ AT(); \
+ H5Eprint2(H5E_DEFAULT, stdout); \
+ } \
+ goto error; \
+ }
+#define FAIL_PUTS_ERROR(s) \
+ { \
+ H5_FAILED(); \
+ if (MAINPROCESS) { \
+ AT(); \
+ puts(s); \
+ } \
+ goto error; \
+ }
+#define TESTING_MULTIPART(WHAT) \
+ { \
+ if (MAINPROCESS) { \
+ printf("Testing %-62s", WHAT); \
+ HDputs(""); \
+ fflush(stdout); \
+ } \
+ }
+
+/*
+ * Macros to surround an action that will be performed non-collectively. Once the
+ * operation has completed, a consensus will be formed by all ranks on whether the
+ * operation failed.
+ *
+ * Mechanics: BEGIN_INDEPENDENT_OP opens a scope containing a per-operation
+ * failure flag; INDEPENDENT_OP_ERROR sets the flag and jumps to the label
+ * op_<name>_end that END_INDEPENDENT_OP defines, where an MPI_Allreduce with
+ * MPI_LOR turns any single rank's failure into a collective `goto error`.
+ * The macros must be used in matched pairs, and each op_name may appear at
+ * most once per function (the generated label must be unique).
+ */
+#define BEGIN_INDEPENDENT_OP(op_name)                                                                        \
+    {                                                                                                        \
+        hbool_t ind_op_failed = FALSE;                                                                       \
+                                                                                                             \
+        {
+
+/* Closes the scope, forms the cross-rank consensus, and fails all ranks
+ * if any rank recorded a failure. Requires an `error` label in scope. */
+#define END_INDEPENDENT_OP(op_name)                                                                          \
+    }                                                                                                        \
+                                                                                                             \
+    op_##op_name##_end : if (MPI_SUCCESS != MPI_Allreduce(MPI_IN_PLACE, &ind_op_failed, 1, MPI_C_BOOL,       \
+                                                          MPI_LOR, MPI_COMM_WORLD))                          \
+    {                                                                                                        \
+        if (MAINPROCESS)                                                                                     \
+            HDprintf(                                                                                        \
+                " failed to collect consensus about whether non-collective operation was successful\n");     \
+        goto error;                                                                                          \
+    }                                                                                                        \
+                                                                                                             \
+    if (ind_op_failed) {                                                                                     \
+        if (MAINPROCESS)                                                                                     \
+            HDprintf(" failure detected during non-collective operation - all other ranks will now fail "    \
+                     "too\n");                                                                               \
+        goto error;                                                                                          \
+    }                                                                                                        \
+    }
+
+/* Record a local failure and jump to the matching END_INDEPENDENT_OP label */
+#define INDEPENDENT_OP_ERROR(op_name)                                                                        \
+    ind_op_failed = TRUE;                                                                                    \
+    goto op_##op_name##_end;
+
+hid_t create_mpi_fapl(MPI_Comm comm, MPI_Info info, hbool_t coll_md_read);
+int generate_random_parallel_dimensions(int space_rank, hsize_t **dims_out);
+
+extern int mpi_size;
+extern int mpi_rank;
+
+#endif
diff --git a/testpar/API/t_bigio.c b/testpar/API/t_bigio.c
new file mode 100644
index 0000000..3e18c8f
--- /dev/null
+++ b/testpar/API/t_bigio.c
@@ -0,0 +1,1942 @@
+
+#include "hdf5.h"
+#include "testphdf5.h"
+
+#if 0
+#include "H5Dprivate.h" /* For Chunk tests */
+#endif
+
+/* FILENAME and filenames must have the same number of names */
+const char *FILENAME[3] = {"bigio_test.h5", "single_rank_independent_io.h5", NULL};
+
+/* Constants definitions */
+#define MAX_ERR_REPORT 10 /* Maximum number of errors reported */
+
+/* Define some handy debugging shorthands, routines, ... */
+/* debugging tools */
+
+#define MAIN_PROCESS (mpi_rank_g == 0) /* define process 0 as main process */
+
+/* Constants definitions */
+#define RANK 2
+
+#define IN_ORDER 1
+#define OUT_OF_ORDER 2
+
+#define DATASET1 "DSET1"
+#define DATASET2 "DSET2"
+#define DATASET3 "DSET3"
+#define DATASET4 "DSET4"
+#define DXFER_COLLECTIVE_IO 0x1 /* Collective IO*/
+#define DXFER_INDEPENDENT_IO 0x2 /* Independent IO collectively */
+#define DXFER_BIGCOUNT (1 << 29)
+
+#define HYPER 1
+#define POINT 2
+#define ALL 3
+
+/* Dataset data type. Int's can be easily octo dumped. */
+typedef hsize_t B_DATATYPE;
+
+int facc_type = FACC_MPIO; /*Test file access type */
+int dxfer_coll_type = DXFER_COLLECTIVE_IO;
+size_t bigcount = (size_t) /* DXFER_BIGCOUNT */ 1310720;
+int nerrors = 0;
+static int mpi_size_g, mpi_rank_g;
+
+hsize_t space_dim1 = SPACE_DIM1 * 256; // 4096
+hsize_t space_dim2 = SPACE_DIM2;
+
+static void coll_chunktest(const char *filename, int chunk_factor, int select_factor, int api_option,
+ int file_selection, int mem_selection, int mode);
+
+/*
+ * Setup the coordinates for point selection.
+ *
+ * Fills `coords` with (row, col) pairs for every element of the 2-D
+ * hyperslab described by start/count/stride/block. With IN_ORDER the
+ * pairs are written front-to-back; with OUT_OF_ORDER they are written
+ * back-to-front (note the col/row order is swapped in that branch so
+ * each pair still ends up as row-then-col in memory).
+ *
+ * NOTE(review): this duplicates the traversal in point_set() below,
+ * minus the VERBOSE_MED diagnostics -- consider consolidating.
+ */
+static void
+set_coords(hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[], size_t num_points,
+           hsize_t coords[], int order)
+{
+    hsize_t i, j, k = 0, m, n, s1, s2;
+
+    /* k is a cursor into coords[]: starts at the end for OUT_OF_ORDER,
+     * at the beginning for IN_ORDER */
+    if (OUT_OF_ORDER == order)
+        k = (num_points * RANK) - 1;
+    else if (IN_ORDER == order)
+        k = 0;
+
+    s1 = start[0];
+    s2 = start[1];
+
+    for (i = 0; i < count[0]; i++)
+        for (j = 0; j < count[1]; j++)
+            for (m = 0; m < block[0]; m++)
+                for (n = 0; n < block[1]; n++)
+                    if (OUT_OF_ORDER == order) {
+                        coords[k--] = s2 + (stride[1] * j) + n;
+                        coords[k--] = s1 + (stride[0] * i) + m;
+                    }
+                    else if (IN_ORDER == order) {
+                        coords[k++] = s1 + stride[0] * i + m;
+                        coords[k++] = s2 + stride[1] * j + n;
+                    }
+}
+
+/*
+ * Fill a contiguous 2-D data buffer with trivial, position-derived
+ * test values: element (row, col) gets (row + start[0]) * 100 +
+ * (col + start[1] + 1), making mismatches easy to localize.
+ */
+static void
+fill_datasets(hsize_t start[], hsize_t block[], B_DATATYPE *dataset)
+{
+    B_DATATYPE *out = dataset;
+    hsize_t     row, col;
+
+    for (row = 0; row < block[0]; row++)
+        for (col = 0; col < block[1]; col++)
+            *out++ = (B_DATATYPE)((row + start[0]) * 100 + (col + start[1] + 1));
+}
+
+/*
+ * Setup the coordinates for point selection.
+ *
+ * Fills `coords` with (row, col) pairs for every element of the 2-D
+ * hyperslab described by start/count/stride/block (RANK must be 2,
+ * enforced by HDcompile_assert). IN_ORDER writes pairs front-to-back;
+ * OUT_OF_ORDER writes them back-to-front, swapping the per-pair write
+ * order so each pair is still row-then-col in memory. Dumps the
+ * selection parameters and all points when VERBOSE_MED is set.
+ */
+void
+point_set(hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[], size_t num_points,
+          hsize_t coords[], int order)
+{
+    hsize_t i, j, k = 0, m, n, s1, s2;
+
+    HDcompile_assert(RANK == 2);
+
+    /* k is a cursor into coords[]: last slot for OUT_OF_ORDER (filled
+     * backwards), first slot for IN_ORDER (filled forwards) */
+    if (OUT_OF_ORDER == order)
+        k = (num_points * RANK) - 1;
+    else if (IN_ORDER == order)
+        k = 0;
+
+    s1 = start[0];
+    s2 = start[1];
+
+    for (i = 0; i < count[0]; i++)
+        for (j = 0; j < count[1]; j++)
+            for (m = 0; m < block[0]; m++)
+                for (n = 0; n < block[1]; n++)
+                    if (OUT_OF_ORDER == order) {
+                        coords[k--] = s2 + (stride[1] * j) + n;
+                        coords[k--] = s1 + (stride[0] * i) + m;
+                    }
+                    else if (IN_ORDER == order) {
+                        coords[k++] = s1 + stride[0] * i + m;
+                        coords[k++] = s2 + stride[1] * j + n;
+                    }
+
+    if (VERBOSE_MED) {
+        HDprintf("start[]=(%" PRIuHSIZE ", %" PRIuHSIZE "), "
+                 "count[]=(%" PRIuHSIZE ", %" PRIuHSIZE "), "
+                 "stride[]=(%" PRIuHSIZE ", %" PRIuHSIZE "), "
+                 "block[]=(%" PRIuHSIZE ", %" PRIuHSIZE "), "
+                 "total datapoints=%" PRIuHSIZE "\n",
+                 start[0], start[1], count[0], count[1], stride[0], stride[1], block[0], block[1],
+                 block[0] * block[1] * count[0] * count[1]);
+        k = 0;
+        for (i = 0; i < num_points; i++) {
+            HDprintf("(%d, %d)\n", (int)coords[k], (int)coords[k + 1]);
+            k += 2;
+        }
+    }
+}
+
+/*
+ * Dump a 2-D slab of the dataset to stdout, labeling rows and columns
+ * with their global indices (offset by `start`).
+ */
+static void
+dataset_print(hsize_t start[], hsize_t block[], B_DATATYPE *dataset)
+{
+    B_DATATYPE *elem = dataset;
+    hsize_t     row, col;
+
+    /* Column header line */
+    HDprintf("%-8s", "Cols:");
+    for (col = 0; col < block[1]; col++)
+        HDprintf("%3" PRIuHSIZE " ", start[1] + col);
+    HDprintf("\n");
+
+    /* One output line per slab row */
+    for (row = 0; row < block[0]; row++) {
+        HDprintf("Row %2" PRIuHSIZE ": ", row + start[0]);
+        for (col = 0; col < block[1]; col++)
+            HDprintf("%" PRIuHSIZE " ", *elem++);
+        HDprintf("\n");
+    }
+}
+
+/*
+ * Verify that `dataset` matches `original` element by element over the
+ * 2-D slab described by block[], reporting up to MAX_ERR_REPORT
+ * mismatches (all of them under VERBOSE_MED). Returns the number of
+ * mismatched elements found.
+ */
+static int
+verify_data(hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[], B_DATATYPE *dataset,
+            B_DATATYPE *original)
+{
+    hsize_t i, j;
+    int     vrfyerrs;
+
+    /* print it if VERBOSE_MED */
+    if (VERBOSE_MED) {
+        HDprintf("verify_data dumping:::\n");
+        HDprintf("start(%" PRIuHSIZE ", %" PRIuHSIZE "), "
+                 "count(%" PRIuHSIZE ", %" PRIuHSIZE "), "
+                 "stride(%" PRIuHSIZE ", %" PRIuHSIZE "), "
+                 "block(%" PRIuHSIZE ", %" PRIuHSIZE ")\n",
+                 start[0], start[1], count[0], count[1], stride[0], stride[1], block[0], block[1]);
+        HDprintf("original values:\n");
+        dataset_print(start, block, original);
+        HDprintf("compared values:\n");
+        dataset_print(start, block, dataset);
+    }
+
+    vrfyerrs = 0;
+    for (i = 0; i < block[0]; i++) {
+        for (j = 0; j < block[1]; j++) {
+            if (*dataset != *original) {
+                if (vrfyerrs++ < MAX_ERR_REPORT || VERBOSE_MED) {
+                    HDprintf("Dataset Verify failed at [%" PRIuHSIZE "][%" PRIuHSIZE "]"
+                             "(row %" PRIuHSIZE ", col %" PRIuHSIZE "): "
+                             "expect %" PRIuHSIZE ", got %" PRIuHSIZE "\n",
+                             i, j, i + start[0], j + start[1], *(original), *(dataset));
+                }
+            }
+            /* BUG FIX: advance both cursors for every element, not only on
+             * mismatch; previously matching elements left the pointers stuck
+             * on the first pair, so the comparison and the reported positions
+             * were wrong for any slab with at least one matching element. */
+            dataset++;
+            original++;
+        }
+    }
+    if (vrfyerrs > MAX_ERR_REPORT && !VERBOSE_MED)
+        HDprintf("[more errors ...]\n");
+    if (vrfyerrs)
+        HDprintf("%d errors found in verify_data\n", vrfyerrs);
+    return (vrfyerrs);
+}
+
+/* Set up the hyperslab selection parameters (start/count/stride/block,
+ * all rank-2) for this MPI rank, according to the requested selection
+ * `mode`.  Extents are expressed in terms of the file-scope globals
+ * space_dim1 (rows per process) and space_dim2 (columns) -- assumed
+ * defined earlier in this file. */
+static void
+ccslab_set(int mpi_rank, int mpi_size, hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[],
+           int mode)
+{
+
+    switch (mode) {
+
+        case BYROW_CONT:
+            /* Each process takes a contiguous slab of rows:
+             * space_dim1 x space_dim2 unit blocks starting at row
+             * mpi_rank * space_dim1. */
+            block[0]  = 1;
+            block[1]  = 1;
+            stride[0] = 1;
+            stride[1] = 1;
+            count[0]  = space_dim1;
+            count[1]  = space_dim2;
+            start[0]  = (hsize_t)mpi_rank * count[0];
+            start[1]  = 0;
+
+            break;
+
+        case BYROW_DISCONT:
+            /* Each process takes several disjoint unit blocks, spaced
+             * every 3rd row/column within its own band of rows. */
+            block[0]  = 1;
+            block[1]  = 1;
+            stride[0] = 3;
+            stride[1] = 3;
+            count[0]  = space_dim1 / (stride[0] * block[0]);
+            count[1]  = (space_dim2) / (stride[1] * block[1]);
+            start[0]  = space_dim1 * (hsize_t)mpi_rank;
+            start[1]  = 0;
+
+            break;
+
+        case BYROW_SELECTNONE:
+            /* Each process takes a slab of rows; the last process (or the
+             * last two, when mpi_size > 2) selects nothing (count[0] == 0). */
+            block[0]  = 1;
+            block[1]  = 1;
+            stride[0] = 1;
+            stride[1] = 1;
+            count[0]  = ((mpi_rank >= MAX(1, (mpi_size - 2))) ? 0 : space_dim1);
+            count[1]  = space_dim2;
+            start[0]  = (hsize_t)mpi_rank * count[0];
+            start[1]  = 0;
+
+            break;
+
+        case BYROW_SELECTUNBALANCE:
+            /* Unbalanced selection: NOTE(review) the original comment said
+             * "the first one-third of the processes select the top half",
+             * but the condition below actually places the first TWO-thirds
+             * of the ranks near the top of the domain and the remaining
+             * third past the midpoint (1 + space_dim1*mpi_size/2) --
+             * confirm intent against the matching test. */
+
+            block[0]  = 1;
+            count[0]  = 2;
+            stride[0] = (hsize_t)(space_dim1 * (hsize_t)mpi_size / 4 + 1);
+            block[1]  = space_dim2;
+            count[1]  = 1;
+            start[1]  = 0;
+            stride[1] = 1;
+            if ((mpi_rank * 3) < (mpi_size * 2))
+                start[0] = (hsize_t)mpi_rank;
+            else
+                start[0] = 1 + space_dim1 * (hsize_t)mpi_size / 2 + (hsize_t)(mpi_rank - 2 * mpi_size / 3);
+            break;
+
+        case BYROW_SELECTINCHUNK:
+            /* Each process selects exactly one full-width block (one chunk)
+             * at the start of its own row band. */
+
+            block[0]  = 1;
+            count[0]  = 1;
+            start[0]  = (hsize_t)mpi_rank * space_dim1;
+            stride[0] = 1;
+            block[1]  = space_dim2;
+            count[1]  = 1;
+            stride[1] = 1;
+            start[1]  = 0;
+
+            break;
+
+        default:
+            /* Unknown mode. Set it to cover the whole dataset. */
+            block[0]  = space_dim1 * (hsize_t)mpi_size;
+            block[1]  = space_dim2;
+            stride[0] = block[0];
+            stride[1] = block[1];
+            count[0]  = 1;
+            count[1]  = 1;
+            start[0]  = 0;
+            start[1]  = 0;
+
+            break;
+    }
+    if (VERBOSE_MED) {
+        HDprintf("start[]=(%lu,%lu), count[]=(%lu,%lu), stride[]=(%lu,%lu), block[]=(%lu,%lu), total "
+                 "datapoints=%lu\n",
+                 (unsigned long)start[0], (unsigned long)start[1], (unsigned long)count[0],
+                 (unsigned long)count[1], (unsigned long)stride[0], (unsigned long)stride[1],
+                 (unsigned long)block[0], (unsigned long)block[1],
+                 (unsigned long)(block[0] * block[1] * count[0] * count[1]));
+    }
+}
+
+/*
+ * Fill a rank-2 buffer with the trivial pattern (k1 + k2 + i + j).
+ * For a hyperslab-style memory selection each block element is written at
+ * its absolute (row, col) position within a space_dim2-wide row; for an
+ * ALL selection the values are packed contiguously in iteration order.
+ */
+static void
+ccdataset_fill(hsize_t start[], hsize_t stride[], hsize_t count[], hsize_t block[], DATATYPE *dataset,
+               int mem_selection)
+{
+    hsize_t k1, k2, i, j;
+    hsize_t packed = 0; /* running index for the ALL-selection layout */
+
+    /* Walk every element of every selected block and store its value */
+    for (k1 = 0; k1 < count[0]; k1++) {
+        for (i = 0; i < block[0]; i++) {
+            for (k2 = 0; k2 < count[1]; k2++) {
+                for (j = 0; j < block[1]; j++) {
+                    hsize_t idx;
+
+                    if (ALL != mem_selection)
+                        idx = (start[0] + k1 * stride[0] + i) * space_dim2 + start[1] + k2 * stride[1] + j;
+                    else
+                        idx = packed++;
+
+                    dataset[idx] = (DATATYPE)(k1 + k2 + i + j);
+                }
+            }
+        }
+    }
+}
+
+/*
+ * Dump the first block of a DATATYPE slab with row/column headers.
+ * `start` is the block's origin in the file dataspace (labels only),
+ * `block` is its extent; values are read row-major from `dataset`.
+ */
+static void
+ccdataset_print(hsize_t start[], hsize_t block[], DATATYPE *dataset)
+{
+    hsize_t row, col;
+
+    /* Column header line */
+    HDprintf("Print only the first block of the dataset\n");
+    HDprintf("%-8s", "Cols:");
+    for (col = 0; col < block[1]; col++)
+        HDprintf("%3lu ", (unsigned long)(start[1] + col));
+    HDprintf("\n");
+
+    /* One output line per row of the block */
+    for (row = 0; row < block[0]; row++) {
+        HDprintf("Row %2lu: ", (unsigned long)(row + start[0]));
+        for (col = 0; col < block[1]; col++)
+            HDprintf("%03d ", dataset[row * block[1] + col]);
+        HDprintf("\n");
+    }
+}
+
+/*
+ * Verify a read-back buffer against the expected values produced by
+ * ccdataset_fill(), using the same addressing scheme: absolute (row, col)
+ * offsets for hyperslab-style memory selections, packed iteration order
+ * for an ALL selection.  Reports up to MAX_ERR_REPORT mismatches and
+ * returns the total mismatch count (0 == success).
+ */
+static int
+ccdataset_vrfy(hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[], DATATYPE *dataset,
+               DATATYPE *original, int mem_selection)
+{
+    hsize_t   k1, k2, i, j;
+    hsize_t   packed = 0; /* running index for the ALL-selection layout */
+    int       nerrs  = 0;
+    DATATYPE *dataptr, *oriptr;
+
+    /* Dump both buffers first when medium verbosity is on */
+    if (VERBOSE_MED) {
+        HDprintf("dataset_vrfy dumping:::\n");
+        HDprintf("start(%lu, %lu), count(%lu, %lu), stride(%lu, %lu), block(%lu, %lu)\n",
+                 (unsigned long)start[0], (unsigned long)start[1], (unsigned long)count[0],
+                 (unsigned long)count[1], (unsigned long)stride[0], (unsigned long)stride[1],
+                 (unsigned long)block[0], (unsigned long)block[1]);
+        HDprintf("original values:\n");
+        ccdataset_print(start, block, original);
+        HDprintf("compared values:\n");
+        ccdataset_print(start, block, dataset);
+    }
+
+    for (k1 = 0; k1 < count[0]; k1++) {
+        for (i = 0; i < block[0]; i++) {
+            for (k2 = 0; k2 < count[1]; k2++) {
+                for (j = 0; j < block[1]; j++) {
+                    hsize_t idx;
+
+                    if (ALL != mem_selection)
+                        idx = (start[0] + k1 * stride[0] + i) * space_dim2 + start[1] + k2 * stride[1] + j;
+                    else
+                        idx = packed++;
+
+                    dataptr = dataset + idx;
+                    oriptr  = original + idx;
+                    if (*dataptr != *oriptr) {
+                        if (nerrs++ < MAX_ERR_REPORT || VERBOSE_MED) {
+                            HDprintf("Dataset Verify failed at [%lu][%lu]: expect %d, got %d\n",
+                                     (unsigned long)i, (unsigned long)j, *(oriptr), *(dataptr));
+                        }
+                    }
+                }
+            }
+        }
+    }
+    if (nerrs > MAX_ERR_REPORT && !VERBOSE_MED)
+        HDprintf("[more errors ...]\n");
+    if (nerrs)
+        HDprintf("%d errors found in ccdataset_vrfy\n", nerrs);
+    return (nerrs);
+}
+
+/*
+ * Use the parallel HDF5 library to collectively create and write four
+ * large (`bigcount` elements per rank) datasets in one file, exercising:
+ *   Dataset1 - hyperslab selection by ROW
+ *   Dataset2 - hyperslab selection by COL
+ *   Dataset3 - ALL selection on rank 0, NONE on all other ranks
+ *   Dataset4 - point (element) selection
+ * The datasets are read back and verified by dataset_big_read().
+ */
+
+static void
+dataset_big_write(void)
+{
+
+    hid_t       xfer_plist;                /* Dataset transfer properties list */
+    hid_t       sid;                       /* Dataspace ID */
+    hid_t       file_dataspace;            /* File dataspace ID */
+    hid_t       mem_dataspace;             /* memory dataspace ID */
+    hid_t       dataset;
+    hsize_t     dims[RANK];                /* dataset dim sizes */
+    hsize_t     start[RANK];               /* for hyperslab setting */
+    hsize_t     count[RANK], stride[RANK]; /* for hyperslab setting */
+    hsize_t     block[RANK];               /* for hyperslab setting */
+    hsize_t    *coords = NULL;             /* point-selection coordinate list */
+    herr_t      ret;                       /* Generic return value */
+    hid_t       fid;                       /* HDF5 file ID */
+    hid_t       acc_tpl;                   /* File access templates */
+    size_t      num_points;
+    B_DATATYPE *wdata;                     /* write buffer, bigcount elements */
+
+    /* allocate memory for data buffer */
+    wdata = (B_DATATYPE *)HDmalloc(bigcount * sizeof(B_DATATYPE));
+    VRFY_G((wdata != NULL), "wdata malloc succeeded");
+
+    /* setup file access template for MPI-IO access */
+    acc_tpl = H5Pcreate(H5P_FILE_ACCESS);
+    VRFY_G((acc_tpl >= 0), "H5P_FILE_ACCESS");
+    H5Pset_fapl_mpio(acc_tpl, MPI_COMM_WORLD, MPI_INFO_NULL);
+
+    /* create the file collectively */
+    fid = H5Fcreate(FILENAME[0], H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl);
+    VRFY_G((fid >= 0), "H5Fcreate succeeded");
+
+    /* Release file-access template */
+    ret = H5Pclose(acc_tpl);
+    VRFY_G((ret >= 0), "");
+
+    /* ---- Dataset1: each process writes a contiguous slab of rows ---- */
+    if (mpi_rank_g == 0)
+        HDprintf("\nTesting Dataset1 write by ROW\n");
+    /* Create a large dataset */
+    dims[0] = bigcount;
+    dims[1] = (hsize_t)mpi_size_g;
+
+    sid = H5Screate_simple(RANK, dims, NULL);
+    VRFY_G((sid >= 0), "H5Screate_simple succeeded");
+    dataset = H5Dcreate2(fid, DATASET1, H5T_NATIVE_LLONG, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY_G((dataset >= 0), "H5Dcreate2 succeeded");
+    H5Sclose(sid);
+
+    /* One row band per rank; block[0]*block[1] == bigcount elements */
+    block[0]  = dims[0] / (hsize_t)mpi_size_g;
+    block[1]  = dims[1];
+    stride[0] = block[0];
+    stride[1] = block[1];
+    count[0]  = 1;
+    count[1]  = 1;
+    start[0]  = (hsize_t)mpi_rank_g * block[0];
+    start[1]  = 0;
+
+    /* create a file dataspace independently */
+    file_dataspace = H5Dget_space(dataset);
+    VRFY_G((file_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY_G((ret >= 0), "H5Sset_hyperslab succeeded");
+
+    /* create a memory dataspace independently */
+    mem_dataspace = H5Screate_simple(RANK, block, NULL);
+    VRFY_G((mem_dataspace >= 0), "");
+
+    /* fill the local slab with some trivial data */
+    fill_datasets(start, block, wdata);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+        dataset_print(start, block, wdata);
+    }
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY_G((xfer_plist >= 0), "H5Pcreate xfer succeeded");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY_G((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        /* collective access mode, but each rank does independent I/O */
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY_G((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    ret = H5Dwrite(dataset, H5T_NATIVE_LLONG, mem_dataspace, file_dataspace, xfer_plist, wdata);
+    VRFY_G((ret >= 0), "H5Dwrite dataset1 succeeded");
+
+    /* release all temporary handles. */
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+
+    ret = H5Dclose(dataset);
+    VRFY_G((ret >= 0), "H5Dclose1 succeeded");
+
+    /* ---- Dataset2: each process writes a contiguous slab of columns ---- */
+    if (mpi_rank_g == 0)
+        HDprintf("\nTesting Dataset2 write by COL\n");
+    /* Create a large dataset */
+    dims[0] = bigcount;
+    dims[1] = (hsize_t)mpi_size_g;
+
+    sid = H5Screate_simple(RANK, dims, NULL);
+    VRFY_G((sid >= 0), "H5Screate_simple succeeded");
+    dataset = H5Dcreate2(fid, DATASET2, H5T_NATIVE_LLONG, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY_G((dataset >= 0), "H5Dcreate2 succeeded");
+    H5Sclose(sid);
+
+    /* One column band per rank; block[0]*block[1] == bigcount elements */
+    block[0]  = dims[0];
+    block[1]  = dims[1] / (hsize_t)mpi_size_g;
+    stride[0] = block[0];
+    stride[1] = block[1];
+    count[0]  = 1;
+    count[1]  = 1;
+    start[0]  = 0;
+    start[1]  = (hsize_t)mpi_rank_g * block[1];
+
+    /* create a file dataspace independently */
+    file_dataspace = H5Dget_space(dataset);
+    VRFY_G((file_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY_G((ret >= 0), "H5Sset_hyperslab succeeded");
+
+    /* create a memory dataspace independently */
+    mem_dataspace = H5Screate_simple(RANK, block, NULL);
+    VRFY_G((mem_dataspace >= 0), "");
+
+    /* fill the local slab with some trivial data */
+    fill_datasets(start, block, wdata);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+        dataset_print(start, block, wdata);
+    }
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY_G((xfer_plist >= 0), "H5Pcreate xfer succeeded");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY_G((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY_G((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    ret = H5Dwrite(dataset, H5T_NATIVE_LLONG, mem_dataspace, file_dataspace, xfer_plist, wdata);
+    VRFY_G((ret >= 0), "H5Dwrite dataset1 succeeded");
+
+    /* release all temporary handles. */
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+
+    ret = H5Dclose(dataset);
+    VRFY_G((ret >= 0), "H5Dclose1 succeeded");
+
+    /* ---- Dataset3: ALL selection on rank 0, NONE on all other ranks ---- */
+    if (mpi_rank_g == 0)
+        HDprintf("\nTesting Dataset3 write select ALL proc 0, NONE others\n");
+    /* Create a large dataset */
+    dims[0] = bigcount;
+    dims[1] = 1;
+
+    sid = H5Screate_simple(RANK, dims, NULL);
+    VRFY_G((sid >= 0), "H5Screate_simple succeeded");
+    dataset = H5Dcreate2(fid, DATASET3, H5T_NATIVE_LLONG, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY_G((dataset >= 0), "H5Dcreate2 succeeded");
+    H5Sclose(sid);
+
+    /* create a file dataspace independently */
+    file_dataspace = H5Dget_space(dataset);
+    VRFY_G((file_dataspace >= 0), "H5Dget_space succeeded");
+    if (mpi_rank_g == 0) {
+        ret = H5Sselect_all(file_dataspace);
+        VRFY_G((ret >= 0), "H5Sset_all succeeded");
+    }
+    else {
+        ret = H5Sselect_none(file_dataspace);
+        VRFY_G((ret >= 0), "H5Sset_none succeeded");
+    }
+
+    /* create a memory dataspace independently; non-zero ranks select none
+     * in memory to match their empty file selection */
+    mem_dataspace = H5Screate_simple(RANK, dims, NULL);
+    VRFY_G((mem_dataspace >= 0), "");
+    if (mpi_rank_g != 0) {
+        ret = H5Sselect_none(mem_dataspace);
+        VRFY_G((ret >= 0), "H5Sset_none succeeded");
+    }
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY_G((xfer_plist >= 0), "H5Pcreate xfer succeeded");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY_G((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY_G((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* fill the local slab with some trivial data (start carries over from
+     * the Dataset2 section above; only rank 0's buffer is actually written) */
+    fill_datasets(start, dims, wdata);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+    }
+
+    ret = H5Dwrite(dataset, H5T_NATIVE_LLONG, mem_dataspace, file_dataspace, xfer_plist, wdata);
+    VRFY_G((ret >= 0), "H5Dwrite dataset1 succeeded");
+
+    /* release all temporary handles. */
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+
+    ret = H5Dclose(dataset);
+    VRFY_G((ret >= 0), "H5Dclose1 succeeded");
+
+    /* ---- Dataset4: point (element) selection ---- */
+    if (mpi_rank_g == 0)
+        HDprintf("\nTesting Dataset4 write point selection\n");
+    /* Create a large dataset */
+    dims[0] = bigcount;
+    dims[1] = (hsize_t)(mpi_size_g * 4);
+
+    sid = H5Screate_simple(RANK, dims, NULL);
+    VRFY_G((sid >= 0), "H5Screate_simple succeeded");
+    dataset = H5Dcreate2(fid, DATASET4, H5T_NATIVE_LLONG, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY_G((dataset >= 0), "H5Dcreate2 succeeded");
+    H5Sclose(sid);
+
+    /* bigcount points per rank, laid out as a (bigcount/2 x 2) block */
+    block[0]  = dims[0] / 2;
+    block[1]  = 2;
+    stride[0] = dims[0] / 2;
+    stride[1] = 2;
+    count[0]  = 1;
+    count[1]  = 1;
+    start[0]  = 0;
+    start[1]  = dims[1] / (hsize_t)mpi_size_g * (hsize_t)mpi_rank_g;
+
+    num_points = bigcount;
+
+    coords = (hsize_t *)HDmalloc(num_points * RANK * sizeof(hsize_t));
+    VRFY_G((coords != NULL), "coords malloc succeeded");
+
+    set_coords(start, count, stride, block, num_points, coords, IN_ORDER);
+    /* create a file dataspace */
+    file_dataspace = H5Dget_space(dataset);
+    VRFY_G((file_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_elements(file_dataspace, H5S_SELECT_SET, num_points, coords);
+    VRFY_G((ret >= 0), "H5Sselect_elements succeeded");
+
+    /* HDfree (not bare free) to match the HDmalloc above, consistent with
+     * the rest of this file (see dataset_big_read) */
+    if (coords)
+        HDfree(coords);
+
+    fill_datasets(start, block, wdata);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+        dataset_print(start, block, wdata);
+    }
+
+    /* create a memory dataspace */
+    /* Warning: H5Screate_simple requires an array of hsize_t elements
+     * even if we only pass only a single value. Attempting anything else
+     * appears to cause problems with 32 bit compilers.
+     */
+    mem_dataspace = H5Screate_simple(1, dims, NULL);
+    VRFY_G((mem_dataspace >= 0), "");
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY_G((xfer_plist >= 0), "H5Pcreate xfer succeeded");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY_G((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY_G((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    ret = H5Dwrite(dataset, H5T_NATIVE_LLONG, mem_dataspace, file_dataspace, xfer_plist, wdata);
+    VRFY_G((ret >= 0), "H5Dwrite dataset1 succeeded");
+
+    /* release all temporary handles. */
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+
+    ret = H5Dclose(dataset);
+    VRFY_G((ret >= 0), "H5Dclose1 succeeded");
+
+    HDfree(wdata);
+    H5Fclose(fid);
+}
+
+/*
+ * Use the parallel HDF5 library to collectively read back and verify the
+ * four large datasets created by dataset_big_write():
+ *   Dataset1 - read with a by-COL hyperslab selection
+ *   Dataset2 - read with a by-ROW hyperslab selection
+ *   Dataset3 - ALL selection on rank 0, NONE on all other ranks
+ *   Dataset4 - point (element) selection
+ * Calls exit(1) on any verification failure.
+ */
+
+static void
+dataset_big_read(void)
+{
+    hid_t       fid;                       /* HDF5 file ID */
+    hid_t       acc_tpl;                   /* File access templates */
+    hid_t       xfer_plist;                /* Dataset transfer properties list */
+    hid_t       file_dataspace;            /* File dataspace ID */
+    hid_t       mem_dataspace;             /* memory dataspace ID */
+    hid_t       dataset;
+    B_DATATYPE *rdata = NULL;              /* data buffer */
+    B_DATATYPE *wdata = NULL;              /* expected data buffer */
+    hsize_t     dims[RANK];                /* dataset dim sizes */
+    hsize_t     start[RANK];               /* for hyperslab setting */
+    hsize_t     count[RANK], stride[RANK]; /* for hyperslab setting */
+    hsize_t     block[RANK];               /* for hyperslab setting */
+    size_t      num_points;
+    hsize_t    *coords = NULL;             /* point-selection coordinate list */
+    herr_t      ret;                       /* Generic return value */
+
+    /* allocate memory for data buffer */
+    rdata = (B_DATATYPE *)HDmalloc(bigcount * sizeof(B_DATATYPE));
+    VRFY_G((rdata != NULL), "rdata malloc succeeded");
+    wdata = (B_DATATYPE *)HDmalloc(bigcount * sizeof(B_DATATYPE));
+    VRFY_G((wdata != NULL), "wdata malloc succeeded");
+
+    HDmemset(rdata, 0, bigcount * sizeof(B_DATATYPE));
+
+    /* setup file access template for MPI-IO access */
+    acc_tpl = H5Pcreate(H5P_FILE_ACCESS);
+    VRFY_G((acc_tpl >= 0), "H5P_FILE_ACCESS");
+    H5Pset_fapl_mpio(acc_tpl, MPI_COMM_WORLD, MPI_INFO_NULL);
+
+    /* open the file collectively */
+    fid = H5Fopen(FILENAME[0], H5F_ACC_RDONLY, acc_tpl);
+    VRFY_G((fid >= 0), "H5Fopen succeeded");
+
+    /* Release file-access template */
+    ret = H5Pclose(acc_tpl);
+    VRFY_G((ret >= 0), "");
+
+    /* ---- Dataset1 (written by ROW) read back by COL slabs ---- */
+    if (mpi_rank_g == 0)
+        HDprintf("\nRead Testing Dataset1 by COL\n");
+
+    dataset = H5Dopen2(fid, DATASET1, H5P_DEFAULT);
+    VRFY_G((dataset >= 0), "H5Dopen2 succeeded");
+
+    dims[0] = bigcount;
+    dims[1] = (hsize_t)mpi_size_g;
+    /* Each process takes a slabs of cols. */
+    block[0]  = dims[0];
+    block[1]  = dims[1] / (hsize_t)mpi_size_g;
+    stride[0] = block[0];
+    stride[1] = block[1];
+    count[0]  = 1;
+    count[1]  = 1;
+    start[0]  = 0;
+    start[1]  = (hsize_t)mpi_rank_g * block[1];
+
+    /* create a file dataspace independently */
+    file_dataspace = H5Dget_space(dataset);
+    VRFY_G((file_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY_G((ret >= 0), "H5Sset_hyperslab succeeded");
+
+    /* create a memory dataspace independently */
+    mem_dataspace = H5Screate_simple(RANK, block, NULL);
+    VRFY_G((mem_dataspace >= 0), "");
+
+    /* fill wdata with the expected values for this slab */
+    fill_datasets(start, block, wdata);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+    }
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY_G((xfer_plist >= 0), "");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY_G((ret >= 0), "H5Pcreate xfer succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        /* collective access mode, but each rank does independent I/O */
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY_G((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* read data collectively */
+    ret = H5Dread(dataset, H5T_NATIVE_LLONG, mem_dataspace, file_dataspace, xfer_plist, rdata);
+    VRFY_G((ret >= 0), "H5Dread dataset1 succeeded");
+
+    /* verify the read data with original expected data */
+    ret = verify_data(start, count, stride, block, rdata, wdata);
+    if (ret) {
+        HDfprintf(stderr, "verify failed\n");
+        exit(1);
+    }
+
+    /* release all temporary handles. */
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+    ret = H5Dclose(dataset);
+    VRFY_G((ret >= 0), "H5Dclose1 succeeded");
+
+    /* ---- Dataset2 (written by COL) read back by ROW slabs ---- */
+    if (mpi_rank_g == 0)
+        HDprintf("\nRead Testing Dataset2 by ROW\n");
+    HDmemset(rdata, 0, bigcount * sizeof(B_DATATYPE));
+    dataset = H5Dopen2(fid, DATASET2, H5P_DEFAULT);
+    VRFY_G((dataset >= 0), "H5Dopen2 succeeded");
+
+    dims[0] = bigcount;
+    dims[1] = (hsize_t)mpi_size_g;
+    /* Each process takes a slabs of rows. */
+    block[0]  = dims[0] / (hsize_t)mpi_size_g;
+    block[1]  = dims[1];
+    stride[0] = block[0];
+    stride[1] = block[1];
+    count[0]  = 1;
+    count[1]  = 1;
+    start[0]  = (hsize_t)mpi_rank_g * block[0];
+    start[1]  = 0;
+
+    /* create a file dataspace independently */
+    file_dataspace = H5Dget_space(dataset);
+    VRFY_G((file_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY_G((ret >= 0), "H5Sset_hyperslab succeeded");
+
+    /* create a memory dataspace independently */
+    mem_dataspace = H5Screate_simple(RANK, block, NULL);
+    VRFY_G((mem_dataspace >= 0), "");
+
+    /* fill wdata with the expected values for this slab */
+    fill_datasets(start, block, wdata);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+    }
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY_G((xfer_plist >= 0), "");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY_G((ret >= 0), "H5Pcreate xfer succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY_G((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* read data collectively */
+    ret = H5Dread(dataset, H5T_NATIVE_LLONG, mem_dataspace, file_dataspace, xfer_plist, rdata);
+    VRFY_G((ret >= 0), "H5Dread dataset2 succeeded");
+
+    /* verify the read data with original expected data */
+    ret = verify_data(start, count, stride, block, rdata, wdata);
+    if (ret) {
+        HDfprintf(stderr, "verify failed\n");
+        exit(1);
+    }
+
+    /* release all temporary handles. */
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+    ret = H5Dclose(dataset);
+    VRFY_G((ret >= 0), "H5Dclose1 succeeded");
+
+    /* ---- Dataset3: ALL selection on rank 0, NONE on all other ranks ---- */
+    if (mpi_rank_g == 0)
+        HDprintf("\nRead Testing Dataset3 read select ALL proc 0, NONE others\n");
+    HDmemset(rdata, 0, bigcount * sizeof(B_DATATYPE));
+    dataset = H5Dopen2(fid, DATASET3, H5P_DEFAULT);
+    VRFY_G((dataset >= 0), "H5Dopen2 succeeded");
+
+    dims[0] = bigcount;
+    dims[1] = 1;
+
+    /* create a file dataspace independently */
+    file_dataspace = H5Dget_space(dataset);
+    VRFY_G((file_dataspace >= 0), "H5Dget_space succeeded");
+    if (mpi_rank_g == 0) {
+        ret = H5Sselect_all(file_dataspace);
+        VRFY_G((ret >= 0), "H5Sset_all succeeded");
+    }
+    else {
+        ret = H5Sselect_none(file_dataspace);
+        VRFY_G((ret >= 0), "H5Sset_none succeeded");
+    }
+
+    /* create a memory dataspace independently; non-zero ranks select none
+     * in memory to match their empty file selection */
+    mem_dataspace = H5Screate_simple(RANK, dims, NULL);
+    VRFY_G((mem_dataspace >= 0), "");
+    if (mpi_rank_g != 0) {
+        ret = H5Sselect_none(mem_dataspace);
+        VRFY_G((ret >= 0), "H5Sset_none succeeded");
+    }
+
+    /* fill wdata with the expected values (start/block carry over from the
+     * Dataset2 section; only rank 0 verifies below) */
+    fill_datasets(start, dims, wdata);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+    }
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY_G((xfer_plist >= 0), "");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY_G((ret >= 0), "H5Pcreate xfer succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY_G((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* read data collectively */
+    ret = H5Dread(dataset, H5T_NATIVE_LLONG, mem_dataspace, file_dataspace, xfer_plist, rdata);
+    VRFY_G((ret >= 0), "H5Dread dataset3 succeeded");
+
+    if (mpi_rank_g == 0) {
+        /* verify the read data with original expected data */
+        ret = verify_data(start, count, stride, block, rdata, wdata);
+        if (ret) {
+            HDfprintf(stderr, "verify failed\n");
+            exit(1);
+        }
+    }
+
+    /* release all temporary handles. */
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+    ret = H5Dclose(dataset);
+    VRFY_G((ret >= 0), "H5Dclose1 succeeded");
+
+    /* ---- Dataset4: point (element) selection ---- */
+    if (mpi_rank_g == 0)
+        HDprintf("\nRead Testing Dataset4 with Point selection\n");
+    dataset = H5Dopen2(fid, DATASET4, H5P_DEFAULT);
+    VRFY_G((dataset >= 0), "H5Dopen2 succeeded");
+
+    dims[0] = bigcount;
+    dims[1] = (hsize_t)(mpi_size_g * 4);
+
+    /* same bigcount-point layout that dataset_big_write used */
+    block[0]  = dims[0] / 2;
+    block[1]  = 2;
+    stride[0] = dims[0] / 2;
+    stride[1] = 2;
+    count[0]  = 1;
+    count[1]  = 1;
+    start[0]  = 0;
+    start[1]  = dims[1] / (hsize_t)mpi_size_g * (hsize_t)mpi_rank_g;
+
+    fill_datasets(start, block, wdata);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+        dataset_print(start, block, wdata);
+    }
+
+    num_points = bigcount;
+
+    coords = (hsize_t *)HDmalloc(num_points * RANK * sizeof(hsize_t));
+    VRFY_G((coords != NULL), "coords malloc succeeded");
+
+    set_coords(start, count, stride, block, num_points, coords, IN_ORDER);
+    /* create a file dataspace */
+    file_dataspace = H5Dget_space(dataset);
+    VRFY_G((file_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_elements(file_dataspace, H5S_SELECT_SET, num_points, coords);
+    VRFY_G((ret >= 0), "H5Sselect_elements succeeded");
+
+    if (coords)
+        HDfree(coords);
+
+    /* create a memory dataspace */
+    /* Warning: H5Screate_simple requires an array of hsize_t elements
+     * even if we only pass only a single value. Attempting anything else
+     * appears to cause problems with 32 bit compilers.
+     */
+    mem_dataspace = H5Screate_simple(1, dims, NULL);
+    VRFY_G((mem_dataspace >= 0), "");
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY_G((xfer_plist >= 0), "");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY_G((ret >= 0), "H5Pcreate xfer succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY_G((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* read data collectively */
+    ret = H5Dread(dataset, H5T_NATIVE_LLONG, mem_dataspace, file_dataspace, xfer_plist, rdata);
+    VRFY_G((ret >= 0), "H5Dread dataset1 succeeded");
+
+    ret = verify_data(start, count, stride, block, rdata, wdata);
+    if (ret) {
+        HDfprintf(stderr, "verify failed\n");
+        exit(1);
+    }
+
+    /* release all temporary handles. */
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+    ret = H5Dclose(dataset);
+    VRFY_G((ret >= 0), "H5Dclose1 succeeded");
+
+    HDfree(wdata);
+    HDfree(rdata);
+
+    /* Dataset5 was never written by dataset_big_write(), so there is nothing
+     * further to read here.  The vestigial bookkeeping that used to follow
+     * (handles pre-set to -1 and then tested "!= -1", plus a second guarded
+     * free of the already-freed, NULLed buffers) was dead code and has been
+     * removed. */
+    H5Fclose(fid);
+
+} /* dataset_big_read */
+
+/*
+ * Regression test: a single rank (rank 0) creates, writes, reads back and
+ * verifies a >2GB dataset through an MPI_COMM_SELF file, exercising I/O
+ * sizes that exceed MPI's INT_MAX element-count limitation.  All other
+ * ranks just wait at the barrier.  Calls exit(1) on verification failure.
+ */
+static void
+single_rank_independent_io(void)
+{
+    if (mpi_rank_g == 0)
+        HDprintf("single_rank_independent_io\n");
+
+    if (MAIN_PROCESS) {
+        hsize_t  dims[1];
+        hid_t    file_id   = -1;
+        hid_t    fapl_id   = -1;
+        hid_t    dset_id   = -1;
+        hid_t    fspace_id = -1;
+        herr_t   ret;
+        int     *data = NULL;
+        uint64_t i;
+
+        fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+        VRFY_G((fapl_id >= 0), "H5P_FILE_ACCESS");
+
+        /* File is private to this rank (MPI_COMM_SELF) */
+        H5Pset_fapl_mpio(fapl_id, MPI_COMM_SELF, MPI_INFO_NULL);
+        file_id = H5Fcreate(FILENAME[1], H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+        /* fixed: the failure message previously said "H5Dcreate2 succeeded" */
+        VRFY_G((file_id >= 0), "H5Fcreate succeeded");
+
+        /*
+         * Calculate the number of elements needed to exceed
+         * MPI's INT_MAX limitation
+         */
+        dims[0] = (INT_MAX / sizeof(int)) + 10;
+
+        fspace_id = H5Screate_simple(1, dims, NULL);
+        VRFY_G((fspace_id >= 0), "H5Screate_simple fspace_id succeeded");
+
+        /*
+         * Create and write to a >2GB dataset from a single rank.
+         */
+        dset_id = H5Dcreate2(file_id, "test_dset", H5T_NATIVE_INT, fspace_id, H5P_DEFAULT, H5P_DEFAULT,
+                             H5P_DEFAULT);
+
+        VRFY_G((dset_id >= 0), "H5Dcreate2 succeeded");
+
+        /* ~2GB allocation -- check it before use (was previously unchecked);
+         * HDmalloc/HDfree for consistency with the rest of this file */
+        data = (int *)HDmalloc(dims[0] * sizeof(int));
+        VRFY_G((data != NULL), "data malloc succeeded");
+
+        /* Initialize data */
+        for (i = 0; i < dims[0]; i++)
+            data[i] = (int)(i % (uint64_t)DXFER_BIGCOUNT);
+
+        /* Write data */
+        ret = H5Dwrite(dset_id, H5T_NATIVE_INT, H5S_BLOCK, fspace_id, H5P_DEFAULT, data);
+        VRFY_G((ret >= 0), "H5Dwrite succeeded");
+
+        /* Wipe buffer */
+        HDmemset(data, 0, dims[0] * sizeof(int));
+
+        /* Read data back */
+        ret = H5Dread(dset_id, H5T_NATIVE_INT, H5S_BLOCK, fspace_id, H5P_DEFAULT, data);
+        VRFY_G((ret >= 0), "H5Dread succeeded");
+
+        /* Verify data */
+        for (i = 0; i < dims[0]; i++)
+            if (data[i] != (int)(i % (uint64_t)DXFER_BIGCOUNT)) {
+                HDfprintf(stderr, "verify failed\n");
+                exit(1);
+            }
+
+        HDfree(data);
+        H5Sclose(fspace_id);
+        H5Dclose(dset_id);
+        H5Fclose(file_id);
+
+        H5Fdelete(FILENAME[1], fapl_id);
+
+        H5Pclose(fapl_id);
+    }
+    MPI_Barrier(MPI_COMM_WORLD);
+}
+
+/*
+ * Create the appropriate File access property list for the requested
+ * access type.
+ *
+ * l_facc_type selects the flavor:
+ *   FACC_DEFAULT           - plain H5P_FILE_ACCESS list, nothing else set
+ *   FACC_MPIO              - MPI-IO driver on `comm`/`info`, with collective
+ *                            metadata reads and writes enabled
+ *   FACC_MPIO | FACC_SPLIT - split (.meta/.raw) driver layered on MPI-IO
+ *   anything else          - falls through and returns the plain list
+ *
+ * Returns the new property list id (caller is responsible for closing it),
+ * or a negative value if H5Pcreate itself fails.
+ */
+hid_t
+create_faccess_plist(MPI_Comm comm, MPI_Info info, int l_facc_type)
+{
+    hid_t  ret_pl = -1;
+    herr_t ret;      /* generic return value */
+    int    mpi_rank; /* mpi variables */
+
+    /* need the rank for error checking macros */
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    ret_pl = H5Pcreate(H5P_FILE_ACCESS);
+    VRFY_G((ret_pl >= 0), "H5P_FILE_ACCESS");
+
+    if (l_facc_type == FACC_DEFAULT)
+        return (ret_pl);
+
+    if (l_facc_type == FACC_MPIO) {
+        /* set Parallel access with communicator */
+        ret = H5Pset_fapl_mpio(ret_pl, comm, info);
+        VRFY_G((ret >= 0), "");
+        /* issue all metadata reads and writes collectively */
+        ret = H5Pset_all_coll_metadata_ops(ret_pl, TRUE);
+        VRFY_G((ret >= 0), "");
+        ret = H5Pset_coll_metadata_write(ret_pl, TRUE);
+        VRFY_G((ret >= 0), "");
+        return (ret_pl);
+    }
+
+    if (l_facc_type == (FACC_MPIO | FACC_SPLIT)) {
+        hid_t mpio_pl;
+
+        mpio_pl = H5Pcreate(H5P_FILE_ACCESS);
+        VRFY_G((mpio_pl >= 0), "");
+        /* set Parallel access with communicator */
+        ret = H5Pset_fapl_mpio(mpio_pl, comm, info);
+        VRFY_G((ret >= 0), "");
+
+        /* setup file access template */
+        ret_pl = H5Pcreate(H5P_FILE_ACCESS);
+        VRFY_G((ret_pl >= 0), "");
+        /* split driver: metadata to ".meta", raw data to ".raw", both using
+         * the MPI-IO plist built above */
+        ret = H5Pset_fapl_split(ret_pl, ".meta", mpio_pl, ".raw", mpio_pl);
+        VRFY_G((ret >= 0), "H5Pset_fapl_split succeeded");
+        H5Pclose(mpio_pl);
+        return (ret_pl);
+    }
+
+    /* unknown file access types */
+    return (ret_pl);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunk1
+ *
+ * Purpose: Wrapper to test the collective chunk IO for regular JOINT
+ selection with a single chunk
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* ------------------------------------------------------------------------
+ * Descriptions for the selection: One big singular selection inside one chunk
+ * Two dimensions,
+ *
+ * dim1 = space_dim1(5760)*mpi_size
+ * dim2 = space_dim2(3)
+ * chunk_dim1 = dim1
+ * chunk_dim2 = dim2
+ * block = 1 for all dimensions
+ * stride = 1 for all dimensions
+ * count0 = space_dim1(5760)
+ * count1 = space_dim2(3)
+ * start0 = mpi_rank*space_dim1
+ * start1 = 0
+ * ------------------------------------------------------------------------
+ */
+
+/* Driver: collective chunk I/O over one big contiguous (JOINT) selection that
+ * fits inside a single chunk.  Exercises hyperslab/point/all combinations for
+ * the file and memory dataspaces, with point selections in both orders.
+ *
+ * NOTE: every coll_chunktest() call below is collective across all MPI ranks;
+ * the call sequence must be identical on every rank.  Arguments are
+ * (filename, chunk_factor, slab pattern, API option, file selection,
+ *  memory selection, point order).
+ */
+void
+coll_chunk1(void)
+{
+ const char *filename = FILENAME[0];
+ /* Only rank 0 announces the sub-test to keep output readable. */
+ if (mpi_rank_g == 0)
+ HDprintf("coll_chunk1\n");
+
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, HYPER, HYPER, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, HYPER, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, POINT, ALL, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, POINT, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, POINT, HYPER, OUT_OF_ORDER);
+
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, POINT, ALL, IN_ORDER);
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, POINT, POINT, IN_ORDER);
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, POINT, HYPER, IN_ORDER);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunk2
+ *
+ * Purpose: Wrapper to test the collective chunk IO for regular DISJOINT
+ *                      selection with a single chunk
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* ------------------------------------------------------------------------
+ * Descriptions for the selection: many disjoint selections inside one chunk
+ * Two dimensions,
+ *
+ * dim1 = space_dim1*mpi_size(5760)
+ * dim2 = space_dim2(3)
+ * chunk_dim1 = dim1
+ * chunk_dim2 = dim2
+ * block = 1 for all dimensions
+ * stride = 3 for all dimensions
+ * count0 = space_dim1/stride0(5760/3)
+ * count1 = space_dim2/stride(3/3 = 1)
+ * start0 = mpi_rank*space_dim1
+ * start1 = 0
+ *
+ * ------------------------------------------------------------------------
+ */
+/* Driver: collective chunk I/O over many DISJOINT (strided, discontiguous)
+ * selections that all fall inside a single chunk.
+ *
+ * NOTE: every coll_chunktest() call below is collective across all MPI ranks;
+ * the call sequence must be identical on every rank.
+ */
+void
+coll_chunk2(void)
+{
+ const char *filename = FILENAME[0];
+ /* Only rank 0 announces the sub-test. */
+ if (mpi_rank_g == 0)
+ HDprintf("coll_chunk2\n");
+
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, HYPER, HYPER, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, HYPER, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, POINT, ALL, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, POINT, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, POINT, HYPER, OUT_OF_ORDER);
+
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, POINT, ALL, IN_ORDER);
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, POINT, POINT, IN_ORDER);
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, POINT, HYPER, IN_ORDER);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunk3
+ *
+ * Purpose: Wrapper to test the collective chunk IO for regular JOINT
+ *                      selection spanning at least 2*mpi_size chunks
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* ------------------------------------------------------------------------
+ * Descriptions for the selection: one singular selection across many chunks
+ * Two dimensions, Num of chunks = 2* mpi_size
+ *
+ * dim1 = space_dim1*mpi_size
+ * dim2 = space_dim2(3)
+ * chunk_dim1 = space_dim1
+ * chunk_dim2 = dim2/2
+ * block = 1 for all dimensions
+ * stride = 1 for all dimensions
+ * count0 = space_dim1
+ * count1 = space_dim2(3)
+ * start0 = mpi_rank*space_dim1
+ * start1 = 0
+ *
+ * ------------------------------------------------------------------------
+ */
+
+/* Driver: collective chunk I/O where one contiguous (JOINT) selection spans
+ * many chunks (chunk_factor = mpi_size_g, so the dataset holds 2*mpi_size
+ * chunks per the header comment above).
+ *
+ * NOTE: every coll_chunktest() call below is collective across all MPI ranks;
+ * the call sequence must be identical on every rank.
+ */
+void
+coll_chunk3(void)
+{
+ const char *filename = FILENAME[0];
+ /* Only rank 0 announces the sub-test. */
+ if (mpi_rank_g == 0)
+ HDprintf("coll_chunk3\n");
+
+ coll_chunktest(filename, mpi_size_g, BYROW_CONT, API_NONE, HYPER, HYPER, OUT_OF_ORDER);
+ coll_chunktest(filename, mpi_size_g, BYROW_CONT, API_NONE, HYPER, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, mpi_size_g, BYROW_CONT, API_NONE, POINT, ALL, OUT_OF_ORDER);
+ coll_chunktest(filename, mpi_size_g, BYROW_CONT, API_NONE, POINT, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, mpi_size_g, BYROW_CONT, API_NONE, POINT, HYPER, OUT_OF_ORDER);
+
+ coll_chunktest(filename, mpi_size_g, BYROW_CONT, API_NONE, POINT, ALL, IN_ORDER);
+ coll_chunktest(filename, mpi_size_g, BYROW_CONT, API_NONE, POINT, POINT, IN_ORDER);
+ coll_chunktest(filename, mpi_size_g, BYROW_CONT, API_NONE, POINT, HYPER, IN_ORDER);
+}
+
+//-------------------------------------------------------------------------
+// Borrowed/Modified (slightly) from t_coll_chunk.c
+/*-------------------------------------------------------------------------
+ * Function: coll_chunktest
+ *
+ * Purpose: The real testing routine for regular selection of collective
+ *                      chunked storage.  Tests both write and read; a failure
+ *                      may come from either phase, as there is no separate
+ *                      test distinguishing read from write.
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* Core test routine: writes a chunked dataset collectively with the requested
+ * selection types, closes everything, then reopens the file and reads the data
+ * back collectively to verify it.
+ *
+ * Parameters:
+ *   filename       - HDF5 file to create/overwrite
+ *   chunk_factor   - divisor for dims[0] to derive chunk_dims[0]
+ *   select_factor  - slab pattern passed to ccslab_set() (e.g. BYROW_CONT)
+ *   api_option     - collective-chunk optimization API to exercise
+ *   file_selection - HYPER / POINT / ALL selection for the file dataspace
+ *   mem_selection  - HYPER / POINT / ALL selection for the memory dataspace
+ *   mode           - point ordering (IN_ORDER / OUT_OF_ORDER) for point_set()
+ *
+ * This routine is collective: all ranks must call it with identical arguments.
+ */
+static void
+coll_chunktest(const char *filename, int chunk_factor, int select_factor, int api_option, int file_selection,
+ int mem_selection, int mode)
+{
+ hid_t file, dataset, file_dataspace, mem_dataspace;
+ hid_t acc_plist, xfer_plist, crp_plist;
+
+ hsize_t dims[RANK], chunk_dims[RANK];
+ int *data_array1 = NULL;
+ int *data_origin1 = NULL;
+
+ hsize_t start[RANK], count[RANK], stride[RANK], block[RANK];
+
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+ unsigned prop_value;
+#endif /* H5_HAVE_INSTRUMENTED_LIBRARY */
+
+ herr_t status;
+ MPI_Comm comm = MPI_COMM_WORLD;
+ MPI_Info info = MPI_INFO_NULL;
+
+ size_t num_points; /* for point selection */
+ hsize_t *coords = NULL; /* for point selection */
+
+ /* Create the data space */
+
+ acc_plist = create_faccess_plist(comm, info, facc_type);
+ VRFY_G((acc_plist >= 0), "");
+
+ file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_plist);
+ VRFY_G((file >= 0), "H5Fcreate succeeded");
+
+ status = H5Pclose(acc_plist);
+ VRFY_G((status >= 0), "");
+
+ /* setup dimensionality object */
+ dims[0] = space_dim1 * (hsize_t)mpi_size_g;
+ dims[1] = space_dim2;
+
+ /* allocate memory for data buffer */
+ data_array1 = (int *)HDmalloc(dims[0] * dims[1] * sizeof(int));
+ VRFY_G((data_array1 != NULL), "data_array1 malloc succeeded");
+
+ /* set up dimensions of the slab this process accesses */
+ ccslab_set(mpi_rank_g, mpi_size_g, start, count, stride, block, select_factor);
+
+ /* set up the coords array selection */
+ num_points = block[0] * block[1] * count[0] * count[1];
+ coords = (hsize_t *)HDmalloc(num_points * RANK * sizeof(hsize_t));
+ VRFY_G((coords != NULL), "coords malloc succeeded");
+ point_set(start, count, stride, block, num_points, coords, mode);
+
+ /* Warning: H5Screate_simple requires an array of hsize_t elements
+ * even if we only pass only a single value. Attempting anything else
+ * appears to cause problems with 32 bit compilers.
+ */
+ file_dataspace = H5Screate_simple(2, dims, NULL);
+ VRFY_G((file_dataspace >= 0), "file dataspace created succeeded");
+
+ if (ALL != mem_selection) {
+ mem_dataspace = H5Screate_simple(2, dims, NULL);
+ VRFY_G((mem_dataspace >= 0), "mem dataspace created succeeded");
+ }
+ else {
+ /* Putting the warning about H5Screate_simple (above) into practice... */
+ hsize_t dsdims[1] = {num_points};
+ mem_dataspace = H5Screate_simple(1, dsdims, NULL);
+ VRFY_G((mem_dataspace >= 0), "mem_dataspace create succeeded");
+ }
+
+ crp_plist = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY_G((crp_plist >= 0), "");
+
+ /* Set up chunk information. */
+ chunk_dims[0] = dims[0] / (hsize_t)chunk_factor;
+
+ /* to decrease the testing time, maintain bigger chunk size */
+ (chunk_factor == 1) ? (chunk_dims[1] = space_dim2) : (chunk_dims[1] = space_dim2 / 2);
+ status = H5Pset_chunk(crp_plist, 2, chunk_dims);
+ VRFY_G((status >= 0), "chunk creation property list succeeded");
+
+ dataset = H5Dcreate2(file, DSET_COLLECTIVE_CHUNK_NAME, H5T_NATIVE_INT, file_dataspace, H5P_DEFAULT,
+ crp_plist, H5P_DEFAULT);
+ VRFY_G((dataset >= 0), "dataset created succeeded");
+
+ status = H5Pclose(crp_plist);
+ VRFY_G((status >= 0), "");
+
+ /*put some trivial data in the data array */
+ ccdataset_fill(start, stride, count, block, data_array1, mem_selection);
+
+ MESG("data_array initialized");
+
+ /* Apply the requested selection to the FILE dataspace.  Only HYPER,
+ * POINT and ALL are ever passed by the drivers, hence no default case. */
+ switch (file_selection) {
+ case HYPER:
+ status = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY_G((status >= 0), "hyperslab selection succeeded");
+ break;
+
+ case POINT:
+ if (num_points) {
+ status = H5Sselect_elements(file_dataspace, H5S_SELECT_SET, num_points, coords);
+ VRFY_G((status >= 0), "Element selection succeeded");
+ }
+ else {
+ status = H5Sselect_none(file_dataspace);
+ VRFY_G((status >= 0), "none selection succeeded");
+ }
+ break;
+
+ case ALL:
+ status = H5Sselect_all(file_dataspace);
+ VRFY_G((status >= 0), "H5Sselect_all succeeded");
+ break;
+ }
+
+ /* Apply the requested selection to the MEMORY dataspace (same shape of
+ * switch as above). */
+ switch (mem_selection) {
+ case HYPER:
+ status = H5Sselect_hyperslab(mem_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY_G((status >= 0), "hyperslab selection succeeded");
+ break;
+
+ case POINT:
+ if (num_points) {
+ status = H5Sselect_elements(mem_dataspace, H5S_SELECT_SET, num_points, coords);
+ VRFY_G((status >= 0), "Element selection succeeded");
+ }
+ else {
+ status = H5Sselect_none(mem_dataspace);
+ VRFY_G((status >= 0), "none selection succeeded");
+ }
+ break;
+
+ case ALL:
+ status = H5Sselect_all(mem_dataspace);
+ VRFY_G((status >= 0), "H5Sselect_all succeeded");
+ break;
+ }
+
+ /* set up the collective transfer property list */
+ xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+ VRFY_G((xfer_plist >= 0), "");
+
+ status = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+ VRFY_G((status >= 0), "MPIO collective transfer property succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ status = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY_G((status >= 0), "set independent IO collectively succeeded");
+ }
+
+ /* Select the collective-chunk optimization strategy under test.
+ * API_NONE (and anything unrecognized) leaves library defaults. */
+ switch (api_option) {
+ case API_LINK_HARD:
+ status = H5Pset_dxpl_mpio_chunk_opt(xfer_plist, H5FD_MPIO_CHUNK_ONE_IO);
+ VRFY_G((status >= 0), "collective chunk optimization succeeded");
+ break;
+
+ case API_MULTI_HARD:
+ status = H5Pset_dxpl_mpio_chunk_opt(xfer_plist, H5FD_MPIO_CHUNK_MULTI_IO);
+ VRFY_G((status >= 0), "collective chunk optimization succeeded ");
+ break;
+
+ case API_LINK_TRUE:
+ status = H5Pset_dxpl_mpio_chunk_opt_num(xfer_plist, 2);
+ VRFY_G((status >= 0), "collective chunk optimization set chunk number succeeded");
+ break;
+
+ case API_LINK_FALSE:
+ status = H5Pset_dxpl_mpio_chunk_opt_num(xfer_plist, 6);
+ VRFY_G((status >= 0), "collective chunk optimization set chunk number succeeded");
+ break;
+
+ case API_MULTI_COLL:
+ status = H5Pset_dxpl_mpio_chunk_opt_num(xfer_plist, 8); /* make sure it is using multi-chunk IO */
+ VRFY_G((status >= 0), "collective chunk optimization set chunk number succeeded");
+ status = H5Pset_dxpl_mpio_chunk_opt_ratio(xfer_plist, 50);
+ VRFY_G((status >= 0), "collective chunk optimization set chunk ratio succeeded");
+ break;
+
+ case API_MULTI_IND:
+ status = H5Pset_dxpl_mpio_chunk_opt_num(xfer_plist, 8); /* make sure it is using multi-chunk IO */
+ VRFY_G((status >= 0), "collective chunk optimization set chunk number succeeded");
+ status = H5Pset_dxpl_mpio_chunk_opt_ratio(xfer_plist, 100);
+ VRFY_G((status >= 0), "collective chunk optimization set chunk ratio succeeded");
+ break;
+
+ default:;
+ }
+
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+ /* Instrumented builds only: plant a named "witness" property on the DXPL.
+ * The library clears it when the corresponding I/O path is actually taken;
+ * the H5Pget checks after H5Dwrite verify prop_value became 0. */
+ if (facc_type == FACC_MPIO) {
+ switch (api_option) {
+ case API_LINK_HARD:
+ prop_value = H5D_XFER_COLL_CHUNK_DEF;
+ status = H5Pinsert2(xfer_plist, H5D_XFER_COLL_CHUNK_LINK_HARD_NAME, H5D_XFER_COLL_CHUNK_SIZE,
+ &prop_value, NULL, NULL, NULL, NULL, NULL, NULL);
+ VRFY_G((status >= 0), "testing property list inserted succeeded");
+ break;
+
+ case API_MULTI_HARD:
+ prop_value = H5D_XFER_COLL_CHUNK_DEF;
+ status = H5Pinsert2(xfer_plist, H5D_XFER_COLL_CHUNK_MULTI_HARD_NAME, H5D_XFER_COLL_CHUNK_SIZE,
+ &prop_value, NULL, NULL, NULL, NULL, NULL, NULL);
+ VRFY_G((status >= 0), "testing property list inserted succeeded");
+ break;
+
+ case API_LINK_TRUE:
+ prop_value = H5D_XFER_COLL_CHUNK_DEF;
+ status =
+ H5Pinsert2(xfer_plist, H5D_XFER_COLL_CHUNK_LINK_NUM_TRUE_NAME, H5D_XFER_COLL_CHUNK_SIZE,
+ &prop_value, NULL, NULL, NULL, NULL, NULL, NULL);
+ VRFY_G((status >= 0), "testing property list inserted succeeded");
+ break;
+
+ case API_LINK_FALSE:
+ prop_value = H5D_XFER_COLL_CHUNK_DEF;
+ status =
+ H5Pinsert2(xfer_plist, H5D_XFER_COLL_CHUNK_LINK_NUM_FALSE_NAME, H5D_XFER_COLL_CHUNK_SIZE,
+ &prop_value, NULL, NULL, NULL, NULL, NULL, NULL);
+ VRFY_G((status >= 0), "testing property list inserted succeeded");
+ break;
+
+ case API_MULTI_COLL:
+ prop_value = H5D_XFER_COLL_CHUNK_DEF;
+ status =
+ H5Pinsert2(xfer_plist, H5D_XFER_COLL_CHUNK_MULTI_RATIO_COLL_NAME,
+ H5D_XFER_COLL_CHUNK_SIZE, &prop_value, NULL, NULL, NULL, NULL, NULL, NULL);
+ VRFY_G((status >= 0), "testing property list inserted succeeded");
+ break;
+
+ case API_MULTI_IND:
+ prop_value = H5D_XFER_COLL_CHUNK_DEF;
+ status =
+ H5Pinsert2(xfer_plist, H5D_XFER_COLL_CHUNK_MULTI_RATIO_IND_NAME, H5D_XFER_COLL_CHUNK_SIZE,
+ &prop_value, NULL, NULL, NULL, NULL, NULL, NULL);
+ VRFY_G((status >= 0), "testing property list inserted succeeded");
+ break;
+
+ default:;
+ }
+ }
+#endif
+
+ /* write data collectively */
+ status = H5Dwrite(dataset, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+ VRFY_G((status >= 0), "dataset write succeeded");
+
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+ /* Verify that the witness property planted above was cleared (== 0),
+ * proving the requested collective-chunk path was exercised. */
+ if (facc_type == FACC_MPIO) {
+ switch (api_option) {
+ case API_LINK_HARD:
+ status = H5Pget(xfer_plist, H5D_XFER_COLL_CHUNK_LINK_HARD_NAME, &prop_value);
+ VRFY_G((status >= 0), "testing property list get succeeded");
+ VRFY_G((prop_value == 0), "API to set LINK COLLECTIVE IO directly succeeded");
+ break;
+
+ case API_MULTI_HARD:
+ status = H5Pget(xfer_plist, H5D_XFER_COLL_CHUNK_MULTI_HARD_NAME, &prop_value);
+ VRFY_G((status >= 0), "testing property list get succeeded");
+ VRFY_G((prop_value == 0), "API to set MULTI-CHUNK COLLECTIVE IO optimization succeeded");
+ break;
+
+ case API_LINK_TRUE:
+ status = H5Pget(xfer_plist, H5D_XFER_COLL_CHUNK_LINK_NUM_TRUE_NAME, &prop_value);
+ VRFY_G((status >= 0), "testing property list get succeeded");
+ VRFY_G((prop_value == 0), "API to set LINK COLLECTIVE IO succeeded");
+ break;
+
+ case API_LINK_FALSE:
+ status = H5Pget(xfer_plist, H5D_XFER_COLL_CHUNK_LINK_NUM_FALSE_NAME, &prop_value);
+ VRFY_G((status >= 0), "testing property list get succeeded");
+ VRFY_G((prop_value == 0), "API to set LINK IO transferring to multi-chunk IO succeeded");
+ break;
+
+ case API_MULTI_COLL:
+ status = H5Pget(xfer_plist, H5D_XFER_COLL_CHUNK_MULTI_RATIO_COLL_NAME, &prop_value);
+ VRFY_G((status >= 0), "testing property list get succeeded");
+ VRFY_G((prop_value == 0), "API to set MULTI-CHUNK COLLECTIVE IO with optimization succeeded");
+ break;
+
+ case API_MULTI_IND:
+ status = H5Pget(xfer_plist, H5D_XFER_COLL_CHUNK_MULTI_RATIO_IND_NAME, &prop_value);
+ VRFY_G((status >= 0), "testing property list get succeeded");
+ VRFY_G((prop_value == 0),
+ "API to set MULTI-CHUNK IO transferring to independent IO succeeded");
+ break;
+
+ default:;
+ }
+ }
+#endif
+
+ status = H5Dclose(dataset);
+ VRFY_G((status >= 0), "");
+
+ status = H5Pclose(xfer_plist);
+ VRFY_G((status >= 0), "property list closed");
+
+ status = H5Sclose(file_dataspace);
+ VRFY_G((status >= 0), "");
+
+ status = H5Sclose(mem_dataspace);
+ VRFY_G((status >= 0), "");
+
+ status = H5Fclose(file);
+ VRFY_G((status >= 0), "");
+
+ if (data_array1)
+ HDfree(data_array1);
+
+ /* Use collective read to verify the correctness of collective write. */
+
+ /* allocate memory for data buffer */
+ data_array1 = (int *)HDmalloc(dims[0] * dims[1] * sizeof(int));
+ VRFY_G((data_array1 != NULL), "data_array1 malloc succeeded");
+
+ /* allocate memory for data buffer */
+ data_origin1 = (int *)HDmalloc(dims[0] * dims[1] * sizeof(int));
+ VRFY_G((data_origin1 != NULL), "data_origin1 malloc succeeded");
+
+ acc_plist = create_faccess_plist(comm, info, facc_type);
+ VRFY_G((acc_plist >= 0), "MPIO creation property list succeeded");
+
+ /* NOTE(review): this is H5Fopen, but the assertion message below says
+ * "H5Fcreate succeeded" -- misleading diagnostic on failure. */
+ file = H5Fopen(FILENAME[0], H5F_ACC_RDONLY, acc_plist);
+ VRFY_G((file >= 0), "H5Fcreate succeeded");
+
+ status = H5Pclose(acc_plist);
+ VRFY_G((status >= 0), "");
+
+ /* open the collective dataset*/
+ dataset = H5Dopen2(file, DSET_COLLECTIVE_CHUNK_NAME, H5P_DEFAULT);
+ VRFY_G((dataset >= 0), "");
+
+ /* set up dimensions of the slab this process accesses */
+ ccslab_set(mpi_rank_g, mpi_size_g, start, count, stride, block, select_factor);
+
+ /* obtain the file and mem dataspace*/
+ file_dataspace = H5Dget_space(dataset);
+ VRFY_G((file_dataspace >= 0), "");
+
+ if (ALL != mem_selection) {
+ mem_dataspace = H5Dget_space(dataset);
+ VRFY_G((mem_dataspace >= 0), "");
+ }
+ else {
+ /* Warning: H5Screate_simple requires an array of hsize_t elements
+ * even if we only pass only a single value. Attempting anything else
+ * appears to cause problems with 32 bit compilers.
+ */
+ hsize_t dsdims[1] = {num_points};
+ mem_dataspace = H5Screate_simple(1, dsdims, NULL);
+ VRFY_G((mem_dataspace >= 0), "mem_dataspace create succeeded");
+ }
+
+ /* Re-apply the same selections for the read-back phase (mirrors the
+ * write-phase switches above). */
+ switch (file_selection) {
+ case HYPER:
+ status = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY_G((status >= 0), "hyperslab selection succeeded");
+ break;
+
+ case POINT:
+ if (num_points) {
+ status = H5Sselect_elements(file_dataspace, H5S_SELECT_SET, num_points, coords);
+ VRFY_G((status >= 0), "Element selection succeeded");
+ }
+ else {
+ status = H5Sselect_none(file_dataspace);
+ VRFY_G((status >= 0), "none selection succeeded");
+ }
+ break;
+
+ case ALL:
+ status = H5Sselect_all(file_dataspace);
+ VRFY_G((status >= 0), "H5Sselect_all succeeded");
+ break;
+ }
+
+ switch (mem_selection) {
+ case HYPER:
+ status = H5Sselect_hyperslab(mem_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY_G((status >= 0), "hyperslab selection succeeded");
+ break;
+
+ case POINT:
+ if (num_points) {
+ status = H5Sselect_elements(mem_dataspace, H5S_SELECT_SET, num_points, coords);
+ VRFY_G((status >= 0), "Element selection succeeded");
+ }
+ else {
+ status = H5Sselect_none(mem_dataspace);
+ VRFY_G((status >= 0), "none selection succeeded");
+ }
+ break;
+
+ case ALL:
+ status = H5Sselect_all(mem_dataspace);
+ VRFY_G((status >= 0), "H5Sselect_all succeeded");
+ break;
+ }
+
+ /* fill dataset with test data */
+ ccdataset_fill(start, stride, count, block, data_origin1, mem_selection);
+ xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+ VRFY_G((xfer_plist >= 0), "");
+
+ status = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+ VRFY_G((status >= 0), "MPIO collective transfer property succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ status = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY_G((status >= 0), "set independent IO collectively succeeded");
+ }
+
+ status = H5Dread(dataset, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+ VRFY_G((status >= 0), "dataset read succeeded");
+
+ /* verify the read data with original expected data */
+ status = ccdataset_vrfy(start, count, stride, block, data_array1, data_origin1, mem_selection);
+ if (status)
+ nerrors++;
+
+ status = H5Pclose(xfer_plist);
+ VRFY_G((status >= 0), "property list closed");
+
+ /* close dataset collectively */
+ status = H5Dclose(dataset);
+ VRFY_G((status >= 0), "H5Dclose");
+
+ /* release all IDs created */
+ status = H5Sclose(file_dataspace);
+ VRFY_G((status >= 0), "H5Sclose");
+
+ status = H5Sclose(mem_dataspace);
+ VRFY_G((status >= 0), "H5Sclose");
+
+ /* close the file collectively */
+ status = H5Fclose(file);
+ VRFY_G((status >= 0), "H5Fclose");
+
+ /* release data buffers */
+ if (coords)
+ HDfree(coords);
+ if (data_array1)
+ HDfree(data_array1);
+ if (data_origin1)
+ HDfree(data_origin1);
+}
+
+/* Test driver: initializes MPI, checks VOL connector capability flags, runs
+ * the big-dataset and collective-chunk sub-tests (all collective), deletes
+ * the test files from rank 0, and shuts HDF5/MPI down. */
+int
+main(int argc, char **argv)
+{
+ hid_t acc_plist = H5I_INVALID_HID;
+
+ MPI_Init(&argc, &argv);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size_g);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank_g);
+
+ /* Attempt to turn off atexit post-processing so that, if errors occur
+ * during the test and the process is aborted, it does not hang in atexit
+ * handlers that might attempt MPI calls -- by that point MPI calls may
+ * no longer work.
+ */
+ if (H5dont_atexit() < 0)
+ HDprintf("Failed to turn off atexit processing. Continue.\n");
+
+ /* set alarm. */
+ /* TestAlarmOn(); */
+
+ acc_plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+
+ /* Get the capability flag of the VOL connector being used */
+ /* NOTE(review): on this early-exit path (and the one below) acc_plist is
+ * not H5Pclose'd and H5close() is not called; harmless at process exit
+ * but inconsistent with the normal shutdown path. */
+ if (H5Pget_vol_cap_flags(acc_plist, &vol_cap_flags_g) < 0) {
+ if (MAIN_PROCESS)
+ HDprintf("Failed to get the capability flag of the VOL connector being used\n");
+
+ MPI_Finalize();
+ return 0;
+ }
+
+ /* Make sure the connector supports the API functions being tested. This test only
+ * uses a few API functions, such as H5Fcreate/open/close/delete, H5Dcreate/write/read/close,
+ * and H5Dget_space. */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAIN_PROCESS)
+ HDprintf(
+ "API functions for basic file, dataset basic or more aren't supported with this connector\n");
+
+ MPI_Finalize();
+ return 0;
+ }
+
+ /* All sub-tests below are collective; barriers keep ranks in lock-step
+ * between them. */
+ dataset_big_write();
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ dataset_big_read();
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ coll_chunk1();
+ MPI_Barrier(MPI_COMM_WORLD);
+ coll_chunk2();
+ MPI_Barrier(MPI_COMM_WORLD);
+ coll_chunk3();
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ single_rank_independent_io();
+
+ /* turn off alarm */
+ /* TestAlarmOff(); */
+
+ /* Only rank 0 deletes the test files, using an MPI_COMM_SELF fapl so the
+ * delete is not a collective operation. */
+ if (mpi_rank_g == 0) {
+ hid_t fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+
+ H5Pset_fapl_mpio(fapl_id, MPI_COMM_SELF, MPI_INFO_NULL);
+
+ /* Suppress errors: the files may not exist if earlier tests failed. */
+ H5E_BEGIN_TRY
+ {
+ H5Fdelete(FILENAME[0], fapl_id);
+ H5Fdelete(FILENAME[1], fapl_id);
+ }
+ H5E_END_TRY;
+
+ H5Pclose(fapl_id);
+ }
+
+ H5Pclose(acc_plist);
+
+ /* close HDF5 library */
+ H5close();
+
+ MPI_Finalize();
+
+ return 0;
+}
diff --git a/testpar/API/t_chunk_alloc.c b/testpar/API/t_chunk_alloc.c
new file mode 100644
index 0000000..dd78225
--- /dev/null
+++ b/testpar/API/t_chunk_alloc.c
@@ -0,0 +1,512 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * This verifies if the storage space allocation methods are compatible between
+ * serial and parallel modes.
+ *
+ * Created by: Christian Chilan and Albert Cheng
+ * Date: 2006/05/25
+ */
+
+#include "hdf5.h"
+#include "testphdf5.h"
+static int mpi_size, mpi_rank;
+
+#define DSET_NAME "ExtendibleArray"
+#define CHUNK_SIZE 1000 /* #elements per chunk */
+#define CHUNK_FACTOR 200 /* default dataset size in terms of chunks */
+#define CLOSE 1
+#define NO_CLOSE 0
+
+/* Compiled out (#if 0): the file-size verification that used this helper is
+ * also disabled in this port of the test (see the #if 0 blocks in the
+ * routines below).  Returns the size in bytes of `filename` via MPI-IO. */
+#if 0
+static MPI_Offset
+get_filesize(const char *filename)
+{
+ int mpierr;
+ MPI_File fd;
+ MPI_Offset filesize;
+
+ mpierr = MPI_File_open(MPI_COMM_SELF, filename, MPI_MODE_RDONLY, MPI_INFO_NULL, &fd);
+ VRFY((mpierr == MPI_SUCCESS), "");
+
+ mpierr = MPI_File_get_size(fd, &filesize);
+ VRFY((mpierr == MPI_SUCCESS), "");
+
+ mpierr = MPI_File_close(&fd);
+ VRFY((mpierr == MPI_SUCCESS), "");
+
+ return (filesize);
+}
+#endif
+
+typedef enum write_pattern { none, sec_last, all } write_type;
+
+typedef enum access_ { write_all, open_only, extend_only } access_type;
+
+/*
+ * This creates a dataset serially with chunks, each of CHUNK_SIZE
+ * elements. The allocation time is set to H5D_ALLOC_TIME_EARLY. Another
+ * routine will open this in parallel for extension test.
+ */
+/* Serially creates (on MAINPROCESS only) an extendible 1-D chunked dataset of
+ * CHUNK_SIZE-element chunks with H5D_ALLOC_TIME_EARLY allocation, optionally
+ * writing the second-to-last chunk (write_pattern == sec_last).  All other
+ * ranks simply wait at the trailing barrier.  The dataset is later opened in
+ * parallel by parallel_access_dataset(). */
+static void
+create_chunked_dataset(const char *filename, int chunk_factor, write_type write_pattern)
+{
+ hid_t file_id, dataset; /* handles */
+ hid_t dataspace, memspace;
+ hid_t cparms;
+ hsize_t dims[1];
+ hsize_t maxdims[1] = {H5S_UNLIMITED};
+
+ hsize_t chunk_dims[1] = {CHUNK_SIZE};
+ hsize_t count[1];
+ hsize_t stride[1];
+ hsize_t block[1];
+ hsize_t offset[1]; /* Selection offset within dataspace */
+ /* Variables used in reading data back */
+ char buffer[CHUNK_SIZE]; /* holds exactly one chunk */
+ long nchunks;
+ herr_t hrc;
+#if 0
+ MPI_Offset filesize, /* actual file size */
+ est_filesize; /* estimated file size */
+#endif
+ /* set up MPI parameters */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Only MAINPROCESS should create the file. Others just wait. */
+ if (MAINPROCESS) {
+ nchunks = chunk_factor * mpi_size;
+ dims[0] = (hsize_t)(nchunks * CHUNK_SIZE);
+ /* Create the data space with unlimited dimensions. */
+ dataspace = H5Screate_simple(1, dims, maxdims);
+ VRFY((dataspace >= 0), "");
+
+ memspace = H5Screate_simple(1, chunk_dims, NULL);
+ VRFY((memspace >= 0), "");
+
+ /* Create a new file. If file exists its contents will be overwritten. */
+ file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((file_id >= 0), "H5Fcreate");
+
+ /* Modify dataset creation properties, i.e. enable chunking */
+ cparms = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((cparms >= 0), "");
+
+ /* Early allocation: chunks get file space at create time, which is what
+ * this test verifies for serial/parallel compatibility. */
+ hrc = H5Pset_alloc_time(cparms, H5D_ALLOC_TIME_EARLY);
+ VRFY((hrc >= 0), "");
+
+ hrc = H5Pset_chunk(cparms, 1, chunk_dims);
+ VRFY((hrc >= 0), "");
+
+ /* Create a new dataset within the file using cparms creation properties. */
+ dataset =
+ H5Dcreate2(file_id, DSET_NAME, H5T_NATIVE_UCHAR, dataspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
+ VRFY((dataset >= 0), "");
+
+ if (write_pattern == sec_last) {
+ /* Write the value 100 into the second-to-last chunk only. */
+ HDmemset(buffer, 100, CHUNK_SIZE);
+
+ count[0] = 1;
+ stride[0] = 1;
+ block[0] = chunk_dims[0];
+ offset[0] = (hsize_t)(nchunks - 2) * chunk_dims[0];
+
+ hrc = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, offset, stride, count, block);
+ VRFY((hrc >= 0), "");
+
+ /* Write sec_last chunk */
+ hrc = H5Dwrite(dataset, H5T_NATIVE_UCHAR, memspace, dataspace, H5P_DEFAULT, buffer);
+ VRFY((hrc >= 0), "H5Dwrite");
+ } /* end if */
+
+ /* Close resources; -1 marks the handles as closed for later reuse. */
+ hrc = H5Dclose(dataset);
+ VRFY((hrc >= 0), "");
+ dataset = -1;
+
+ hrc = H5Sclose(dataspace);
+ VRFY((hrc >= 0), "");
+
+ hrc = H5Sclose(memspace);
+ VRFY((hrc >= 0), "");
+
+ hrc = H5Pclose(cparms);
+ VRFY((hrc >= 0), "");
+
+ hrc = H5Fclose(file_id);
+ VRFY((hrc >= 0), "");
+ file_id = -1;
+
+#if 0
+ /* verify file size */
+ filesize = get_filesize(filename);
+ est_filesize = (MPI_Offset)nchunks * (MPI_Offset)CHUNK_SIZE * (MPI_Offset)sizeof(unsigned char);
+ VRFY((filesize >= est_filesize), "file size check");
+#endif
+ }
+
+ /* Make sure all processes are done before exiting this routine. Otherwise,
+ * other tests may start and change the test data file before some processes
+ * of this test are still accessing the file.
+ */
+
+ MPI_Barrier(MPI_COMM_WORLD);
+}
+
+/*
+ * This program performs three different types of parallel access. It writes on
+ * the entire dataset, it extends the dataset to nchunks*CHUNK_SIZE, and it only
+ * opens the dataset. At the end, it verifies the size of the dataset to be
+ * consistent with argument 'chunk_factor'.
+ */
+/* Opens the chunked dataset in parallel (MPI-IO fapl) and performs one of
+ * three access patterns selected by `action`:
+ *   write_all   - all ranks write every chunk in an interleaved round-robin
+ *   extend_only - grow the dataset to nchunks*CHUNK_SIZE if it is smaller
+ *   open_only   - just open and close
+ * *file_id / *dataset are in/out: a value < 0 means "not yet open" and this
+ * routine opens it; both are closed and reset to -1 before returning. */
+static void
+parallel_access_dataset(const char *filename, int chunk_factor, access_type action, hid_t *file_id,
+ hid_t *dataset)
+{
+ /* HDF5 gubbins */
+ hid_t memspace, dataspace; /* HDF5 file identifier */
+ hid_t access_plist; /* HDF5 ID for file access property list */
+ herr_t hrc; /* HDF5 return code */
+ hsize_t size[1];
+
+ hsize_t chunk_dims[1] = {CHUNK_SIZE};
+ hsize_t count[1];
+ hsize_t stride[1];
+ hsize_t block[1];
+ hsize_t offset[1]; /* Selection offset within dataspace */
+ hsize_t dims[1];
+ hsize_t maxdims[1];
+
+ /* Variables used in reading data back */
+ char buffer[CHUNK_SIZE];
+ int i;
+ long nchunks;
+#if 0
+ /* MPI Gubbins */
+ MPI_Offset filesize, /* actual file size */
+ est_filesize; /* estimated file size */
+#endif
+
+ /* Initialize MPI */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ nchunks = chunk_factor * mpi_size;
+
+ /* Set up MPIO file access property lists */
+ access_plist = H5Pcreate(H5P_FILE_ACCESS);
+ VRFY((access_plist >= 0), "");
+
+ hrc = H5Pset_fapl_mpio(access_plist, MPI_COMM_WORLD, MPI_INFO_NULL);
+ VRFY((hrc >= 0), "");
+
+ /* Open the file */
+ if (*file_id < 0) {
+ *file_id = H5Fopen(filename, H5F_ACC_RDWR, access_plist);
+ VRFY((*file_id >= 0), "");
+ }
+
+ /* Open dataset*/
+ if (*dataset < 0) {
+ *dataset = H5Dopen2(*file_id, DSET_NAME, H5P_DEFAULT);
+ VRFY((*dataset >= 0), "");
+ }
+
+ /* Make sure all processes are done before continuing. Otherwise, one
+ * process could change the dataset extent before another finishes opening
+ * it, resulting in only some of the processes calling H5Dset_extent(). */
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ memspace = H5Screate_simple(1, chunk_dims, NULL);
+ VRFY((memspace >= 0), "");
+
+ dataspace = H5Dget_space(*dataset);
+ VRFY((dataspace >= 0), "");
+
+ /* Target extent: the full dataset size implied by chunk_factor. */
+ size[0] = (hsize_t)nchunks * CHUNK_SIZE;
+
+ switch (action) {
+
+ /* all chunks are written by all the processes in an interleaved way*/
+ case write_all:
+
+ /* Each rank writes the value (rank+1) into its chunks; chunk k is
+ * owned by rank (k % mpi_size). */
+ HDmemset(buffer, mpi_rank + 1, CHUNK_SIZE);
+ count[0] = 1;
+ stride[0] = 1;
+ block[0] = chunk_dims[0];
+ for (i = 0; i < nchunks / mpi_size; i++) {
+ offset[0] = (hsize_t)(i * mpi_size + mpi_rank) * chunk_dims[0];
+
+ hrc = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, offset, stride, count, block);
+ VRFY((hrc >= 0), "");
+
+ /* Write the buffer out */
+ hrc = H5Dwrite(*dataset, H5T_NATIVE_UCHAR, memspace, dataspace, H5P_DEFAULT, buffer);
+ VRFY((hrc >= 0), "H5Dwrite");
+ }
+
+ break;
+
+ /* only extends the dataset */
+ case extend_only:
+ /* check if new size is larger than old size */
+ hrc = H5Sget_simple_extent_dims(dataspace, dims, maxdims);
+ VRFY((hrc >= 0), "");
+
+ /* Extend dataset*/
+ if (size[0] > dims[0]) {
+ /* H5Dset_extent is collective; the barrier above guarantees all
+ * ranks reach this point together. */
+ hrc = H5Dset_extent(*dataset, size);
+ VRFY((hrc >= 0), "");
+ }
+ break;
+
+ /* only opens the *dataset */
+ case open_only:
+ break;
+ default:
+ HDassert(0);
+ }
+
+ /* Close up; reset the in/out handles so callers know they are closed. */
+ hrc = H5Dclose(*dataset);
+ VRFY((hrc >= 0), "");
+ *dataset = -1;
+
+ hrc = H5Sclose(dataspace);
+ VRFY((hrc >= 0), "");
+
+ hrc = H5Sclose(memspace);
+ VRFY((hrc >= 0), "");
+
+ hrc = H5Fclose(*file_id);
+ VRFY((hrc >= 0), "");
+ *file_id = -1;
+
+#if 0
+ /* verify file size */
+ filesize = get_filesize(filename);
+ est_filesize = (MPI_Offset)nchunks * (MPI_Offset)CHUNK_SIZE * (MPI_Offset)sizeof(unsigned char);
+ VRFY((filesize >= est_filesize), "file size check");
+#endif
+
+ /* Can close some plists */
+ hrc = H5Pclose(access_plist);
+ VRFY((hrc >= 0), "");
+
+ /* Make sure all processes are done before exiting this routine. Otherwise,
+ * other tests may start and change the test data file before some processes
+ * of this test are still accessing the file.
+ */
+ MPI_Barrier(MPI_COMM_WORLD);
+}
+
+/*
+ * This routine verifies the data written in the dataset. It does one of the
+ * three cases according to the value of parameter `write_pattern'.
+ * 1. it returns correct fill values though the dataset has not been written;
+ * 2. it still returns correct fill values though only a small part is written;
+ * 3. it returns correct values when the whole dataset has been written in an
+ * interleaved pattern.
+ */
+static void
+verify_data(const char *filename, int chunk_factor, write_type write_pattern, int vclose, hid_t *file_id,
+ hid_t *dataset)
+{
+ /* HDF5 gubbins */
+ hid_t dataspace, memspace; /* HDF5 file identifier */
+ hid_t access_plist; /* HDF5 ID for file access property list */
+ herr_t hrc; /* HDF5 return code */
+
+ hsize_t chunk_dims[1] = {CHUNK_SIZE};
+ hsize_t count[1];
+ hsize_t stride[1];
+ hsize_t block[1];
+ hsize_t offset[1]; /* Selection offset within dataspace */
+ /* Variables used in reading data back */
+ char buffer[CHUNK_SIZE];
+ int value, i;
+ int index_l;
+ long nchunks;
+ /* Initialize MPI */
+ /* NOTE(review): mpi_size/mpi_rank appear to be file-scope globals shared by
+ * the other routines in this file -- confirm against the full file. */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Total number of chunks in the 1-D dataset */
+ nchunks = chunk_factor * mpi_size;
+
+ /* Set up MPIO file access property lists */
+ access_plist = H5Pcreate(H5P_FILE_ACCESS);
+ VRFY((access_plist >= 0), "");
+
+ hrc = H5Pset_fapl_mpio(access_plist, MPI_COMM_WORLD, MPI_INFO_NULL);
+ VRFY((hrc >= 0), "");
+
+ /* Open the file (only when the caller did not hand us an already-open
+ * file id; the NO_CLOSE flow in test_chunk_alloc reuses open ids) */
+ if (*file_id < 0) {
+ *file_id = H5Fopen(filename, H5F_ACC_RDWR, access_plist);
+ VRFY((*file_id >= 0), "");
+ }
+
+ /* Open dataset (same reuse convention as the file id above) */
+ if (*dataset < 0) {
+ *dataset = H5Dopen2(*file_id, DSET_NAME, H5P_DEFAULT);
+ VRFY((*dataset >= 0), "");
+ }
+
+ /* Memory space holds exactly one chunk's worth of elements */
+ memspace = H5Screate_simple(1, chunk_dims, NULL);
+ VRFY((memspace >= 0), "");
+
+ dataspace = H5Dget_space(*dataset);
+ VRFY((dataspace >= 0), "");
+
+ /* all processes check all chunks. */
+ count[0] = 1;
+ stride[0] = 1;
+ block[0] = chunk_dims[0];
+ for (i = 0; i < nchunks; i++) {
+ /* reset buffer values to -1 so stale/unwritten data is detectable */
+ HDmemset(buffer, -1, CHUNK_SIZE);
+
+ /* Select the i-th chunk in the file dataspace */
+ offset[0] = (hsize_t)i * chunk_dims[0];
+
+ hrc = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, offset, stride, count, block);
+ VRFY((hrc >= 0), "");
+
+ /* Read the chunk */
+ hrc = H5Dread(*dataset, H5T_NATIVE_UCHAR, memspace, dataspace, H5P_DEFAULT, buffer);
+ VRFY((hrc >= 0), "H5Dread");
+
+ /* set expected value according the write pattern */
+ /* NOTE(review): if asserts are compiled out and write_pattern is
+ * invalid, `value` stays uninitialized below -- benign for a test,
+ * but worth confirming. */
+ switch (write_pattern) {
+ case all:
+ /* interleaved write: chunk i was written by rank i%mpi_size
+ * with value rank+1 */
+ value = i % mpi_size + 1;
+ break;
+ case none:
+ /* nothing written: expect the (zero) fill value */
+ value = 0;
+ break;
+ case sec_last:
+ /* only the second-to-last chunk was written (with 100) */
+ if (i == nchunks - 2)
+ value = 100;
+ else
+ value = 0;
+ break;
+ default:
+ HDassert(0);
+ }
+
+ /* verify content of the chunk */
+ for (index_l = 0; index_l < CHUNK_SIZE; index_l++)
+ VRFY((buffer[index_l] == value), "data verification");
+ }
+
+ hrc = H5Sclose(dataspace);
+ VRFY((hrc >= 0), "");
+
+ hrc = H5Sclose(memspace);
+ VRFY((hrc >= 0), "");
+
+ /* Can close some plists */
+ hrc = H5Pclose(access_plist);
+ VRFY((hrc >= 0), "");
+
+ /* Close up (vclose == NO_CLOSE leaves *file_id / *dataset open so the
+ * caller can keep using them; -1 marks an id as closed) */
+ if (vclose) {
+ hrc = H5Dclose(*dataset);
+ VRFY((hrc >= 0), "");
+ *dataset = -1;
+
+ hrc = H5Fclose(*file_id);
+ VRFY((hrc >= 0), "");
+ *file_id = -1;
+ }
+
+ /* Make sure all processes are done before exiting this routine. Otherwise,
+ * other tests may start and change the test data file before some processes
+ * of this test are still accessing the file.
+ */
+ MPI_Barrier(MPI_COMM_WORLD);
+}
+
+/*
+ * Test following possible scenarios,
+ * Case 1:
+ * Sequential create a file and dataset with H5D_ALLOC_TIME_EARLY and large
+ * size, no write, close, reopen in parallel, read to verify all return
+ * the fill value.
+ * Case 2:
+ * Sequential create a file and dataset with H5D_ALLOC_TIME_EARLY but small
+ * size, no write, close, reopen in parallel, extend to large size, then close,
+ * then reopen in parallel and read to verify all return the fill value.
+ * Case 3:
+ * Sequential create a file and dataset with H5D_ALLOC_TIME_EARLY and large
+ * size, write just a small part of the dataset (second to the last), close,
+ * then reopen in parallel, read to verify all return the fill value except
+ * those small portion that has been written. Without closing it, writes
+ * all parts of the dataset in a interleave pattern, close it, and reopen
+ * it, read to verify all data are as written.
+ */
+void
+test_chunk_alloc(void)
+{
+ const char *filename;
+ hid_t file_id, dataset;
+
+ /* -1 == "not open"; the helpers open/close and update these in place */
+ file_id = dataset = -1;
+
+ /* Initialize MPI */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset, or dataset more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ filename = (const char *)PARATESTFILE /* GetTestParameters() */;
+ if (VERBOSE_MED)
+ HDprintf("Extend Chunked allocation test on file %s\n", filename);
+
+ /* Case 1: early-allocated dataset, never written -- every chunk must
+ * read back as the fill value */
+ /* Create chunked dataset without writing anything.*/
+ create_chunked_dataset(filename, CHUNK_FACTOR, none);
+ /* reopen dataset in parallel and check for file size */
+ parallel_access_dataset(filename, CHUNK_FACTOR, open_only, &file_id, &dataset);
+ /* reopen dataset in parallel, read and verify the data */
+ verify_data(filename, CHUNK_FACTOR, none, CLOSE, &file_id, &dataset);
+
+ /* Case 2: small dataset extended in parallel -- extended region must
+ * also read back as the fill value */
+ /* Create chunked dataset without writing anything */
+ create_chunked_dataset(filename, 20, none);
+ /* reopen dataset in parallel and only extend it */
+ parallel_access_dataset(filename, CHUNK_FACTOR, extend_only, &file_id, &dataset);
+ /* reopen dataset in parallel, read and verify the data */
+ verify_data(filename, CHUNK_FACTOR, none, CLOSE, &file_id, &dataset);
+
+ /* Case 3: partial write, verify (keeping ids open), then interleaved
+ * full write and final verification */
+ /* Create chunked dataset and write in the second to last chunk */
+ create_chunked_dataset(filename, CHUNK_FACTOR, sec_last);
+ /* Reopen dataset in parallel, read and verify the data. The file and dataset are not closed*/
+ verify_data(filename, CHUNK_FACTOR, sec_last, NO_CLOSE, &file_id, &dataset);
+ /* All processes write in all the chunks in a interleaved way */
+ parallel_access_dataset(filename, CHUNK_FACTOR, write_all, &file_id, &dataset);
+ /* reopen dataset in parallel, read and verify the data */
+ verify_data(filename, CHUNK_FACTOR, all, CLOSE, &file_id, &dataset);
+}
diff --git a/testpar/API/t_coll_chunk.c b/testpar/API/t_coll_chunk.c
new file mode 100644
index 0000000..57ee605
--- /dev/null
+++ b/testpar/API/t_coll_chunk.c
@@ -0,0 +1,1417 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "hdf5.h"
+#include "testphdf5.h"
+
+#define HYPER 1
+#define POINT 2
+#define ALL 3
+
+/* some commonly used routines for collective chunk IO tests*/
+
+static void ccslab_set(int mpi_rank, int mpi_size, hsize_t start[], hsize_t count[], hsize_t stride[],
+ hsize_t block[], int mode);
+
+static void ccdataset_fill(hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[],
+ DATATYPE *dataset, int mem_selection);
+
+static void ccdataset_print(hsize_t start[], hsize_t block[], DATATYPE *dataset);
+
+static int ccdataset_vrfy(hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[],
+ DATATYPE *dataset, DATATYPE *original, int mem_selection);
+
+static void coll_chunktest(const char *filename, int chunk_factor, int select_factor, int api_option,
+ int file_selection, int mem_selection, int mode);
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunk1
+ *
+ * Purpose: Wrapper to test the collective chunk IO for regular JOINT
+ selection with a single chunk
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* ------------------------------------------------------------------------
+ * Descriptions for the selection: One big singular selection inside one chunk
+ * Two dimensions,
+ *
+ * dim1 = SPACE_DIM1(5760)*mpi_size
+ * dim2 = SPACE_DIM2(3)
+ * chunk_dim1 = dim1
+ * chunk_dim2 = dim2
+ * block = 1 for all dimensions
+ * stride = 1 for all dimensions
+ * count0 = SPACE_DIM1(5760)
+ * count1 = SPACE_DIM2(3)
+ * start0 = mpi_rank*SPACE_DIM1
+ * start1 = 0
+ * ------------------------------------------------------------------------
+ */
+
+void
+coll_chunk1(void)
+{
+ const char *filename = PARATESTFILE /* GetTestParameters() */;
+ int mpi_rank;
+
+ /* mpi_rank is presumably referenced by the MAINPROCESS macro below */
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset or dataset more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* Single chunk (chunk_factor 1), contiguous by-row selection; cover the
+ * file-space/memory-space selection combinations, points out of order */
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, HYPER, HYPER, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, HYPER, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, POINT, ALL, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, POINT, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, POINT, HYPER, OUT_OF_ORDER);
+
+ /* Repeat the point-selection cases with in-order point ordering */
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, POINT, ALL, IN_ORDER);
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, POINT, POINT, IN_ORDER);
+ coll_chunktest(filename, 1, BYROW_CONT, API_NONE, POINT, HYPER, IN_ORDER);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunk2
+ *
+ * Purpose: Wrapper to test the collective chunk IO for regular DISJOINT
+ selection with a single chunk
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* ------------------------------------------------------------------------
+ * Descriptions for the selection: many disjoint selections inside one chunk
+ * Two dimensions,
+ *
+ * dim1 = SPACE_DIM1*mpi_size(5760)
+ * dim2 = SPACE_DIM2(3)
+ * chunk_dim1 = dim1
+ * chunk_dim2 = dim2
+ * block = 1 for all dimensions
+ * stride = 3 for all dimensions
+ * count0 = SPACE_DIM1/stride0(5760/3)
+ * count1 = SPACE_DIM2/stride(3/3 = 1)
+ * start0 = mpi_rank*SPACE_DIM1
+ * start1 = 0
+ *
+ * ------------------------------------------------------------------------
+ */
+void
+coll_chunk2(void)
+{
+ const char *filename = PARATESTFILE /* GetTestParameters() */;
+ int mpi_rank;
+
+ /* mpi_rank is presumably referenced by the MAINPROCESS macro below */
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset or dataset more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* Single chunk, discontiguous (strided) by-row selection; points out of
+ * order first */
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, HYPER, HYPER, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, HYPER, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, POINT, ALL, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, POINT, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, POINT, HYPER, OUT_OF_ORDER);
+
+ /* Same point-selection cases, in-order point ordering */
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, POINT, ALL, IN_ORDER);
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, POINT, POINT, IN_ORDER);
+ coll_chunktest(filename, 1, BYROW_DISCONT, API_NONE, POINT, HYPER, IN_ORDER);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunk3
+ *
+ * Purpose: Wrapper to test the collective chunk IO for regular JOINT
+ selection with at least number of 2*mpi_size chunks
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* ------------------------------------------------------------------------
+ * Descriptions for the selection: one singular selection across many chunks
+ * Two dimensions, Num of chunks = 2* mpi_size
+ *
+ * dim1 = SPACE_DIM1*mpi_size
+ * dim2 = SPACE_DIM2(3)
+ * chunk_dim1 = SPACE_DIM1
+ * chunk_dim2 = dim2/2
+ * block = 1 for all dimensions
+ * stride = 1 for all dimensions
+ * count0 = SPACE_DIM1
+ * count1 = SPACE_DIM2(3)
+ * start0 = mpi_rank*SPACE_DIM1
+ * start1 = 0
+ *
+ * ------------------------------------------------------------------------
+ */
+
+void
+coll_chunk3(void)
+{
+ const char *filename = PARATESTFILE /* GetTestParameters() */;
+ int mpi_size;
+ int mpi_rank;
+
+ /* mpi_size doubles as the chunk factor below, scaling the number of
+ * chunks with the number of ranks */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset or dataset more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* Contiguous by-row selection spanning many chunks; points out of order */
+ coll_chunktest(filename, mpi_size, BYROW_CONT, API_NONE, HYPER, HYPER, OUT_OF_ORDER);
+ coll_chunktest(filename, mpi_size, BYROW_CONT, API_NONE, HYPER, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, mpi_size, BYROW_CONT, API_NONE, POINT, ALL, OUT_OF_ORDER);
+ coll_chunktest(filename, mpi_size, BYROW_CONT, API_NONE, POINT, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, mpi_size, BYROW_CONT, API_NONE, POINT, HYPER, OUT_OF_ORDER);
+
+ /* Same point-selection cases, in-order point ordering */
+ coll_chunktest(filename, mpi_size, BYROW_CONT, API_NONE, POINT, ALL, IN_ORDER);
+ coll_chunktest(filename, mpi_size, BYROW_CONT, API_NONE, POINT, POINT, IN_ORDER);
+ coll_chunktest(filename, mpi_size, BYROW_CONT, API_NONE, POINT, HYPER, IN_ORDER);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunk4
+ *
+ * Purpose: Wrapper to test the collective chunk IO for regular JOINT
+ selection with at least number of 2*mpi_size chunks
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* ------------------------------------------------------------------------
+ * Descriptions for the selection: one singular selection across many chunks
+ * Two dimensions, Num of chunks = 2* mpi_size
+ *
+ * dim1 = SPACE_DIM1*mpi_size
+ * dim2 = SPACE_DIM2
+ * chunk_dim1 = dim1
+ * chunk_dim2 = dim2
+ * block = 1 for all dimensions
+ * stride = 1 for all dimensions
+ * count0 = SPACE_DIM1
+ * count1 = SPACE_DIM2(3)
+ * start0 = mpi_rank*SPACE_DIM1
+ * start1 = 0
+ *
+ * ------------------------------------------------------------------------
+ */
+
+void
+coll_chunk4(void)
+{
+ const char *filename = PARATESTFILE /* GetTestParameters() */;
+ int mpi_rank;
+
+ /* mpi_rank is presumably referenced by the MAINPROCESS macro below */
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset or dataset more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* Some ranks select nothing (BYROW_SELECTNONE) -- exercises collective
+ * I/O where participation is uneven; points out of order first */
+ coll_chunktest(filename, 1, BYROW_SELECTNONE, API_NONE, HYPER, HYPER, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_SELECTNONE, API_NONE, HYPER, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_SELECTNONE, API_NONE, POINT, ALL, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_SELECTNONE, API_NONE, POINT, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 1, BYROW_SELECTNONE, API_NONE, POINT, HYPER, OUT_OF_ORDER);
+
+ /* Same point-selection cases, in-order point ordering */
+ coll_chunktest(filename, 1, BYROW_SELECTNONE, API_NONE, POINT, ALL, IN_ORDER);
+ coll_chunktest(filename, 1, BYROW_SELECTNONE, API_NONE, POINT, POINT, IN_ORDER);
+ coll_chunktest(filename, 1, BYROW_SELECTNONE, API_NONE, POINT, HYPER, IN_ORDER);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunk5
+ *
+ * Purpose: Wrapper to test the collective chunk IO for regular JOINT
+ selection with at least number of 2*mpi_size chunks
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* ------------------------------------------------------------------------
+ * Descriptions for the selection: one singular selection across many chunks
+ * Two dimensions, Num of chunks = 2* mpi_size
+ *
+ * dim1 = SPACE_DIM1*mpi_size
+ * dim2 = SPACE_DIM2
+ * chunk_dim1 = dim1
+ * chunk_dim2 = dim2
+ * block = 1 for all dimensions
+ * stride = 1 for all dimensions
+ * count0 = SPACE_DIM1
+ * count1 = SPACE_DIM2(3)
+ * start0 = mpi_rank*SPACE_DIM1
+ * start1 = 0
+ *
+ * ------------------------------------------------------------------------
+ */
+
+void
+coll_chunk5(void)
+{
+ const char *filename = PARATESTFILE /* GetTestParameters() */;
+ int mpi_rank;
+
+ /* mpi_rank is presumably referenced by the MAINPROCESS macro below */
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset or dataset more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* Unbalanced per-rank selections, forcing link-chunk collective I/O
+ * (API_LINK_HARD); points out of order first */
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_HARD, HYPER, HYPER, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_HARD, HYPER, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_HARD, POINT, ALL, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_HARD, POINT, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_HARD, POINT, HYPER, OUT_OF_ORDER);
+
+ /* Same point-selection cases, in-order point ordering */
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_HARD, POINT, ALL, IN_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_HARD, POINT, POINT, IN_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_HARD, POINT, HYPER, IN_ORDER);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunk6
+ *
+ * Purpose: Test direct request for multi-chunk-io.
+ * Wrapper to test the collective chunk IO for regular JOINT
+ * selection with at least number of 2*mpi_size chunks
+ * Test for direct to Multi Chunk I/O.
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* ------------------------------------------------------------------------
+ * Descriptions for the selection: one singular selection across many chunks
+ * Two dimensions, Num of chunks = 2* mpi_size
+ *
+ * dim1 = SPACE_DIM1*mpi_size
+ * dim2 = SPACE_DIM2
+ * chunk_dim1 = dim1
+ * chunk_dim2 = dim2
+ * block = 1 for all dimensions
+ * stride = 1 for all dimensions
+ * count0 = SPACE_DIM1
+ * count1 = SPACE_DIM2(3)
+ * start0 = mpi_rank*SPACE_DIM1
+ * start1 = 0
+ *
+ * ------------------------------------------------------------------------
+ */
+
+void
+coll_chunk6(void)
+{
+ const char *filename = PARATESTFILE /* GetTestParameters() */;
+ int mpi_rank;
+
+ /* mpi_rank is presumably referenced by the MAINPROCESS macro below */
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset or dataset more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* Unbalanced selections with multi-chunk collective I/O forced
+ * (API_MULTI_HARD); points out of order first */
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_HARD, HYPER, HYPER, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_HARD, HYPER, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_HARD, POINT, ALL, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_HARD, POINT, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_HARD, POINT, HYPER, OUT_OF_ORDER);
+
+ /* Same point-selection cases, in-order point ordering */
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_HARD, POINT, ALL, IN_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_HARD, POINT, POINT, IN_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_HARD, POINT, HYPER, IN_ORDER);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunk7
+ *
+ * Purpose: Wrapper to test the collective chunk IO for regular JOINT
+ selection with at least number of 2*mpi_size chunks
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* ------------------------------------------------------------------------
+ * Descriptions for the selection: one singular selection across many chunks
+ * Two dimensions, Num of chunks = 2* mpi_size
+ *
+ * dim1 = SPACE_DIM1*mpi_size
+ * dim2 = SPACE_DIM2
+ * chunk_dim1 = dim1
+ * chunk_dim2 = dim2
+ * block = 1 for all dimensions
+ * stride = 1 for all dimensions
+ * count0 = SPACE_DIM1
+ * count1 = SPACE_DIM2(3)
+ * start0 = mpi_rank*SPACE_DIM1
+ * start1 = 0
+ *
+ * ------------------------------------------------------------------------
+ */
+
+void
+coll_chunk7(void)
+{
+ const char *filename = PARATESTFILE /* GetTestParameters() */;
+ int mpi_rank;
+
+ /* mpi_rank is presumably referenced by the MAINPROCESS macro below */
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset or dataset more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* Unbalanced selections with the link-chunk decision expected true
+ * (API_LINK_TRUE); points out of order first */
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_TRUE, HYPER, HYPER, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_TRUE, HYPER, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_TRUE, POINT, ALL, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_TRUE, POINT, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_TRUE, POINT, HYPER, OUT_OF_ORDER);
+
+ /* Same point-selection cases, in-order point ordering */
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_TRUE, POINT, ALL, IN_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_TRUE, POINT, POINT, IN_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_TRUE, POINT, HYPER, IN_ORDER);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunk8
+ *
+ * Purpose: Wrapper to test the collective chunk IO for regular JOINT
+ selection with at least number of 2*mpi_size chunks
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* ------------------------------------------------------------------------
+ * Descriptions for the selection: one singular selection across many chunks
+ * Two dimensions, Num of chunks = 2* mpi_size
+ *
+ * dim1 = SPACE_DIM1*mpi_size
+ * dim2 = SPACE_DIM2
+ * chunk_dim1 = dim1
+ * chunk_dim2 = dim2
+ * block = 1 for all dimensions
+ * stride = 1 for all dimensions
+ * count0 = SPACE_DIM1
+ * count1 = SPACE_DIM2(3)
+ * start0 = mpi_rank*SPACE_DIM1
+ * start1 = 0
+ *
+ * ------------------------------------------------------------------------
+ */
+
+void
+coll_chunk8(void)
+{
+ const char *filename = PARATESTFILE /* GetTestParameters() */;
+ int mpi_rank;
+
+ /* mpi_rank is presumably referenced by the MAINPROCESS macro below */
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset or dataset more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* Unbalanced selections with the link-chunk decision expected false
+ * (API_LINK_FALSE); points out of order first */
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_FALSE, HYPER, HYPER, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_FALSE, HYPER, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_FALSE, POINT, ALL, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_FALSE, POINT, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_FALSE, POINT, HYPER, OUT_OF_ORDER);
+
+ /* Same point-selection cases, in-order point ordering */
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_FALSE, POINT, ALL, IN_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_FALSE, POINT, POINT, IN_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_LINK_FALSE, POINT, HYPER, IN_ORDER);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunk9
+ *
+ * Purpose: Wrapper to test the collective chunk IO for regular JOINT
+ selection with at least number of 2*mpi_size chunks
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* ------------------------------------------------------------------------
+ * Descriptions for the selection: one singular selection across many chunks
+ * Two dimensions, Num of chunks = 2* mpi_size
+ *
+ * dim1 = SPACE_DIM1*mpi_size
+ * dim2 = SPACE_DIM2
+ * chunk_dim1 = dim1
+ * chunk_dim2 = dim2
+ * block = 1 for all dimensions
+ * stride = 1 for all dimensions
+ * count0 = SPACE_DIM1
+ * count1 = SPACE_DIM2(3)
+ * start0 = mpi_rank*SPACE_DIM1
+ * start1 = 0
+ *
+ * ------------------------------------------------------------------------
+ */
+
+void
+coll_chunk9(void)
+{
+ const char *filename = PARATESTFILE /* GetTestParameters() */;
+ int mpi_rank;
+
+ /* mpi_rank is presumably referenced by the MAINPROCESS macro below */
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset or dataset more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* Unbalanced selections with multi-chunk collective I/O expected
+ * (API_MULTI_COLL); points out of order first */
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_COLL, HYPER, HYPER, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_COLL, HYPER, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_COLL, POINT, ALL, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_COLL, POINT, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_COLL, POINT, HYPER, OUT_OF_ORDER);
+
+ /* Same point-selection cases, in-order point ordering */
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_COLL, POINT, ALL, IN_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_COLL, POINT, POINT, IN_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTUNBALANCE, API_MULTI_COLL, POINT, HYPER, IN_ORDER);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunk10
+ *
+ * Purpose: Wrapper to test the collective chunk IO for regular JOINT
+ selection with at least number of 2*mpi_size chunks
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/* ------------------------------------------------------------------------
+ * Descriptions for the selection: one singular selection across many chunks
+ * Two dimensions, Num of chunks = 2* mpi_size
+ *
+ * dim1 = SPACE_DIM1*mpi_size
+ * dim2 = SPACE_DIM2
+ * chunk_dim1 = dim1
+ * chunk_dim2 = dim2
+ * block = 1 for all dimensions
+ * stride = 1 for all dimensions
+ * count0 = SPACE_DIM1
+ * count1 = SPACE_DIM2(3)
+ * start0 = mpi_rank*SPACE_DIM1
+ * start1 = 0
+ *
+ * ------------------------------------------------------------------------
+ */
+
+void
+coll_chunk10(void)
+{
+ const char *filename = PARATESTFILE /* GetTestParameters() */;
+ int mpi_rank;
+
+ /* mpi_rank is presumably referenced by the MAINPROCESS macro below */
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset or dataset more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* In-chunk selections with multi-chunk independent I/O expected
+ * (API_MULTI_IND); points out of order first */
+ coll_chunktest(filename, 4, BYROW_SELECTINCHUNK, API_MULTI_IND, HYPER, HYPER, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTINCHUNK, API_MULTI_IND, HYPER, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTINCHUNK, API_MULTI_IND, POINT, ALL, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTINCHUNK, API_MULTI_IND, POINT, POINT, OUT_OF_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTINCHUNK, API_MULTI_IND, POINT, HYPER, OUT_OF_ORDER);
+
+ /* Same point-selection cases, in-order point ordering */
+ coll_chunktest(filename, 4, BYROW_SELECTINCHUNK, API_MULTI_IND, POINT, ALL, IN_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTINCHUNK, API_MULTI_IND, POINT, POINT, IN_ORDER);
+ coll_chunktest(filename, 4, BYROW_SELECTINCHUNK, API_MULTI_IND, POINT, HYPER, IN_ORDER);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_chunktest
+ *
+ * Purpose: The real testing routine for regular selection of collective
+ chunking storage
+ testing both write and read,
+ If anything fails, it may be read or write. There is no
+ separation test between read and write.
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Modifications:
+ * Remove invalid temporary property checkings for API_LINK_HARD and
+ * API_LINK_TRUE cases.
+ * Programmer: Jonathan Kim
+ * Date: 2012-10-10
+ *
+ * Programmer: Unknown
+ * July 12th, 2004
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+
+static void
+coll_chunktest(const char *filename, int chunk_factor, int select_factor, int api_option, int file_selection,
+               int mem_selection, int mode)
+{
+    hid_t file, dataset, file_dataspace, mem_dataspace;
+    hid_t acc_plist, xfer_plist, crp_plist;
+
+    hsize_t dims[RANK], chunk_dims[RANK];
+    int    *data_array1  = NULL;
+    int    *data_origin1 = NULL;
+
+    hsize_t start[RANK], count[RANK], stride[RANK], block[RANK];
+
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+    unsigned prop_value;
+#endif /* H5_HAVE_INSTRUMENTED_LIBRARY */
+
+    int mpi_size, mpi_rank;
+
+    herr_t   status;
+    MPI_Comm comm = MPI_COMM_WORLD;
+    MPI_Info info = MPI_INFO_NULL;
+
+    size_t   num_points;    /* for point selection */
+    hsize_t *coords = NULL; /* for point selection */
+    hsize_t  current_dims;  /* for point selection */
+
+    /* set up MPI parameters */
+    MPI_Comm_size(comm, &mpi_size);
+    MPI_Comm_rank(comm, &mpi_rank);
+
+    /* ---- Write phase: create the file/dataset and write collectively ---- */
+
+    /* Create the data space */
+
+    acc_plist = create_faccess_plist(comm, info, facc_type);
+    VRFY((acc_plist >= 0), "");
+
+    file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_plist);
+    VRFY((file >= 0), "H5Fcreate succeeded");
+
+    status = H5Pclose(acc_plist);
+    VRFY((status >= 0), "");
+
+    /* setup dimensionality object */
+    dims[0] = (hsize_t)(SPACE_DIM1 * mpi_size);
+    dims[1] = SPACE_DIM2;
+
+    /* allocate memory for data buffer */
+    data_array1 = (int *)HDmalloc(dims[0] * dims[1] * sizeof(int));
+    VRFY((data_array1 != NULL), "data_array1 malloc succeeded");
+
+    /* set up dimensions of the slab this process accesses */
+    ccslab_set(mpi_rank, mpi_size, start, count, stride, block, select_factor);
+
+    /* set up the coords array selection */
+    num_points = block[0] * block[1] * count[0] * count[1];
+    coords     = (hsize_t *)HDmalloc(num_points * RANK * sizeof(hsize_t));
+    VRFY((coords != NULL), "coords malloc succeeded");
+    point_set(start, count, stride, block, num_points, coords, mode);
+
+    file_dataspace = H5Screate_simple(2, dims, NULL);
+    VRFY((file_dataspace >= 0), "file dataspace created succeeded");
+
+    if (ALL != mem_selection) {
+        mem_dataspace = H5Screate_simple(2, dims, NULL);
+        VRFY((mem_dataspace >= 0), "mem dataspace created succeeded");
+    }
+    else {
+        /* ALL memory selection: use a flat 1-D dataspace holding exactly
+         * the number of selected points. */
+        current_dims  = num_points;
+        mem_dataspace = H5Screate_simple(1, &current_dims, NULL);
+        VRFY((mem_dataspace >= 0), "mem_dataspace create succeeded");
+    }
+
+    crp_plist = H5Pcreate(H5P_DATASET_CREATE);
+    VRFY((crp_plist >= 0), "");
+
+    /* Set up chunk information. */
+    chunk_dims[0] = dims[0] / (hsize_t)chunk_factor;
+
+    /* to decrease the testing time, maintain bigger chunk size */
+    (chunk_factor == 1) ? (chunk_dims[1] = SPACE_DIM2) : (chunk_dims[1] = SPACE_DIM2 / 2);
+    status = H5Pset_chunk(crp_plist, 2, chunk_dims);
+    VRFY((status >= 0), "chunk creation property list succeeded");
+
+    dataset = H5Dcreate2(file, DSET_COLLECTIVE_CHUNK_NAME, H5T_NATIVE_INT, file_dataspace, H5P_DEFAULT,
+                         crp_plist, H5P_DEFAULT);
+    VRFY((dataset >= 0), "dataset created succeeded");
+
+    status = H5Pclose(crp_plist);
+    VRFY((status >= 0), "");
+
+    /*put some trivial data in the data array */
+    ccdataset_fill(start, stride, count, block, data_array1, mem_selection);
+
+    MESG("data_array initialized");
+
+    /* Apply the requested file-space selection (hyperslab, points, or all) */
+    switch (file_selection) {
+        case HYPER:
+            status = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+            VRFY((status >= 0), "hyperslab selection succeeded");
+            break;
+
+        case POINT:
+            if (num_points) {
+                status = H5Sselect_elements(file_dataspace, H5S_SELECT_SET, num_points, coords);
+                VRFY((status >= 0), "Element selection succeeded");
+            }
+            else {
+                status = H5Sselect_none(file_dataspace);
+                VRFY((status >= 0), "none selection succeeded");
+            }
+            break;
+
+        case ALL:
+            status = H5Sselect_all(file_dataspace);
+            VRFY((status >= 0), "H5Sselect_all succeeded");
+            break;
+    }
+
+    /* Apply the requested memory-space selection */
+    switch (mem_selection) {
+        case HYPER:
+            status = H5Sselect_hyperslab(mem_dataspace, H5S_SELECT_SET, start, stride, count, block);
+            VRFY((status >= 0), "hyperslab selection succeeded");
+            break;
+
+        case POINT:
+            if (num_points) {
+                status = H5Sselect_elements(mem_dataspace, H5S_SELECT_SET, num_points, coords);
+                VRFY((status >= 0), "Element selection succeeded");
+            }
+            else {
+                status = H5Sselect_none(mem_dataspace);
+                VRFY((status >= 0), "none selection succeeded");
+            }
+            break;
+
+        case ALL:
+            status = H5Sselect_all(mem_dataspace);
+            VRFY((status >= 0), "H5Sselect_all succeeded");
+            break;
+    }
+
+    /* set up the collective transfer property list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((xfer_plist >= 0), "");
+
+    status = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((status >= 0), "MPIO collective transfer property succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        status = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY((status >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* Steer the library's collective-chunk I/O mode decision per API option */
+    switch (api_option) {
+        case API_LINK_HARD:
+            status = H5Pset_dxpl_mpio_chunk_opt(xfer_plist, H5FD_MPIO_CHUNK_ONE_IO);
+            VRFY((status >= 0), "collective chunk optimization succeeded");
+            break;
+
+        case API_MULTI_HARD:
+            status = H5Pset_dxpl_mpio_chunk_opt(xfer_plist, H5FD_MPIO_CHUNK_MULTI_IO);
+            VRFY((status >= 0), "collective chunk optimization succeeded ");
+            break;
+
+        case API_LINK_TRUE:
+            status = H5Pset_dxpl_mpio_chunk_opt_num(xfer_plist, 2);
+            VRFY((status >= 0), "collective chunk optimization set chunk number succeeded");
+            break;
+
+        case API_LINK_FALSE:
+            status = H5Pset_dxpl_mpio_chunk_opt_num(xfer_plist, 6);
+            VRFY((status >= 0), "collective chunk optimization set chunk number succeeded");
+            break;
+
+        case API_MULTI_COLL:
+            status = H5Pset_dxpl_mpio_chunk_opt_num(xfer_plist, 8); /* make sure it is using multi-chunk IO */
+            VRFY((status >= 0), "collective chunk optimization set chunk number succeeded");
+            status = H5Pset_dxpl_mpio_chunk_opt_ratio(xfer_plist, 50);
+            VRFY((status >= 0), "collective chunk optimization set chunk ratio succeeded");
+            break;
+
+        case API_MULTI_IND:
+            status = H5Pset_dxpl_mpio_chunk_opt_num(xfer_plist, 8); /* make sure it is using multi-chunk IO */
+            VRFY((status >= 0), "collective chunk optimization set chunk number succeeded");
+            status = H5Pset_dxpl_mpio_chunk_opt_ratio(xfer_plist, 100);
+            VRFY((status >= 0), "collective chunk optimization set chunk ratio succeeded");
+            break;
+
+        default:;
+    }
+
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+    /* Insert instrumentation properties so the library can report back which
+     * collective-chunk I/O path it actually took during H5Dwrite below. */
+    if (facc_type == FACC_MPIO) {
+        switch (api_option) {
+            case API_LINK_HARD:
+                prop_value = H5D_XFER_COLL_CHUNK_DEF;
+                status = H5Pinsert2(xfer_plist, H5D_XFER_COLL_CHUNK_LINK_HARD_NAME, H5D_XFER_COLL_CHUNK_SIZE,
+                                    &prop_value, NULL, NULL, NULL, NULL, NULL, NULL);
+                VRFY((status >= 0), "testing property list inserted succeeded");
+                break;
+
+            case API_MULTI_HARD:
+                prop_value = H5D_XFER_COLL_CHUNK_DEF;
+                status = H5Pinsert2(xfer_plist, H5D_XFER_COLL_CHUNK_MULTI_HARD_NAME, H5D_XFER_COLL_CHUNK_SIZE,
+                                    &prop_value, NULL, NULL, NULL, NULL, NULL, NULL);
+                VRFY((status >= 0), "testing property list inserted succeeded");
+                break;
+
+            case API_LINK_TRUE:
+                prop_value = H5D_XFER_COLL_CHUNK_DEF;
+                status =
+                    H5Pinsert2(xfer_plist, H5D_XFER_COLL_CHUNK_LINK_NUM_TRUE_NAME, H5D_XFER_COLL_CHUNK_SIZE,
+                               &prop_value, NULL, NULL, NULL, NULL, NULL, NULL);
+                VRFY((status >= 0), "testing property list inserted succeeded");
+                break;
+
+            case API_LINK_FALSE:
+                prop_value = H5D_XFER_COLL_CHUNK_DEF;
+                status =
+                    H5Pinsert2(xfer_plist, H5D_XFER_COLL_CHUNK_LINK_NUM_FALSE_NAME, H5D_XFER_COLL_CHUNK_SIZE,
+                               &prop_value, NULL, NULL, NULL, NULL, NULL, NULL);
+                VRFY((status >= 0), "testing property list inserted succeeded");
+                break;
+
+            case API_MULTI_COLL:
+                prop_value = H5D_XFER_COLL_CHUNK_DEF;
+                status =
+                    H5Pinsert2(xfer_plist, H5D_XFER_COLL_CHUNK_MULTI_RATIO_COLL_NAME,
+                               H5D_XFER_COLL_CHUNK_SIZE, &prop_value, NULL, NULL, NULL, NULL, NULL, NULL);
+                VRFY((status >= 0), "testing property list inserted succeeded");
+                break;
+
+            case API_MULTI_IND:
+                prop_value = H5D_XFER_COLL_CHUNK_DEF;
+                status =
+                    H5Pinsert2(xfer_plist, H5D_XFER_COLL_CHUNK_MULTI_RATIO_IND_NAME, H5D_XFER_COLL_CHUNK_SIZE,
+                               &prop_value, NULL, NULL, NULL, NULL, NULL, NULL);
+                VRFY((status >= 0), "testing property list inserted succeeded");
+                break;
+
+            default:;
+        }
+    }
+#endif
+
+    /* write data collectively */
+    status = H5Dwrite(dataset, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((status >= 0), "dataset write succeeded");
+
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+    /* Only check chunk optimization mode if selection I/O is not being used -
+     * selection I/O bypasses this IO mode decision - it's effectively always
+     * multi chunk currently */
+    if (facc_type == FACC_MPIO && /* !H5_use_selection_io_g */ TRUE) {
+        switch (api_option) {
+            case API_LINK_HARD:
+                status = H5Pget(xfer_plist, H5D_XFER_COLL_CHUNK_LINK_HARD_NAME, &prop_value);
+                VRFY((status >= 0), "testing property list get succeeded");
+                VRFY((prop_value == 0), "API to set LINK COLLECTIVE IO directly succeeded");
+                break;
+
+            case API_MULTI_HARD:
+                status = H5Pget(xfer_plist, H5D_XFER_COLL_CHUNK_MULTI_HARD_NAME, &prop_value);
+                VRFY((status >= 0), "testing property list get succeeded");
+                VRFY((prop_value == 0), "API to set MULTI-CHUNK COLLECTIVE IO optimization succeeded");
+                break;
+
+            case API_LINK_TRUE:
+                status = H5Pget(xfer_plist, H5D_XFER_COLL_CHUNK_LINK_NUM_TRUE_NAME, &prop_value);
+                VRFY((status >= 0), "testing property list get succeeded");
+                VRFY((prop_value == 0), "API to set LINK COLLECTIVE IO succeeded");
+                break;
+
+            case API_LINK_FALSE:
+                status = H5Pget(xfer_plist, H5D_XFER_COLL_CHUNK_LINK_NUM_FALSE_NAME, &prop_value);
+                VRFY((status >= 0), "testing property list get succeeded");
+                VRFY((prop_value == 0), "API to set LINK IO transferring to multi-chunk IO succeeded");
+                break;
+
+            case API_MULTI_COLL:
+                status = H5Pget(xfer_plist, H5D_XFER_COLL_CHUNK_MULTI_RATIO_COLL_NAME, &prop_value);
+                VRFY((status >= 0), "testing property list get succeeded");
+                VRFY((prop_value == 0), "API to set MULTI-CHUNK COLLECTIVE IO with optimization succeeded");
+                break;
+
+            case API_MULTI_IND:
+                status = H5Pget(xfer_plist, H5D_XFER_COLL_CHUNK_MULTI_RATIO_IND_NAME, &prop_value);
+                VRFY((status >= 0), "testing property list get succeeded");
+                VRFY((prop_value == 0),
+                     "API to set MULTI-CHUNK IO transferring to independent IO succeeded");
+                break;
+
+            default:;
+        }
+    }
+#endif
+
+    status = H5Dclose(dataset);
+    VRFY((status >= 0), "");
+
+    status = H5Pclose(xfer_plist);
+    VRFY((status >= 0), "property list closed");
+
+    status = H5Sclose(file_dataspace);
+    VRFY((status >= 0), "");
+
+    status = H5Sclose(mem_dataspace);
+    VRFY((status >= 0), "");
+
+    status = H5Fclose(file);
+    VRFY((status >= 0), "");
+
+    if (data_array1)
+        HDfree(data_array1);
+
+    /* ---- Read phase: reopen the file and read back collectively ---- */
+
+    /* Use collective read to verify the correctness of collective write. */
+
+    /* allocate memory for data buffer */
+    data_array1 = (int *)HDmalloc(dims[0] * dims[1] * sizeof(int));
+    VRFY((data_array1 != NULL), "data_array1 malloc succeeded");
+
+    /* allocate memory for data buffer */
+    data_origin1 = (int *)HDmalloc(dims[0] * dims[1] * sizeof(int));
+    VRFY((data_origin1 != NULL), "data_origin1 malloc succeeded");
+
+    acc_plist = create_faccess_plist(comm, info, facc_type);
+    VRFY((acc_plist >= 0), "MPIO creation property list succeeded");
+
+    file = H5Fopen(filename, H5F_ACC_RDONLY, acc_plist);
+    /* Fixed: assertion message previously said "H5Fcreate succeeded" here */
+    VRFY((file >= 0), "H5Fopen succeeded");
+
+    status = H5Pclose(acc_plist);
+    VRFY((status >= 0), "");
+
+    /* open the collective dataset*/
+    dataset = H5Dopen2(file, DSET_COLLECTIVE_CHUNK_NAME, H5P_DEFAULT);
+    VRFY((dataset >= 0), "");
+
+    /* set up dimensions of the slab this process accesses */
+    ccslab_set(mpi_rank, mpi_size, start, count, stride, block, select_factor);
+
+    /* obtain the file and mem dataspace*/
+    file_dataspace = H5Dget_space(dataset);
+    VRFY((file_dataspace >= 0), "");
+
+    if (ALL != mem_selection) {
+        mem_dataspace = H5Dget_space(dataset);
+        VRFY((mem_dataspace >= 0), "");
+    }
+    else {
+        current_dims  = num_points;
+        mem_dataspace = H5Screate_simple(1, &current_dims, NULL);
+        VRFY((mem_dataspace >= 0), "mem_dataspace create succeeded");
+    }
+
+    /* Re-apply the same file-space selection used for the write */
+    switch (file_selection) {
+        case HYPER:
+            status = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+            VRFY((status >= 0), "hyperslab selection succeeded");
+            break;
+
+        case POINT:
+            if (num_points) {
+                status = H5Sselect_elements(file_dataspace, H5S_SELECT_SET, num_points, coords);
+                VRFY((status >= 0), "Element selection succeeded");
+            }
+            else {
+                status = H5Sselect_none(file_dataspace);
+                VRFY((status >= 0), "none selection succeeded");
+            }
+            break;
+
+        case ALL:
+            status = H5Sselect_all(file_dataspace);
+            VRFY((status >= 0), "H5Sselect_all succeeded");
+            break;
+    }
+
+    /* Re-apply the same memory-space selection used for the write */
+    switch (mem_selection) {
+        case HYPER:
+            status = H5Sselect_hyperslab(mem_dataspace, H5S_SELECT_SET, start, stride, count, block);
+            VRFY((status >= 0), "hyperslab selection succeeded");
+            break;
+
+        case POINT:
+            if (num_points) {
+                status = H5Sselect_elements(mem_dataspace, H5S_SELECT_SET, num_points, coords);
+                VRFY((status >= 0), "Element selection succeeded");
+            }
+            else {
+                status = H5Sselect_none(mem_dataspace);
+                VRFY((status >= 0), "none selection succeeded");
+            }
+            break;
+
+        case ALL:
+            status = H5Sselect_all(mem_dataspace);
+            VRFY((status >= 0), "H5Sselect_all succeeded");
+            break;
+    }
+
+    /* fill dataset with test data */
+    ccdataset_fill(start, stride, count, block, data_origin1, mem_selection);
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((xfer_plist >= 0), "");
+
+    status = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((status >= 0), "MPIO collective transfer property succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        status = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY((status >= 0), "set independent IO collectively succeeded");
+    }
+
+    status = H5Dread(dataset, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((status >= 0), "dataset read succeeded");
+
+    /* verify the read data with original expected data */
+    status = ccdataset_vrfy(start, count, stride, block, data_array1, data_origin1, mem_selection);
+    if (status)
+        nerrors++;
+
+    status = H5Pclose(xfer_plist);
+    VRFY((status >= 0), "property list closed");
+
+    /* close dataset collectively */
+    status = H5Dclose(dataset);
+    VRFY((status >= 0), "H5Dclose");
+
+    /* release all IDs created */
+    status = H5Sclose(file_dataspace);
+    VRFY((status >= 0), "H5Sclose");
+
+    status = H5Sclose(mem_dataspace);
+    VRFY((status >= 0), "H5Sclose");
+
+    /* close the file collectively */
+    status = H5Fclose(file);
+    VRFY((status >= 0), "H5Fclose");
+
+    /* release data buffers */
+    if (coords)
+        HDfree(coords);
+    if (data_array1)
+        HDfree(data_array1);
+    if (data_origin1)
+        HDfree(data_origin1);
+}
+
+/* Set up the selection */
+static void
+ccslab_set(int mpi_rank, int mpi_size, hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[],
+           int mode)
+{
+    /* Fill in the rank-2 hyperslab parameters (start/count/stride/block)
+     * for this process according to the selection mode. All four output
+     * arrays must have at least 2 elements. */
+
+    switch (mode) {
+
+        case BYROW_CONT:
+            /* Each process takes a slabs of rows. */
+            block[0]  = 1;
+            block[1]  = 1;
+            stride[0] = 1;
+            stride[1] = 1;
+            count[0]  = SPACE_DIM1;
+            count[1]  = SPACE_DIM2;
+            start[0]  = (hsize_t)mpi_rank * count[0];
+            start[1]  = 0;
+
+            break;
+
+        case BYROW_DISCONT:
+            /* Each process takes several disjoint blocks. */
+            block[0]  = 1;
+            block[1]  = 1;
+            stride[0] = 3;
+            stride[1] = 3;
+            count[0]  = SPACE_DIM1 / (stride[0] * block[0]);
+            count[1]  = (SPACE_DIM2) / (stride[1] * block[1]);
+            start[0]  = (hsize_t)SPACE_DIM1 * (hsize_t)mpi_rank;
+            start[1]  = 0;
+
+            break;
+
+        case BYROW_SELECTNONE:
+            /* Each process takes a slabs of rows, there are
+               no selections for the last process. */
+            /* count[0] becomes 0 for the highest-ranked process(es),
+             * producing an empty selection there. */
+            block[0]  = 1;
+            block[1]  = 1;
+            stride[0] = 1;
+            stride[1] = 1;
+            count[0]  = ((mpi_rank >= MAX(1, (mpi_size - 2))) ? 0 : SPACE_DIM1);
+            count[1]  = SPACE_DIM2;
+            start[0]  = (hsize_t)mpi_rank * count[0];
+            start[1]  = 0;
+
+            break;
+
+        case BYROW_SELECTUNBALANCE:
+            /* The first one-third of the number of processes only
+               select top half of the domain, The rest will select the bottom
+               half of the domain. */
+
+            block[0]  = 1;
+            count[0]  = 2;
+            stride[0] = (hsize_t)SPACE_DIM1 * (hsize_t)mpi_size / 4 + 1;
+            block[1]  = SPACE_DIM2;
+            count[1]  = 1;
+            start[1]  = 0;
+            stride[1] = 1;
+            /* NOTE(review): the comment above says "first one-third", but the
+             * condition (mpi_rank*3 < mpi_size*2) selects the first TWO-thirds
+             * of the ranks -- confirm against the original test's intent. */
+            if ((mpi_rank * 3) < (mpi_size * 2))
+                start[0] = (hsize_t)mpi_rank;
+            else
+                start[0] = (hsize_t)(1 + SPACE_DIM1 * mpi_size / 2 + (mpi_rank - 2 * mpi_size / 3));
+            break;
+
+        case BYROW_SELECTINCHUNK:
+            /* Each process will only select one chunk */
+
+            block[0]  = 1;
+            count[0]  = 1;
+            start[0]  = (hsize_t)(mpi_rank * SPACE_DIM1);
+            stride[0] = 1;
+            block[1]  = SPACE_DIM2;
+            count[1]  = 1;
+            stride[1] = 1;
+            start[1]  = 0;
+
+            break;
+
+        default:
+            /* Unknown mode. Set it to cover the whole dataset. */
+            block[0]  = (hsize_t)SPACE_DIM1 * (hsize_t)mpi_size;
+            block[1]  = SPACE_DIM2;
+            stride[0] = block[0];
+            stride[1] = block[1];
+            count[0]  = 1;
+            count[1]  = 1;
+            start[0]  = 0;
+            start[1]  = 0;
+
+            break;
+    }
+    /* Dump the computed selection parameters when running verbosely */
+    if (VERBOSE_MED) {
+        HDprintf("start[]=(%lu,%lu), count[]=(%lu,%lu), stride[]=(%lu,%lu), block[]=(%lu,%lu), total "
+                 "datapoints=%lu\n",
+                 (unsigned long)start[0], (unsigned long)start[1], (unsigned long)count[0],
+                 (unsigned long)count[1], (unsigned long)stride[0], (unsigned long)stride[1],
+                 (unsigned long)block[0], (unsigned long)block[1],
+                 (unsigned long)(block[0] * block[1] * count[0] * count[1]));
+    }
+}
+
+/*
+ * Fill the dataset with trivial data for testing.
+ * Assume dimension rank is 2.
+ */
+static void
+ccdataset_fill(hsize_t start[], hsize_t stride[], hsize_t count[], hsize_t block[], DATATYPE *dataset,
+               int mem_selection)
+{
+    DATATYPE *dataptr = dataset;
+    DATATYPE *tmptr;
+    hsize_t   i, j, k1, k2, k = 0;
+    /* put some trivial data in the data_array */
+    tmptr = dataptr;
+
+    /* assign the disjoint block (two-dimensional)data array value
+       through the pointer */
+
+    for (k1 = 0; k1 < count[0]; k1++) {
+        for (i = 0; i < block[0]; i++) {
+            for (k2 = 0; k2 < count[1]; k2++) {
+                for (j = 0; j < block[1]; j++) {
+
+                    if (ALL != mem_selection) {
+                        /* Hyperslab/point memory selection: compute the 2-D
+                         * row-major offset of this element (row width is
+                         * SPACE_DIM2). */
+                        dataptr = tmptr + ((start[0] + k1 * stride[0] + i) * SPACE_DIM2 + start[1] +
+                                           k2 * stride[1] + j);
+                    }
+                    else {
+                        /* ALL memory selection: the buffer is a flat 1-D
+                         * array, so pack values consecutively. */
+                        dataptr = tmptr + k;
+                        k++;
+                    }
+
+                    /* Deterministic value derived from the element's block
+                     * and intra-block coordinates; ccdataset_vrfy() computes
+                     * the same value for comparison. */
+                    *dataptr = (DATATYPE)(k1 + k2 + i + j);
+                }
+            }
+        }
+    }
+}
+
+/*
+ * Print the first block of the content of the dataset.
+ */
+static void
+ccdataset_print(hsize_t start[], hsize_t block[], DATATYPE *dataset)
+
+{
+    DATATYPE *dataptr = dataset;
+    hsize_t   i, j;
+
+    /* print the column heading */
+    HDprintf("Print only the first block of the dataset\n");
+    HDprintf("%-8s", "Cols:");
+    for (j = 0; j < block[1]; j++) {
+        HDprintf("%3lu ", (unsigned long)(start[1] + j));
+    }
+    HDprintf("\n");
+
+    /* print the slab data */
+    /* NOTE: dataptr is advanced linearly from the start of the buffer, so
+     * this only lines up with the first block of the selection. */
+    for (i = 0; i < block[0]; i++) {
+        HDprintf("Row %2lu: ", (unsigned long)(i + start[0]));
+        for (j = 0; j < block[1]; j++) {
+            HDprintf("%03d ", *dataptr++);
+        }
+        HDprintf("\n");
+    }
+}
+
+/*
+ * Verify the content of the dataset against the expected original data.
+ * Returns the number of verification errors found (0 on success).
+ */
+static int
+ccdataset_vrfy(hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[], DATATYPE *dataset,
+               DATATYPE *original, int mem_selection)
+{
+    hsize_t   i, j, k1, k2, k = 0;
+    int       vrfyerrs;
+    DATATYPE *dataptr, *oriptr;
+
+    /* Compare `dataset` against `original` over the described selection and
+     * return the number of mismatching elements (0 means success). */
+
+    /* print it if VERBOSE_MED */
+    if (VERBOSE_MED) {
+        HDprintf("dataset_vrfy dumping:::\n");
+        HDprintf("start(%lu, %lu), count(%lu, %lu), stride(%lu, %lu), block(%lu, %lu)\n",
+                 (unsigned long)start[0], (unsigned long)start[1], (unsigned long)count[0],
+                 (unsigned long)count[1], (unsigned long)stride[0], (unsigned long)stride[1],
+                 (unsigned long)block[0], (unsigned long)block[1]);
+        HDprintf("original values:\n");
+        ccdataset_print(start, block, original);
+        HDprintf("compared values:\n");
+        ccdataset_print(start, block, dataset);
+    }
+
+    vrfyerrs = 0;
+
+    /* Walk the selection with the same element-addressing scheme used by
+     * ccdataset_fill(): 2-D offsets for hyperslab/point memory selections,
+     * linear packing for the ALL memory selection. */
+    for (k1 = 0; k1 < count[0]; k1++) {
+        for (i = 0; i < block[0]; i++) {
+            for (k2 = 0; k2 < count[1]; k2++) {
+                for (j = 0; j < block[1]; j++) {
+                    if (ALL != mem_selection) {
+                        dataptr = dataset + ((start[0] + k1 * stride[0] + i) * SPACE_DIM2 + start[1] +
+                                             k2 * stride[1] + j);
+                        oriptr  = original + ((start[0] + k1 * stride[0] + i) * SPACE_DIM2 + start[1] +
+                                             k2 * stride[1] + j);
+                    }
+                    else {
+                        dataptr = dataset + k;
+                        oriptr  = original + k;
+                        k++;
+                    }
+                    if (*dataptr != *oriptr) {
+                        /* Report the first MAX_ERR_REPORT mismatches (or all
+                         * of them when running verbosely) */
+                        if (vrfyerrs++ < MAX_ERR_REPORT || VERBOSE_MED) {
+                            HDprintf("Dataset Verify failed at [%lu][%lu]: expect %d, got %d\n",
+                                     (unsigned long)i, (unsigned long)j, *(oriptr), *(dataptr));
+                        }
+                    }
+                }
+            }
+        }
+    }
+    if (vrfyerrs > MAX_ERR_REPORT && !VERBOSE_MED)
+        HDprintf("[more errors ...]\n");
+    if (vrfyerrs)
+        HDprintf("%d errors found in ccdataset_vrfy\n", vrfyerrs);
+    return (vrfyerrs);
+}
diff --git a/testpar/API/t_coll_md_read.c b/testpar/API/t_coll_md_read.c
new file mode 100644
index 0000000..f6f99bf
--- /dev/null
+++ b/testpar/API/t_coll_md_read.c
@@ -0,0 +1,654 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * A test suite to test HDF5's collective metadata read and write capabilities,
+ * as enabled by making a call to H5Pset_all_coll_metadata_ops() and/or
+ * H5Pset_coll_metadata_write().
+ */
+
+#include "hdf5.h"
+#include "testphdf5.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+/*
+ * Define the non-participating process as the "last"
+ * rank to avoid any weirdness potentially caused by
+ * an if (mpi_rank == 0) check.
+ */
+#define PARTIAL_NO_SELECTION_NO_SEL_PROCESS (mpi_rank == mpi_size - 1)
+#define PARTIAL_NO_SELECTION_DATASET_NAME "partial_no_selection_dset"
+#define PARTIAL_NO_SELECTION_DATASET_NDIMS 2
+#define PARTIAL_NO_SELECTION_Y_DIM_SCALE 5
+#define PARTIAL_NO_SELECTION_X_DIM_SCALE 5
+
+#define MULTI_CHUNK_IO_ADDRMAP_ISSUE_DIMS 2
+
+#define LINK_CHUNK_IO_SORT_CHUNK_ISSUE_COLL_THRESH_NUM 10000
+#define LINK_CHUNK_IO_SORT_CHUNK_ISSUE_DATASET_NAME "linked_chunk_io_sort_chunk_issue"
+#define LINK_CHUNK_IO_SORT_CHUNK_ISSUE_DIMS 1
+
+#define COLL_GHEAP_WRITE_ATTR_NELEMS 10
+#define COLL_GHEAP_WRITE_ATTR_NAME "coll_gheap_write_attr"
+#define COLL_GHEAP_WRITE_ATTR_DIMS 1
+
+/*
+ * A test for issue HDFFV-10501. A parallel hang was reported which occurred
+ * in linked-chunk I/O when collective metadata reads are enabled and some ranks
+ * do not have any selection in a dataset's dataspace, while others do. The ranks
+ * which have no selection during the read/write operation called H5D__chunk_addrmap()
+ * to retrieve the lowest chunk address, since we require that the read/write be done
+ * in strictly non-decreasing order of chunk address. For version 1 and 2 B-trees,
+ * this caused the non-participating ranks to issue a collective MPI_Bcast() call
+ * which the other ranks did not issue, thus causing a hang.
+ *
+ * However, since these ranks are not actually reading/writing anything, this call
+ * can simply be removed and the address used for the read/write can be set to an
+ * arbitrary number (0 was chosen).
+ */
+void
+test_partial_no_selection_coll_md_read(void)
+{
+    const char *filename;
+    hsize_t    *dataset_dims = NULL;
+    hsize_t     max_dataset_dims[PARTIAL_NO_SELECTION_DATASET_NDIMS];
+    hsize_t     sel_dims[1];
+    hsize_t     chunk_dims[PARTIAL_NO_SELECTION_DATASET_NDIMS] = {PARTIAL_NO_SELECTION_Y_DIM_SCALE,
+                                                                  PARTIAL_NO_SELECTION_X_DIM_SCALE};
+    hsize_t     start[PARTIAL_NO_SELECTION_DATASET_NDIMS];
+    hsize_t     stride[PARTIAL_NO_SELECTION_DATASET_NDIMS];
+    hsize_t     count[PARTIAL_NO_SELECTION_DATASET_NDIMS];
+    hsize_t     block[PARTIAL_NO_SELECTION_DATASET_NDIMS];
+    hid_t       file_id   = H5I_INVALID_HID;
+    hid_t       fapl_id   = H5I_INVALID_HID;
+    hid_t       dset_id   = H5I_INVALID_HID;
+    hid_t       dcpl_id   = H5I_INVALID_HID;
+    hid_t       dxpl_id   = H5I_INVALID_HID;
+    hid_t       fspace_id = H5I_INVALID_HID;
+    hid_t       mspace_id = H5I_INVALID_HID;
+    int         mpi_rank, mpi_size;
+    void       *data     = NULL;
+    void       *read_buf = NULL;
+
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file, dataset or file flush aren't supported with this "
+                   "connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    filename = PARATESTFILE /* GetTestParameters() */;
+
+    fapl_id = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+    VRFY((fapl_id >= 0), "create_faccess_plist succeeded");
+
+    /*
+     * Even though the testphdf5 framework currently sets collective metadata reads
+     * on the FAPL, we call it here just to be sure this is futureproof, since
+     * demonstrating this issue relies upon it.
+     */
+    VRFY((H5Pset_all_coll_metadata_ops(fapl_id, true) >= 0), "Set collective metadata reads succeeded");
+
+    file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+    VRFY((file_id >= 0), "H5Fcreate succeeded");
+
+    dataset_dims = HDmalloc(PARTIAL_NO_SELECTION_DATASET_NDIMS * sizeof(*dataset_dims));
+    VRFY((dataset_dims != NULL), "malloc succeeded");
+
+    /* Dataset grows with the number of ranks so each rank owns a full
+     * row-band of chunks; unlimited max dims keep the dataset chunked. */
+    dataset_dims[0]     = (hsize_t)PARTIAL_NO_SELECTION_Y_DIM_SCALE * (hsize_t)mpi_size;
+    dataset_dims[1]     = (hsize_t)PARTIAL_NO_SELECTION_X_DIM_SCALE * (hsize_t)mpi_size;
+    max_dataset_dims[0] = H5S_UNLIMITED;
+    max_dataset_dims[1] = H5S_UNLIMITED;
+
+    fspace_id = H5Screate_simple(PARTIAL_NO_SELECTION_DATASET_NDIMS, dataset_dims, max_dataset_dims);
+    VRFY((fspace_id >= 0), "H5Screate_simple succeeded");
+
+    /*
+     * Set up chunking on the dataset in order to reproduce the problem.
+     */
+    dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+    VRFY((dcpl_id >= 0), "H5Pcreate succeeded");
+
+    VRFY((H5Pset_chunk(dcpl_id, PARTIAL_NO_SELECTION_DATASET_NDIMS, chunk_dims) >= 0),
+         "H5Pset_chunk succeeded");
+
+    dset_id = H5Dcreate2(file_id, PARTIAL_NO_SELECTION_DATASET_NAME, H5T_NATIVE_INT, fspace_id, H5P_DEFAULT,
+                         dcpl_id, H5P_DEFAULT);
+    VRFY((dset_id >= 0), "H5Dcreate2 succeeded");
+
+    /*
+     * Setup hyperslab selection to split the dataset among the ranks.
+     *
+     * The ranks will write rows across the dataset.
+     */
+    start[0]  = (hsize_t)PARTIAL_NO_SELECTION_Y_DIM_SCALE * (hsize_t)mpi_rank;
+    start[1]  = 0;
+    stride[0] = PARTIAL_NO_SELECTION_Y_DIM_SCALE;
+    stride[1] = PARTIAL_NO_SELECTION_X_DIM_SCALE;
+    count[0]  = 1;
+    count[1]  = (hsize_t)mpi_size;
+    block[0]  = PARTIAL_NO_SELECTION_Y_DIM_SCALE;
+    block[1]  = PARTIAL_NO_SELECTION_X_DIM_SCALE;
+
+    VRFY((H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) >= 0),
+         "H5Sselect_hyperslab succeeded");
+
+    /* Flat memory space sized to the number of elements selected above */
+    sel_dims[0] = count[1] * (PARTIAL_NO_SELECTION_Y_DIM_SCALE * PARTIAL_NO_SELECTION_X_DIM_SCALE);
+
+    mspace_id = H5Screate_simple(1, sel_dims, NULL);
+    VRFY((mspace_id >= 0), "H5Screate_simple succeeded");
+
+    /* calloc zero-fills the write buffer, so the later memcmp against the
+     * read-back buffer expects all zeros */
+    data = HDcalloc(1, count[1] * (PARTIAL_NO_SELECTION_Y_DIM_SCALE * PARTIAL_NO_SELECTION_X_DIM_SCALE) *
+                           sizeof(int));
+    VRFY((data != NULL), "calloc succeeded");
+
+    dxpl_id = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((dxpl_id >= 0), "H5Pcreate succeeded");
+
+    /*
+     * Enable collective access for the data transfer.
+     */
+    VRFY((H5Pset_dxpl_mpio(dxpl_id, H5FD_MPIO_COLLECTIVE) >= 0), "H5Pset_dxpl_mpio succeeded");
+
+    VRFY((H5Dwrite(dset_id, H5T_NATIVE_INT, mspace_id, fspace_id, dxpl_id, data) >= 0), "H5Dwrite succeeded");
+
+    VRFY((H5Fflush(file_id, H5F_SCOPE_GLOBAL) >= 0), "H5Fflush succeeded");
+
+    /*
+     * Ensure that linked-chunk I/O is performed since this is
+     * the particular code path where the issue lies and we don't
+     * want the library doing multi-chunk I/O behind our backs.
+     */
+    VRFY((H5Pset_dxpl_mpio_chunk_opt(dxpl_id, H5FD_MPIO_CHUNK_ONE_IO) >= 0),
+         "H5Pset_dxpl_mpio_chunk_opt succeeded");
+
+    read_buf = HDmalloc(count[1] * (PARTIAL_NO_SELECTION_Y_DIM_SCALE * PARTIAL_NO_SELECTION_X_DIM_SCALE) *
+                        sizeof(int));
+    VRFY((read_buf != NULL), "malloc succeeded");
+
+    /*
+     * Make sure to call H5Sselect_none() on the non-participating process.
+     */
+    if (PARTIAL_NO_SELECTION_NO_SEL_PROCESS) {
+        VRFY((H5Sselect_none(fspace_id) >= 0), "H5Sselect_none succeeded");
+        VRFY((H5Sselect_none(mspace_id) >= 0), "H5Sselect_none succeeded");
+    }
+
+    /*
+     * Finally have each rank read their section of data back from the dataset.
+     */
+    VRFY((H5Dread(dset_id, H5T_NATIVE_INT, mspace_id, fspace_id, dxpl_id, read_buf) >= 0),
+         "H5Dread succeeded");
+
+    /*
+     * Check data integrity just to be sure.
+     */
+    /* Skipped on the no-selection rank, whose read_buf was never filled */
+    if (!PARTIAL_NO_SELECTION_NO_SEL_PROCESS) {
+        VRFY((!HDmemcmp(data, read_buf,
+                        count[1] * (PARTIAL_NO_SELECTION_Y_DIM_SCALE * PARTIAL_NO_SELECTION_X_DIM_SCALE) *
+                            sizeof(int))),
+             "memcmp succeeded");
+    }
+
+    if (dataset_dims) {
+        HDfree(dataset_dims);
+        dataset_dims = NULL;
+    }
+
+    if (data) {
+        HDfree(data);
+        data = NULL;
+    }
+
+    if (read_buf) {
+        HDfree(read_buf);
+        read_buf = NULL;
+    }
+
+    VRFY((H5Sclose(fspace_id) >= 0), "H5Sclose succeeded");
+    VRFY((H5Sclose(mspace_id) >= 0), "H5Sclose succeeded");
+    VRFY((H5Pclose(dcpl_id) >= 0), "H5Pclose succeeded");
+    VRFY((H5Pclose(dxpl_id) >= 0), "H5Pclose succeeded");
+    VRFY((H5Dclose(dset_id) >= 0), "H5Dclose succeeded");
+    VRFY((H5Pclose(fapl_id) >= 0), "H5Pclose succeeded");
+    VRFY((H5Fclose(file_id) >= 0), "H5Fclose succeeded");
+}
+
+/*
+ * A test for HDFFV-10562 which attempts to verify that using multi-chunk
+ * I/O with collective metadata reads enabled doesn't causes issues due to
+ * collective metadata reads being made only by process 0 in H5D__chunk_addrmap().
+ *
+ * Failure in this test may either cause a hang, or, due to how the MPI calls
+ * pertaining to this issue might mistakenly match up, may cause an MPI error
+ * message similar to:
+ *
+ * #008: H5Dmpio.c line 2546 in H5D__obtain_mpio_mode(): MPI_BCast failed
+ * major: Internal error (too specific to document in detail)
+ * minor: Some MPI function failed
+ * #009: H5Dmpio.c line 2546 in H5D__obtain_mpio_mode(): Message truncated, error stack:
+ *PMPI_Bcast(1600)..................: MPI_Bcast(buf=0x1df98e0, count=18, MPI_BYTE, root=0, comm=0x84000006)
+ *failed MPIR_Bcast_impl(1452).............: MPIR_Bcast(1476)..................:
+ *MPIR_Bcast_intra(1249)............:
+ *MPIR_SMP_Bcast(1088)..............:
+ *MPIR_Bcast_binomial(239)..........:
+ *MPIDI_CH3U_Receive_data_found(131): Message from rank 0 and tag 2 truncated; 2616 bytes received but buffer
+ *size is 18 major: Internal error (too specific to document in detail) minor: MPI Error String
+ *
+ */
+void
+test_multi_chunk_io_addrmap_issue(void)
+{
+ const char *filename;
+ hsize_t start[MULTI_CHUNK_IO_ADDRMAP_ISSUE_DIMS];
+ hsize_t stride[MULTI_CHUNK_IO_ADDRMAP_ISSUE_DIMS];
+ hsize_t count[MULTI_CHUNK_IO_ADDRMAP_ISSUE_DIMS];
+ hsize_t block[MULTI_CHUNK_IO_ADDRMAP_ISSUE_DIMS];
+ hsize_t dims[MULTI_CHUNK_IO_ADDRMAP_ISSUE_DIMS] = {10, 5};
+ hsize_t chunk_dims[MULTI_CHUNK_IO_ADDRMAP_ISSUE_DIMS] = {5, 5};
+ hsize_t max_dims[MULTI_CHUNK_IO_ADDRMAP_ISSUE_DIMS] = {H5S_UNLIMITED, H5S_UNLIMITED};
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dcpl_id = H5I_INVALID_HID;
+ hid_t dxpl_id = H5I_INVALID_HID;
+ hid_t space_id = H5I_INVALID_HID;
+ void *read_buf = NULL;
+ int mpi_rank;
+ int data[5][5] = {{0, 1, 2, 3, 4}, {0, 1, 2, 3, 4}, {0, 1, 2, 3, 4}, {0, 1, 2, 3, 4}, {0, 1, 2, 3, 4}};
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset or file flush aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+
+ fapl_id = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ VRFY((fapl_id >= 0), "create_faccess_plist succeeded");
+
+ /*
+ * Even though the testphdf5 framework currently sets collective metadata reads
+ * on the FAPL, we call it here just to be sure this is futureproof, since
+ * demonstrating this issue relies upon it.
+ */
+ VRFY((H5Pset_all_coll_metadata_ops(fapl_id, true) >= 0), "Set collective metadata reads succeeded");
+
+ file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ VRFY((file_id >= 0), "H5Fcreate succeeded");
+
+ space_id = H5Screate_simple(MULTI_CHUNK_IO_ADDRMAP_ISSUE_DIMS, dims, max_dims);
+ VRFY((space_id >= 0), "H5Screate_simple succeeded");
+
+ dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((dcpl_id >= 0), "H5Pcreate succeeded");
+
+ VRFY((H5Pset_chunk(dcpl_id, MULTI_CHUNK_IO_ADDRMAP_ISSUE_DIMS, chunk_dims) >= 0),
+ "H5Pset_chunk succeeded");
+
+ dset_id = H5Dcreate2(file_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT);
+ VRFY((dset_id >= 0), "H5Dcreate2 succeeded");
+
+ dxpl_id = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((dxpl_id >= 0), "H5Pcreate succeeded");
+
+ VRFY((H5Pset_dxpl_mpio(dxpl_id, H5FD_MPIO_COLLECTIVE) >= 0), "H5Pset_dxpl_mpio succeeded");
+ VRFY((H5Pset_dxpl_mpio_chunk_opt(dxpl_id, H5FD_MPIO_CHUNK_MULTI_IO) >= 0),
+ "H5Pset_dxpl_mpio_chunk_opt succeeded");
+
+ start[1] = 0;
+ stride[0] = stride[1] = 1;
+ count[0] = count[1] = 5;
+ block[0] = block[1] = 1;
+
+ if (mpi_rank == 0)
+ start[0] = 0;
+ else
+ start[0] = 5;
+
+ VRFY((H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start, stride, count, block) >= 0),
+ "H5Sselect_hyperslab succeeded");
+ if (mpi_rank != 0)
+ VRFY((H5Sselect_none(space_id) >= 0), "H5Sselect_none succeeded");
+
+ VRFY((H5Dwrite(dset_id, H5T_NATIVE_INT, H5S_ALL, space_id, dxpl_id, data) >= 0), "H5Dwrite succeeded");
+
+ VRFY((H5Fflush(file_id, H5F_SCOPE_GLOBAL) >= 0), "H5Fflush succeeded");
+
+ read_buf = HDmalloc(50 * sizeof(int));
+ VRFY((read_buf != NULL), "malloc succeeded");
+
+ VRFY((H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl_id, read_buf) >= 0), "H5Dread succeeded");
+
+ if (read_buf) {
+ HDfree(read_buf);
+ read_buf = NULL;
+ }
+
+ VRFY((H5Sclose(space_id) >= 0), "H5Sclose succeeded");
+ VRFY((H5Pclose(dcpl_id) >= 0), "H5Pclose succeeded");
+ VRFY((H5Pclose(dxpl_id) >= 0), "H5Pclose succeeded");
+ VRFY((H5Dclose(dset_id) >= 0), "H5Dclose succeeded");
+ VRFY((H5Pclose(fapl_id) >= 0), "H5Pclose succeeded");
+ VRFY((H5Fclose(file_id) >= 0), "H5Fclose succeeded");
+}
+
+/*
+ * A test for HDFFV-10562 which attempts to verify that using linked-chunk
+ * I/O with collective metadata reads enabled doesn't cause issues due to
+ * collective metadata reads being made only by process 0 in H5D__sort_chunk().
+ *
+ * NOTE: Due to the way that the threshold value which pertains to this test
+ * is currently calculated within HDF5, the following two conditions must be
+ * true to trigger the issue:
+ *
+ * Condition 1: A certain threshold ratio must be met in order to have HDF5
+ * obtain all chunk addresses collectively inside H5D__sort_chunk(). This is
+ * given by the following:
+ *
+ * (sum_chunk * 100) / (dataset_nchunks * mpi_size) >= 30%
+ *
+ * where:
+ * * `sum_chunk` is the combined sum of the number of chunks selected in
+ * the dataset by all ranks (chunks selected by more than one rank count
+ * individually toward the sum for each rank selecting that chunk)
+ * * `dataset_nchunks` is the number of chunks in the dataset (selected
+ * or not)
+ * * `mpi_size` is the size of the MPI Communicator
+ *
+ * Condition 2: `sum_chunk` divided by `mpi_size` must exceed or equal a certain
+ * threshold (as of this writing, 10000).
+ *
+ * To satisfy both these conditions, we #define a macro,
+ * LINK_CHUNK_IO_SORT_CHUNK_ISSUE_COLL_THRESH_NUM, which corresponds to the
+ * value of the H5D_ALL_CHUNK_ADDR_THRES_COL_NUM macro in H5Dmpio.c (the
+ * 10000 threshold from condition 2). We then create a dataset of that many
+ * chunks and have each MPI rank write to and read from a piece of every single
+ * chunk in the dataset. This ensures chunk utilization is the max possible
+ * and exceeds our 30% target ratio, while always exactly matching the numeric
+ * chunk threshold value of condition 2.
+ *
+ * Failure in this test may either cause a hang, or, due to how the MPI calls
+ * pertaining to this issue might mistakenly match up, may cause an MPI error
+ * message similar to:
+ *
+ * #008: H5Dmpio.c line 2338 in H5D__sort_chunk(): MPI_BCast failed
+ * major: Internal error (too specific to document in detail)
+ * minor: Some MPI function failed
+ * #009: H5Dmpio.c line 2338 in H5D__sort_chunk(): Other MPI error, error stack:
+ *PMPI_Bcast(1600)........: MPI_Bcast(buf=0x7eae610, count=320000, MPI_BYTE, root=0, comm=0x84000006) failed
+ *MPIR_Bcast_impl(1452)...:
+ *MPIR_Bcast(1476)........:
+ *MPIR_Bcast_intra(1249)..:
+ *MPIR_SMP_Bcast(1088)....:
+ *MPIR_Bcast_binomial(250): message sizes do not match across processes in the collective routine: Received
+ *2096 but expected 320000 major: Internal error (too specific to document in detail) minor: MPI Error String
+ */
void
test_link_chunk_io_sort_chunk_issue(void)
{
    /*
     * Regression test for HDFFV-10562 (see the block comment above): make a
     * linked-chunk collective read whose chunk count exactly matches the
     * internal all-chunk-address threshold so that H5D__sort_chunk() takes
     * the collective chunk-address lookup path.
     */
    const char *filename;
    hsize_t     dataset_dims[LINK_CHUNK_IO_SORT_CHUNK_ISSUE_DIMS];
    hsize_t     sel_dims[LINK_CHUNK_IO_SORT_CHUNK_ISSUE_DIMS];
    hsize_t     chunk_dims[LINK_CHUNK_IO_SORT_CHUNK_ISSUE_DIMS];
    hsize_t     start[LINK_CHUNK_IO_SORT_CHUNK_ISSUE_DIMS];
    hsize_t     stride[LINK_CHUNK_IO_SORT_CHUNK_ISSUE_DIMS];
    hsize_t     count[LINK_CHUNK_IO_SORT_CHUNK_ISSUE_DIMS];
    hsize_t     block[LINK_CHUNK_IO_SORT_CHUNK_ISSUE_DIMS];
    hid_t       file_id   = H5I_INVALID_HID;
    hid_t       fapl_id   = H5I_INVALID_HID;
    hid_t       dset_id   = H5I_INVALID_HID;
    hid_t       dcpl_id   = H5I_INVALID_HID;
    hid_t       dxpl_id   = H5I_INVALID_HID;
    hid_t       fspace_id = H5I_INVALID_HID;
    hid_t       mspace_id = H5I_INVALID_HID;
    int         mpi_rank, mpi_size;
    void       *data     = NULL;
    void       *read_buf = NULL;

    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
        if (MAINPROCESS) {
            puts("SKIPPED");
            printf("    API functions for basic file, dataset or file flush aren't supported with this "
                   "connector\n");
            fflush(stdout);
        }

        return;
    }

    filename = PARATESTFILE /* GetTestParameters() */;

    fapl_id = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
    VRFY((fapl_id >= 0), "create_faccess_plist succeeded");

    /*
     * Even though the testphdf5 framework currently sets collective metadata reads
     * on the FAPL, we call it here just to be sure this is futureproof, since
     * demonstrating this issue relies upon it.
     */
    VRFY((H5Pset_all_coll_metadata_ops(fapl_id, true) >= 0), "Set collective metadata reads succeeded");

    file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
    VRFY((file_id >= 0), "H5Fcreate succeeded");

    /*
     * Create a one-dimensional dataset of exactly LINK_CHUNK_IO_SORT_CHUNK_ISSUE_COLL_THRESH_NUM
     * chunks, where every rank writes to a piece of every single chunk to keep utilization high.
     */
    dataset_dims[0] = (hsize_t)mpi_size * (hsize_t)LINK_CHUNK_IO_SORT_CHUNK_ISSUE_COLL_THRESH_NUM;

    fspace_id = H5Screate_simple(LINK_CHUNK_IO_SORT_CHUNK_ISSUE_DIMS, dataset_dims, NULL);
    VRFY((fspace_id >= 0), "H5Screate_simple succeeded");

    /*
     * Set up chunking on the dataset in order to reproduce the problem.
     */
    dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
    VRFY((dcpl_id >= 0), "H5Pcreate succeeded");

    /* Chunk size is equal to MPI size since each rank writes to a piece of every chunk */
    chunk_dims[0] = (hsize_t)mpi_size;

    VRFY((H5Pset_chunk(dcpl_id, LINK_CHUNK_IO_SORT_CHUNK_ISSUE_DIMS, chunk_dims) >= 0),
         "H5Pset_chunk succeeded");

    dset_id = H5Dcreate2(file_id, LINK_CHUNK_IO_SORT_CHUNK_ISSUE_DATASET_NAME, H5T_NATIVE_INT, fspace_id,
                         H5P_DEFAULT, dcpl_id, H5P_DEFAULT);
    VRFY((dset_id >= 0), "H5Dcreate2 succeeded");

    /*
     * Setup hyperslab selection to split the dataset among the ranks.
     */
    /* Strided selection: element `mpi_rank` of every chunk, so each rank
     * touches one element of all LINK_CHUNK_IO_SORT_CHUNK_ISSUE_COLL_THRESH_NUM chunks */
    start[0]  = (hsize_t)mpi_rank;
    stride[0] = (hsize_t)mpi_size;
    count[0]  = LINK_CHUNK_IO_SORT_CHUNK_ISSUE_COLL_THRESH_NUM;
    block[0]  = 1;

    VRFY((H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) >= 0),
         "H5Sselect_hyperslab succeeded");

    sel_dims[0] = count[0];

    mspace_id = H5Screate_simple(1, sel_dims, NULL);
    VRFY((mspace_id >= 0), "H5Screate_simple succeeded");

    /* Write buffer is zero-filled; the test cares about I/O mechanics, not values */
    data = HDcalloc(1, count[0] * sizeof(int));
    VRFY((data != NULL), "calloc succeeded");

    dxpl_id = H5Pcreate(H5P_DATASET_XFER);
    VRFY((dxpl_id >= 0), "H5Pcreate succeeded");

    /*
     * Enable collective access for the data transfer.
     */
    VRFY((H5Pset_dxpl_mpio(dxpl_id, H5FD_MPIO_COLLECTIVE) >= 0), "H5Pset_dxpl_mpio succeeded");

    VRFY((H5Dwrite(dset_id, H5T_NATIVE_INT, mspace_id, fspace_id, dxpl_id, data) >= 0), "H5Dwrite succeeded");

    VRFY((H5Fflush(file_id, H5F_SCOPE_GLOBAL) >= 0), "H5Fflush succeeded");

    /*
     * Ensure that linked-chunk I/O is performed since this is
     * the particular code path where the issue lies and we don't
     * want the library doing multi-chunk I/O behind our backs.
     */
    VRFY((H5Pset_dxpl_mpio_chunk_opt(dxpl_id, H5FD_MPIO_CHUNK_ONE_IO) >= 0),
         "H5Pset_dxpl_mpio_chunk_opt succeeded");

    read_buf = HDmalloc(count[0] * sizeof(int));
    VRFY((read_buf != NULL), "malloc succeeded");

    /* NOTE(review): this re-selects the same hyperslab and recreates mspace_id
     * with the same extent as above — it looks redundant but mirrors the write
     * setup for the read; confirm before simplifying */
    VRFY((H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block) >= 0),
         "H5Sselect_hyperslab succeeded");

    sel_dims[0] = count[0];

    VRFY((H5Sclose(mspace_id) >= 0), "H5Sclose succeeded");

    mspace_id = H5Screate_simple(1, sel_dims, NULL);
    VRFY((mspace_id >= 0), "H5Screate_simple succeeded");

    /*
     * Finally have each rank read their section of data back from the dataset.
     */
    VRFY((H5Dread(dset_id, H5T_NATIVE_INT, mspace_id, fspace_id, dxpl_id, read_buf) >= 0),
         "H5Dread succeeded");

    if (data) {
        HDfree(data);
        data = NULL;
    }

    if (read_buf) {
        HDfree(read_buf);
        read_buf = NULL;
    }

    VRFY((H5Sclose(fspace_id) >= 0), "H5Sclose succeeded");
    VRFY((H5Sclose(mspace_id) >= 0), "H5Sclose succeeded");
    VRFY((H5Pclose(dcpl_id) >= 0), "H5Pclose succeeded");
    VRFY((H5Pclose(dxpl_id) >= 0), "H5Pclose succeeded");
    VRFY((H5Dclose(dset_id) >= 0), "H5Dclose succeeded");
    VRFY((H5Pclose(fapl_id) >= 0), "H5Pclose succeeded");
    VRFY((H5Fclose(file_id) >= 0), "H5Fclose succeeded");
}
+
+/*
+ * A test for GitHub issue #2433 which causes a collective metadata write
+ * of global heap data. This test is meant to ensure that global heap data
+ * gets correctly mapped as raw data during a collective metadata write
+ * using vector I/O.
+ *
+ * An assertion exists in the library that should be triggered if global
+ * heap data is not correctly mapped as raw data.
+ */
+void
+test_collective_global_heap_write(void)
+{
+ const char *filename;
+ hsize_t attr_dims[COLL_GHEAP_WRITE_ATTR_DIMS];
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id = H5I_INVALID_HID;
+ hid_t attr_id = H5I_INVALID_HID;
+ hid_t vl_type = H5I_INVALID_HID;
+ hid_t fspace_id = H5I_INVALID_HID;
+ hvl_t vl_data;
+ int mpi_rank, mpi_size;
+ int data_buf[COLL_GHEAP_WRITE_ATTR_NELEMS];
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset or file flush aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+
+ fapl_id = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ VRFY((fapl_id >= 0), "create_faccess_plist succeeded");
+
+ /*
+ * Even though the testphdf5 framework currently sets collective metadata
+ * writes on the FAPL, we call it here just to be sure this is futureproof,
+ * since demonstrating this issue relies upon it.
+ */
+ VRFY((H5Pset_coll_metadata_write(fapl_id, true) >= 0), "Set collective metadata writes succeeded");
+
+ file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ VRFY((file_id >= 0), "H5Fcreate succeeded");
+
+ attr_dims[0] = 1;
+
+ fspace_id = H5Screate_simple(COLL_GHEAP_WRITE_ATTR_DIMS, attr_dims, NULL);
+ VRFY((fspace_id >= 0), "H5Screate_simple succeeded");
+
+ vl_type = H5Tvlen_create(H5T_NATIVE_INT);
+ VRFY((vl_type >= 0), "H5Tvlen_create succeeded");
+
+ vl_data.len = COLL_GHEAP_WRITE_ATTR_NELEMS;
+ vl_data.p = data_buf;
+
+ /*
+ * Create a variable-length attribute that will get written to the global heap
+ */
+ attr_id = H5Acreate2(file_id, COLL_GHEAP_WRITE_ATTR_NAME, vl_type, fspace_id, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((attr_id >= 0), "H5Acreate2 succeeded");
+
+ for (size_t i = 0; i < COLL_GHEAP_WRITE_ATTR_NELEMS; i++)
+ data_buf[i] = (int)i;
+
+ VRFY((H5Awrite(attr_id, vl_type, &vl_data) >= 0), "H5Awrite succeeded");
+
+ VRFY((H5Sclose(fspace_id) >= 0), "H5Sclose succeeded");
+ VRFY((H5Tclose(vl_type) >= 0), "H5Sclose succeeded");
+ VRFY((H5Aclose(attr_id) >= 0), "H5Aclose succeeded");
+ VRFY((H5Pclose(fapl_id) >= 0), "H5Pclose succeeded");
+ VRFY((H5Fclose(file_id) >= 0), "H5Fclose succeeded");
+}
diff --git a/testpar/API/t_dset.c b/testpar/API/t_dset.c
new file mode 100644
index 0000000..d005243
--- /dev/null
+++ b/testpar/API/t_dset.c
@@ -0,0 +1,4335 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * Parallel tests for datasets
+ */
+
+/*
+ * Example of using the parallel HDF5 library to access datasets.
+ *
+ * This program contains three major parts. Part 1 tests fixed dimension
+ * datasets, for both independent and collective transfer modes.
+ * Part 2 tests extendible datasets, for independent transfer mode
+ * only.
+ * Part 3 tests extendible datasets, for collective transfer mode
+ * only.
+ */
+
+#include "hdf5.h"
+#include "testphdf5.h"
+
+/*
+ * The following are various utility routines used by the tests.
+ */
+
+/*
+ * Setup the dimensions of the hyperslab.
+ * Two modes--by rows or by columns.
+ * Assume dimension rank is 2.
+ * BYROW divide into slabs of rows
+ * BYCOL divide into blocks of columns
+ * ZROW same as BYROW except process 0 gets 0 rows
+ * ZCOL same as BYCOL except process 0 gets 0 columns
+ */
+static void
+slab_set(int mpi_rank, int mpi_size, hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[],
+ int mode)
+{
+ switch (mode) {
+ case BYROW:
+ /* Each process takes a slabs of rows. */
+ block[0] = (hsize_t)(dim0 / mpi_size);
+ block[1] = (hsize_t)dim1;
+ stride[0] = block[0];
+ stride[1] = block[1];
+ count[0] = 1;
+ count[1] = 1;
+ start[0] = (hsize_t)mpi_rank * block[0];
+ start[1] = 0;
+ if (VERBOSE_MED)
+ HDprintf("slab_set BYROW\n");
+ break;
+ case BYCOL:
+ /* Each process takes a block of columns. */
+ block[0] = (hsize_t)dim0;
+ block[1] = (hsize_t)(dim1 / mpi_size);
+ stride[0] = block[0];
+ stride[1] = block[1];
+ count[0] = 1;
+ count[1] = 1;
+ start[0] = 0;
+ start[1] = (hsize_t)mpi_rank * block[1];
+ if (VERBOSE_MED)
+ HDprintf("slab_set BYCOL\n");
+ break;
+ case ZROW:
+ /* Similar to BYROW except process 0 gets 0 row */
+ block[0] = (hsize_t)(mpi_rank ? dim0 / mpi_size : 0);
+ block[1] = (hsize_t)dim1;
+ stride[0] = (mpi_rank ? block[0] : 1); /* avoid setting stride to 0 */
+ stride[1] = block[1];
+ count[0] = 1;
+ count[1] = 1;
+ start[0] = (mpi_rank ? (hsize_t)mpi_rank * block[0] : 0);
+ start[1] = 0;
+ if (VERBOSE_MED)
+ HDprintf("slab_set ZROW\n");
+ break;
+ case ZCOL:
+ /* Similar to BYCOL except process 0 gets 0 column */
+ block[0] = (hsize_t)dim0;
+ block[1] = (hsize_t)(mpi_rank ? dim1 / mpi_size : 0);
+ stride[0] = block[0];
+ stride[1] = (hsize_t)(mpi_rank ? block[1] : 1); /* avoid setting stride to 0 */
+ count[0] = 1;
+ count[1] = 1;
+ start[0] = 0;
+ start[1] = (mpi_rank ? (hsize_t)mpi_rank * block[1] : 0);
+ if (VERBOSE_MED)
+ HDprintf("slab_set ZCOL\n");
+ break;
+ default:
+ /* Unknown mode. Set it to cover the whole dataset. */
+ HDprintf("unknown slab_set mode (%d)\n", mode);
+ block[0] = (hsize_t)dim0;
+ block[1] = (hsize_t)dim1;
+ stride[0] = block[0];
+ stride[1] = block[1];
+ count[0] = 1;
+ count[1] = 1;
+ start[0] = 0;
+ start[1] = 0;
+ if (VERBOSE_MED)
+ HDprintf("slab_set wholeset\n");
+ break;
+ }
+ if (VERBOSE_MED) {
+ HDprintf("start[]=(%lu,%lu), count[]=(%lu,%lu), stride[]=(%lu,%lu), block[]=(%lu,%lu), total "
+ "datapoints=%lu\n",
+ (unsigned long)start[0], (unsigned long)start[1], (unsigned long)count[0],
+ (unsigned long)count[1], (unsigned long)stride[0], (unsigned long)stride[1],
+ (unsigned long)block[0], (unsigned long)block[1],
+ (unsigned long)(block[0] * block[1] * count[0] * count[1]));
+ }
+}
+
+/*
+ * Setup the coordinates for point selection.
+ */
+void
+point_set(hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[], size_t num_points,
+ hsize_t coords[], int order)
+{
+ hsize_t i, j, k = 0, m, n, s1, s2;
+
+ HDcompile_assert(RANK == 2);
+
+ if (OUT_OF_ORDER == order)
+ k = (num_points * RANK) - 1;
+ else if (IN_ORDER == order)
+ k = 0;
+
+ s1 = start[0];
+ s2 = start[1];
+
+ for (i = 0; i < count[0]; i++)
+ for (j = 0; j < count[1]; j++)
+ for (m = 0; m < block[0]; m++)
+ for (n = 0; n < block[1]; n++)
+ if (OUT_OF_ORDER == order) {
+ coords[k--] = s2 + (stride[1] * j) + n;
+ coords[k--] = s1 + (stride[0] * i) + m;
+ }
+ else if (IN_ORDER == order) {
+ coords[k++] = s1 + stride[0] * i + m;
+ coords[k++] = s2 + stride[1] * j + n;
+ }
+
+ if (VERBOSE_MED) {
+ HDprintf("start[]=(%lu, %lu), count[]=(%lu, %lu), stride[]=(%lu, %lu), block[]=(%lu, %lu), total "
+ "datapoints=%lu\n",
+ (unsigned long)start[0], (unsigned long)start[1], (unsigned long)count[0],
+ (unsigned long)count[1], (unsigned long)stride[0], (unsigned long)stride[1],
+ (unsigned long)block[0], (unsigned long)block[1],
+ (unsigned long)(block[0] * block[1] * count[0] * count[1]));
+ k = 0;
+ for (i = 0; i < num_points; i++) {
+ HDprintf("(%d, %d)\n", (int)coords[k], (int)coords[k + 1]);
+ k += 2;
+ }
+ }
+}
+
+/*
+ * Fill the dataset with trivial data for testing.
+ * Assume dimension rank is 2 and data is stored contiguous.
+ */
+static void
+dataset_fill(hsize_t start[], hsize_t block[], DATATYPE *dataset)
+{
+ DATATYPE *dataptr = dataset;
+ hsize_t i, j;
+
+ /* put some trivial data in the data_array */
+ for (i = 0; i < block[0]; i++) {
+ for (j = 0; j < block[1]; j++) {
+ *dataptr = (DATATYPE)((i + start[0]) * 100 + (j + start[1] + 1));
+ dataptr++;
+ }
+ }
+}
+
+/*
+ * Print the content of the dataset.
+ */
+static void
+dataset_print(hsize_t start[], hsize_t block[], DATATYPE *dataset)
+{
+ DATATYPE *dataptr = dataset;
+ hsize_t i, j;
+
+ /* print the column heading */
+ HDprintf("%-8s", "Cols:");
+ for (j = 0; j < block[1]; j++) {
+ HDprintf("%3lu ", (unsigned long)(start[1] + j));
+ }
+ HDprintf("\n");
+
+ /* print the slab data */
+ for (i = 0; i < block[0]; i++) {
+ HDprintf("Row %2lu: ", (unsigned long)(i + start[0]));
+ for (j = 0; j < block[1]; j++) {
+ HDprintf("%03d ", *dataptr++);
+ }
+ HDprintf("\n");
+ }
+}
+
+/*
+ * Print the content of the dataset.
+ */
+int
+dataset_vrfy(hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[], DATATYPE *dataset,
+ DATATYPE *original)
+{
+ hsize_t i, j;
+ int vrfyerrs;
+
+ /* print it if VERBOSE_MED */
+ if (VERBOSE_MED) {
+ HDprintf("dataset_vrfy dumping:::\n");
+ HDprintf("start(%lu, %lu), count(%lu, %lu), stride(%lu, %lu), block(%lu, %lu)\n",
+ (unsigned long)start[0], (unsigned long)start[1], (unsigned long)count[0],
+ (unsigned long)count[1], (unsigned long)stride[0], (unsigned long)stride[1],
+ (unsigned long)block[0], (unsigned long)block[1]);
+ HDprintf("original values:\n");
+ dataset_print(start, block, original);
+ HDprintf("compared values:\n");
+ dataset_print(start, block, dataset);
+ }
+
+ vrfyerrs = 0;
+ for (i = 0; i < block[0]; i++) {
+ for (j = 0; j < block[1]; j++) {
+ if (*dataset != *original) {
+ if (vrfyerrs++ < MAX_ERR_REPORT || VERBOSE_MED) {
+ HDprintf("Dataset Verify failed at [%lu][%lu](row %lu, col %lu): expect %d, got %d\n",
+ (unsigned long)i, (unsigned long)j, (unsigned long)(i + start[0]),
+ (unsigned long)(j + start[1]), *(original), *(dataset));
+ }
+ dataset++;
+ original++;
+ }
+ }
+ }
+ if (vrfyerrs > MAX_ERR_REPORT && !VERBOSE_MED)
+ HDprintf("[more errors ...]\n");
+ if (vrfyerrs)
+ HDprintf("%d errors found in dataset_vrfy\n", vrfyerrs);
+ return (vrfyerrs);
+}
+
+/*
+ * Part 1.a--Independent read/write for fixed dimension datasets.
+ */
+
+/*
+ * Example of using the parallel HDF5 library to create two datasets
+ * in one HDF5 files with parallel MPIO access support.
+ * The Datasets are of sizes (number-of-mpi-processes x dim0) x dim1.
+ * Each process controls only a slab of size dim0 x dim1 within each
+ * dataset.
+ */
+
+void
+dataset_writeInd(void)
+{
+ hid_t fid; /* HDF5 file ID */
+ hid_t acc_tpl; /* File access templates */
+ hid_t sid; /* Dataspace ID */
+ hid_t file_dataspace; /* File dataspace ID */
+ hid_t mem_dataspace; /* memory dataspace ID */
+ hid_t dataset1, dataset2; /* Dataset ID */
+ hsize_t dims[RANK]; /* dataset dim sizes */
+ DATATYPE *data_array1 = NULL; /* data buffer */
+ const char *filename;
+
+ hsize_t start[RANK]; /* for hyperslab setting */
+ hsize_t count[RANK], stride[RANK]; /* for hyperslab setting */
+ hsize_t block[RANK]; /* for hyperslab setting */
+
+ herr_t ret; /* Generic return value */
+ int mpi_size, mpi_rank;
+
+ MPI_Comm comm = MPI_COMM_WORLD;
+ MPI_Info info = MPI_INFO_NULL;
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+ if (VERBOSE_MED)
+ HDprintf("Independent write test on file %s\n", filename);
+
+ /* set up MPI parameters */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, basic dataset, or more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* allocate memory for data buffer */
+ data_array1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_array1 != NULL), "data_array1 HDmalloc succeeded");
+
+ /* ----------------------------------------
+ * CREATE AN HDF5 FILE WITH PARALLEL ACCESS
+ * ---------------------------------------*/
+ /* setup file access template */
+ acc_tpl = create_faccess_plist(comm, info, facc_type);
+ VRFY((acc_tpl >= 0), "");
+
+ /* create the file collectively */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl);
+ VRFY((fid >= 0), "H5Fcreate succeeded");
+
+ /* Release file-access template */
+ ret = H5Pclose(acc_tpl);
+ VRFY((ret >= 0), "");
+
+ /* ---------------------------------------------
+ * Define the dimensions of the overall datasets
+ * and the slabs local to the MPI process.
+ * ------------------------------------------- */
+ /* setup dimensionality object */
+ dims[0] = (hsize_t)dim0;
+ dims[1] = (hsize_t)dim1;
+ sid = H5Screate_simple(RANK, dims, NULL);
+ VRFY((sid >= 0), "H5Screate_simple succeeded");
+
+ /* create a dataset collectively */
+ dataset1 = H5Dcreate2(fid, DATASETNAME1, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((dataset1 >= 0), "H5Dcreate2 succeeded");
+
+ /* create another dataset collectively */
+ dataset2 = H5Dcreate2(fid, DATASETNAME2, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((dataset2 >= 0), "H5Dcreate2 succeeded");
+
+ /*
+ * To test the independent orders of writes between processes, all
+ * even number processes write to dataset1 first, then dataset2.
+ * All odd number processes write to dataset2 first, then dataset1.
+ */
+
+ /* set up dimensions of the slab this process accesses */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+ /* put some trivial data in the data_array */
+ dataset_fill(start, block, data_array1);
+ MESG("data_array initialized");
+
+ /* create a file dataspace independently */
+ file_dataspace = H5Dget_space(dataset1);
+ VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ /* create a memory dataspace independently */
+ mem_dataspace = H5Screate_simple(RANK, block, NULL);
+ VRFY((mem_dataspace >= 0), "");
+
+ /* write data independently */
+ ret = H5Dwrite(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1);
+ VRFY((ret >= 0), "H5Dwrite dataset1 succeeded");
+ /* write data independently */
+ ret = H5Dwrite(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1);
+ VRFY((ret >= 0), "H5Dwrite dataset2 succeeded");
+
+ /* setup dimensions again to write with zero rows for process 0 */
+ if (VERBOSE_MED)
+ HDprintf("writeInd by some with zero row\n");
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, ZROW);
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+ /* need to make mem_dataspace to match for process 0 */
+ if (MAINPROCESS) {
+ ret = H5Sselect_hyperslab(mem_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab mem_dataspace succeeded");
+ }
+ MESG("writeInd by some with zero row");
+ if ((mpi_rank / 2) * 2 != mpi_rank) {
+ ret = H5Dwrite(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1);
+ VRFY((ret >= 0), "H5Dwrite dataset1 by ZROW succeeded");
+ }
+#ifdef BARRIER_CHECKS
+ MPI_Barrier(MPI_COMM_WORLD);
+#endif /* BARRIER_CHECKS */
+
+ /* release dataspace ID */
+ H5Sclose(file_dataspace);
+
+ /* close dataset collectively */
+ ret = H5Dclose(dataset1);
+ VRFY((ret >= 0), "H5Dclose1 succeeded");
+ ret = H5Dclose(dataset2);
+ VRFY((ret >= 0), "H5Dclose2 succeeded");
+
+ /* release all IDs created */
+ H5Sclose(sid);
+
+ /* close the file collectively */
+ H5Fclose(fid);
+
+ /* release data buffers */
+ if (data_array1)
+ HDfree(data_array1);
+}
+
+/* Example of using the parallel HDF5 library to read a dataset */
+void
+dataset_readInd(void)
+{
+ hid_t fid; /* HDF5 file ID */
+ hid_t acc_tpl; /* File access templates */
+ hid_t file_dataspace; /* File dataspace ID */
+ hid_t mem_dataspace; /* memory dataspace ID */
+ hid_t dataset1, dataset2; /* Dataset ID */
+ DATATYPE *data_array1 = NULL; /* data buffer */
+ DATATYPE *data_origin1 = NULL; /* expected data buffer */
+ const char *filename;
+
+ hsize_t start[RANK]; /* for hyperslab setting */
+ hsize_t count[RANK], stride[RANK]; /* for hyperslab setting */
+ hsize_t block[RANK]; /* for hyperslab setting */
+
+ herr_t ret; /* Generic return value */
+ int mpi_size, mpi_rank;
+
+ MPI_Comm comm = MPI_COMM_WORLD;
+ MPI_Info info = MPI_INFO_NULL;
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+ if (VERBOSE_MED)
+ HDprintf("Independent read test on file %s\n", filename);
+
+ /* set up MPI parameters */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, basic dataset, or more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* allocate memory for data buffer */
+ data_array1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_array1 != NULL), "data_array1 HDmalloc succeeded");
+ data_origin1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_origin1 != NULL), "data_origin1 HDmalloc succeeded");
+
+ /* setup file access template */
+ acc_tpl = create_faccess_plist(comm, info, facc_type);
+ VRFY((acc_tpl >= 0), "");
+
+ /* open the file collectively */
+ fid = H5Fopen(filename, H5F_ACC_RDONLY, acc_tpl);
+ VRFY((fid >= 0), "");
+
+ /* Release file-access template */
+ ret = H5Pclose(acc_tpl);
+ VRFY((ret >= 0), "");
+
+ /* open the dataset1 collectively */
+ dataset1 = H5Dopen2(fid, DATASETNAME1, H5P_DEFAULT);
+ VRFY((dataset1 >= 0), "");
+
+ /* open another dataset collectively */
+ dataset2 = H5Dopen2(fid, DATASETNAME1, H5P_DEFAULT);
+ VRFY((dataset2 >= 0), "");
+
+ /* set up dimensions of the slab this process accesses */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+ /* create a file dataspace independently */
+ file_dataspace = H5Dget_space(dataset1);
+ VRFY((file_dataspace >= 0), "");
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "");
+
+ /* create a memory dataspace independently */
+ mem_dataspace = H5Screate_simple(RANK, block, NULL);
+ VRFY((mem_dataspace >= 0), "");
+
+ /* fill dataset with test data */
+ dataset_fill(start, block, data_origin1);
+
+ /* read data independently */
+ ret = H5Dread(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1);
+ VRFY((ret >= 0), "");
+
+ /* verify the read data with original expected data */
+ ret = dataset_vrfy(start, count, stride, block, data_array1, data_origin1);
+ if (ret)
+ nerrors++;
+
+ /* read data independently */
+ ret = H5Dread(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1);
+ VRFY((ret >= 0), "");
+
+ /* verify the read data with original expected data */
+ ret = dataset_vrfy(start, count, stride, block, data_array1, data_origin1);
+ if (ret)
+ nerrors++;
+
+ /* close dataset collectively */
+ ret = H5Dclose(dataset1);
+ VRFY((ret >= 0), "");
+ ret = H5Dclose(dataset2);
+ VRFY((ret >= 0), "");
+
+ /* release all IDs created */
+ H5Sclose(file_dataspace);
+
+ /* close the file collectively */
+ H5Fclose(fid);
+
+ /* release data buffers */
+ if (data_array1)
+ HDfree(data_array1);
+ if (data_origin1)
+ HDfree(data_origin1);
+}
+
+/*
+ * Part 1.b--Collective read/write for fixed dimension datasets.
+ */
+
+/*
+ * Example of using the parallel HDF5 library to create two datasets
+ * in one HDF5 file with collective parallel access support.
+ * The Datasets are of sizes (number-of-mpi-processes x dim0) x dim1.
+ * Each process controls only a slab of size dim0 x dim1 within each
+ * dataset. [Note: not so yet. Datasets are of sizes dim0xdim1 and
+ * each process controls a hyperslab within.]
+ */
+
+/*
+ * Collective-write test: exercises hyperslab, "none", "all", scalar,
+ * and point selections against seven datasets in one file.  Errors are
+ * recorded through VRFY()/nerrors; the function takes no arguments and
+ * returns nothing (interface unchanged).
+ *
+ * Fixes relative to the previous revision:
+ *   - the H5Tcopy'd datatype used to create dataset2 is now closed
+ *     before the file is closed (it was leaked, which trips HDF5's
+ *     open-ID accounting at H5Fclose)
+ *   - the VRFY failure messages around the Dataset4 select_none /
+ *     select_all setup were swapped; each now names the call it checks
+ *   - H5Pset_dxpl_mpio checks no longer report "H5Pcreate xfer"
+ */
+void
+dataset_writeAll(void)
+{
+    hid_t       fid;            /* HDF5 file ID */
+    hid_t       acc_tpl;        /* File access templates */
+    hid_t       xfer_plist;     /* Dataset transfer properties list */
+    hid_t       sid;            /* Dataspace ID */
+    hid_t       file_dataspace; /* File dataspace ID */
+    hid_t       mem_dataspace;  /* memory dataspace ID */
+    hid_t       dataset1, dataset2, dataset3, dataset4; /* Dataset ID */
+    hid_t       dataset5, dataset6, dataset7;           /* Dataset ID */
+    hid_t       datatype;       /* Datatype ID */
+    hsize_t     dims[RANK];     /* dataset dim sizes */
+    DATATYPE   *data_array1 = NULL; /* data buffer */
+    const char *filename;
+
+    hsize_t start[RANK];               /* for hyperslab setting */
+    hsize_t count[RANK], stride[RANK]; /* for hyperslab setting */
+    hsize_t block[RANK];               /* for hyperslab setting */
+
+    size_t   num_points;     /* for point selection */
+    hsize_t *coords = NULL;  /* for point selection */
+    hsize_t  current_dims;   /* for point selection */
+
+    herr_t ret; /* Generic return value */
+    int    mpi_size, mpi_rank;
+
+    MPI_Comm comm = MPI_COMM_WORLD;
+    MPI_Info info = MPI_INFO_NULL;
+
+    filename = PARATESTFILE /* GetTestParameters() */;
+    if (VERBOSE_MED)
+        HDprintf("Collective write test on file %s\n", filename);
+
+    /* set up MPI parameters */
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file, basic dataset, or more aren't supported with this "
+                   "connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    /* set up the coords array selection */
+    num_points = (size_t)dim1;
+    coords     = (hsize_t *)HDmalloc((size_t)dim1 * (size_t)RANK * sizeof(hsize_t));
+    VRFY((coords != NULL), "coords malloc succeeded");
+
+    /* allocate memory for data buffer */
+    data_array1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+    VRFY((data_array1 != NULL), "data_array1 HDmalloc succeeded");
+
+    /* -------------------
+     * START AN HDF5 FILE
+     * -------------------*/
+    /* setup file access template */
+    acc_tpl = create_faccess_plist(comm, info, facc_type);
+    VRFY((acc_tpl >= 0), "");
+
+    /* create the file collectively */
+    fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl);
+    VRFY((fid >= 0), "H5Fcreate succeeded");
+
+    /* Release file-access template */
+    ret = H5Pclose(acc_tpl);
+    VRFY((ret >= 0), "");
+
+    /* --------------------------
+     * Define the dimensions of the overall datasets
+     * and create the dataset
+     * ------------------------- */
+    /* setup 2-D dimensionality object */
+    dims[0] = (hsize_t)dim0;
+    dims[1] = (hsize_t)dim1;
+    sid     = H5Screate_simple(RANK, dims, NULL);
+    VRFY((sid >= 0), "H5Screate_simple succeeded");
+
+    /* create a dataset collectively */
+    dataset1 = H5Dcreate2(fid, DATASETNAME1, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY((dataset1 >= 0), "H5Dcreate2 succeeded");
+
+    /* create another dataset collectively, using an explicitly
+     * little-endian copy of the native int type (closed below) */
+    datatype = H5Tcopy(H5T_NATIVE_INT);
+    ret      = H5Tset_order(datatype, H5T_ORDER_LE);
+    VRFY((ret >= 0), "H5Tset_order succeeded");
+
+    dataset2 = H5Dcreate2(fid, DATASETNAME2, datatype, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY((dataset2 >= 0), "H5Dcreate2 2 succeeded");
+
+    /* create a third dataset collectively */
+    dataset3 = H5Dcreate2(fid, DATASETNAME3, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY((dataset3 >= 0), "H5Dcreate2 succeeded");
+
+    dataset5 = H5Dcreate2(fid, DATASETNAME7, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY((dataset5 >= 0), "H5Dcreate2 succeeded");
+    dataset6 = H5Dcreate2(fid, DATASETNAME8, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY((dataset6 >= 0), "H5Dcreate2 succeeded");
+    dataset7 = H5Dcreate2(fid, DATASETNAME9, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY((dataset7 >= 0), "H5Dcreate2 succeeded");
+
+    /* release 2-D space ID created */
+    H5Sclose(sid);
+
+    /* setup scalar dimensionality object */
+    sid = H5Screate(H5S_SCALAR);
+    VRFY((sid >= 0), "H5Screate succeeded");
+
+    /* create a fourth dataset collectively */
+    dataset4 = H5Dcreate2(fid, DATASETNAME4, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY((dataset4 >= 0), "H5Dcreate2 succeeded");
+
+    /* release scalar space ID created */
+    H5Sclose(sid);
+
+    /*
+     * Set up dimensions of the slab this process accesses.
+     */
+
+    /* Dataset1: each process takes a block of rows. */
+    slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+    /* create a file dataspace independently */
+    file_dataspace = H5Dget_space(dataset1);
+    VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "H5Sselect_hyperslab succeeded");
+
+    /* create a memory dataspace independently */
+    mem_dataspace = H5Screate_simple(RANK, block, NULL);
+    VRFY((mem_dataspace >= 0), "");
+
+    /* fill the local slab with some trivial data */
+    dataset_fill(start, block, data_array1);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+        dataset_print(start, block, data_array1);
+    }
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((xfer_plist >= 0), "H5Pcreate xfer succeeded");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* write data collectively */
+    MESG("writeAll by Row");
+    ret = H5Dwrite(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((ret >= 0), "H5Dwrite dataset1 succeeded");
+
+    /* setup dimensions again to writeAll with zero rows for process 0 */
+    if (VERBOSE_MED)
+        HDprintf("writeAll by some with zero row\n");
+    slab_set(mpi_rank, mpi_size, start, count, stride, block, ZROW);
+    ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "H5Sselect_hyperslab succeeded");
+    /* need to make mem_dataspace to match for process 0 */
+    if (MAINPROCESS) {
+        ret = H5Sselect_hyperslab(mem_dataspace, H5S_SELECT_SET, start, stride, count, block);
+        VRFY((ret >= 0), "H5Sselect_hyperslab mem_dataspace succeeded");
+    }
+    MESG("writeAll by some with zero row");
+    ret = H5Dwrite(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((ret >= 0), "H5Dwrite dataset1 by ZROW succeeded");
+
+    /* release all temporary handles. */
+    /* Could have used them for dataset2 but it is cleaner */
+    /* to create them again.*/
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+
+    /* Dataset2: each process takes a block of columns. */
+    slab_set(mpi_rank, mpi_size, start, count, stride, block, BYCOL);
+
+    /* put some trivial data in the data_array */
+    dataset_fill(start, block, data_array1);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+        dataset_print(start, block, data_array1);
+    }
+
+    /* create a file dataspace independently */
+    file_dataspace = H5Dget_space(dataset1);
+    VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "H5Sselect_hyperslab succeeded");
+
+    /* create a memory dataspace independently */
+    mem_dataspace = H5Screate_simple(RANK, block, NULL);
+    VRFY((mem_dataspace >= 0), "");
+
+    /* fill the local slab with some trivial data */
+    dataset_fill(start, block, data_array1);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+        dataset_print(start, block, data_array1);
+    }
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((xfer_plist >= 0), "");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* write data independently */
+    ret = H5Dwrite(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((ret >= 0), "H5Dwrite dataset2 succeeded");
+
+    /* setup dimensions again to writeAll with zero columns for process 0 */
+    if (VERBOSE_MED)
+        HDprintf("writeAll by some with zero col\n");
+    slab_set(mpi_rank, mpi_size, start, count, stride, block, ZCOL);
+    ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "H5Sselect_hyperslab succeeded");
+    /* need to make mem_dataspace to match for process 0 */
+    if (MAINPROCESS) {
+        ret = H5Sselect_hyperslab(mem_dataspace, H5S_SELECT_SET, start, stride, count, block);
+        VRFY((ret >= 0), "H5Sselect_hyperslab mem_dataspace succeeded");
+    }
+    MESG("writeAll by some with zero col");
+    ret = H5Dwrite(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((ret >= 0), "H5Dwrite dataset1 by ZCOL succeeded");
+
+    /* release all temporary handles. */
+    /* Could have used them for dataset3 but it is cleaner */
+    /* to create them again.*/
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+
+    /* Dataset3: each process takes a block of rows, except process zero uses "none" selection. */
+    slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+    /* create a file dataspace independently */
+    file_dataspace = H5Dget_space(dataset3);
+    VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+    if (MAINPROCESS) {
+        ret = H5Sselect_none(file_dataspace);
+        VRFY((ret >= 0), "H5Sselect_none file_dataspace succeeded");
+    } /* end if */
+    else {
+        ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+        VRFY((ret >= 0), "H5Sselect_hyperslab succeeded");
+    } /* end else */
+
+    /* create a memory dataspace independently */
+    mem_dataspace = H5Screate_simple(RANK, block, NULL);
+    VRFY((mem_dataspace >= 0), "");
+    if (MAINPROCESS) {
+        ret = H5Sselect_none(mem_dataspace);
+        VRFY((ret >= 0), "H5Sselect_none mem_dataspace succeeded");
+    } /* end if */
+
+    /* fill the local slab with some trivial data */
+    dataset_fill(start, block, data_array1);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+        dataset_print(start, block, data_array1);
+    } /* end if */
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((xfer_plist >= 0), "");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* write data collectively */
+    MESG("writeAll with none");
+    ret = H5Dwrite(dataset3, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((ret >= 0), "H5Dwrite dataset3 succeeded");
+
+    /* write data collectively (with datatype conversion) */
+    MESG("writeAll with none");
+    ret = H5Dwrite(dataset3, H5T_NATIVE_UCHAR, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((ret >= 0), "H5Dwrite dataset3 succeeded");
+
+    /* release all temporary handles. */
+    /* Could have used them for dataset4 but it is cleaner */
+    /* to create them again.*/
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+
+    /* Dataset4: each process writes no data, except process zero uses "all" selection. */
+    /* Additionally, these are in a scalar dataspace */
+
+    /* create a file dataspace independently */
+    file_dataspace = H5Dget_space(dataset4);
+    VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+    if (MAINPROCESS) {
+        ret = H5Sselect_none(file_dataspace);
+        VRFY((ret >= 0), "H5Sselect_none file_dataspace succeeded");
+    } /* end if */
+    else {
+        ret = H5Sselect_all(file_dataspace);
+        VRFY((ret >= 0), "H5Sselect_all file_dataspace succeeded");
+    } /* end else */
+
+    /* create a memory dataspace independently */
+    mem_dataspace = H5Screate(H5S_SCALAR);
+    VRFY((mem_dataspace >= 0), "");
+    if (MAINPROCESS) {
+        ret = H5Sselect_none(mem_dataspace);
+        VRFY((ret >= 0), "H5Sselect_none mem_dataspace succeeded");
+    } /* end if */
+    else {
+        ret = H5Sselect_all(mem_dataspace);
+        VRFY((ret >= 0), "H5Sselect_all mem_dataspace succeeded");
+    } /* end else */
+
+    /* fill the local slab with some trivial data */
+    dataset_fill(start, block, data_array1);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+        dataset_print(start, block, data_array1);
+    } /* end if */
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((xfer_plist >= 0), "");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* write data collectively */
+    MESG("writeAll with scalar dataspace");
+    ret = H5Dwrite(dataset4, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((ret >= 0), "H5Dwrite dataset4 succeeded");
+
+    /* write data collectively (with datatype conversion) */
+    MESG("writeAll with scalar dataspace");
+    ret = H5Dwrite(dataset4, H5T_NATIVE_UCHAR, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((ret >= 0), "H5Dwrite dataset4 succeeded");
+
+    /* release all temporary handles. */
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+
+    if (data_array1)
+        free(data_array1);
+    data_array1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+    VRFY((data_array1 != NULL), "data_array1 malloc succeeded");
+
+    /* single row per process for the point-selection cases */
+    block[0]  = 1;
+    block[1]  = (hsize_t)dim1;
+    stride[0] = 1;
+    stride[1] = (hsize_t)dim1;
+    count[0]  = 1;
+    count[1]  = 1;
+    start[0]  = (hsize_t)(dim0 / mpi_size * mpi_rank);
+    start[1]  = 0;
+
+    dataset_fill(start, block, data_array1);
+    MESG("data_array initialized");
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+        dataset_print(start, block, data_array1);
+    }
+
+    /* Dataset5: point selection in File - Hyperslab selection in Memory*/
+    /* create a file dataspace independently */
+    point_set(start, count, stride, block, num_points, coords, OUT_OF_ORDER);
+    file_dataspace = H5Dget_space(dataset5);
+    VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_elements(file_dataspace, H5S_SELECT_SET, num_points, coords);
+    VRFY((ret >= 0), "H5Sselect_elements succeeded");
+
+    start[0]      = 0;
+    start[1]      = 0;
+    mem_dataspace = H5Dget_space(dataset5);
+    VRFY((mem_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_hyperslab(mem_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "H5Sselect_hyperslab succeeded");
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((xfer_plist >= 0), "");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* write data collectively */
+    ret = H5Dwrite(dataset5, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((ret >= 0), "H5Dwrite dataset5 succeeded");
+
+    /* release all temporary handles. */
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+
+    /* Dataset6: point selection in File - Point selection in Memory*/
+    /* create a file dataspace independently */
+    start[0] = (hsize_t)(dim0 / mpi_size * mpi_rank);
+    start[1] = 0;
+    point_set(start, count, stride, block, num_points, coords, OUT_OF_ORDER);
+    file_dataspace = H5Dget_space(dataset6);
+    VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_elements(file_dataspace, H5S_SELECT_SET, num_points, coords);
+    VRFY((ret >= 0), "H5Sselect_elements succeeded");
+
+    start[0] = 0;
+    start[1] = 0;
+    point_set(start, count, stride, block, num_points, coords, IN_ORDER);
+    mem_dataspace = H5Dget_space(dataset6);
+    VRFY((mem_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_elements(mem_dataspace, H5S_SELECT_SET, num_points, coords);
+    VRFY((ret >= 0), "H5Sselect_elements succeeded");
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((xfer_plist >= 0), "");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* write data collectively */
+    ret = H5Dwrite(dataset6, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((ret >= 0), "H5Dwrite dataset6 succeeded");
+
+    /* release all temporary handles. */
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+
+    /* Dataset7: point selection in File - All selection in Memory*/
+    /* create a file dataspace independently */
+    start[0] = (hsize_t)(dim0 / mpi_size * mpi_rank);
+    start[1] = 0;
+    point_set(start, count, stride, block, num_points, coords, IN_ORDER);
+    file_dataspace = H5Dget_space(dataset7);
+    VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_elements(file_dataspace, H5S_SELECT_SET, num_points, coords);
+    VRFY((ret >= 0), "H5Sselect_elements succeeded");
+
+    current_dims  = num_points;
+    mem_dataspace = H5Screate_simple(1, &current_dims, NULL);
+    VRFY((mem_dataspace >= 0), "mem_dataspace create succeeded");
+
+    ret = H5Sselect_all(mem_dataspace);
+    VRFY((ret >= 0), "H5Sselect_all succeeded");
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((xfer_plist >= 0), "");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* write data collectively */
+    ret = H5Dwrite(dataset7, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((ret >= 0), "H5Dwrite dataset7 succeeded");
+
+    /* release all temporary handles. */
+    H5Sclose(file_dataspace);
+    H5Sclose(mem_dataspace);
+    H5Pclose(xfer_plist);
+
+    /*
+     * All writes completed.  Close datasets collectively
+     */
+    ret = H5Dclose(dataset1);
+    VRFY((ret >= 0), "H5Dclose1 succeeded");
+    ret = H5Dclose(dataset2);
+    VRFY((ret >= 0), "H5Dclose2 succeeded");
+    ret = H5Dclose(dataset3);
+    VRFY((ret >= 0), "H5Dclose3 succeeded");
+    ret = H5Dclose(dataset4);
+    VRFY((ret >= 0), "H5Dclose4 succeeded");
+    ret = H5Dclose(dataset5);
+    VRFY((ret >= 0), "H5Dclose5 succeeded");
+    ret = H5Dclose(dataset6);
+    VRFY((ret >= 0), "H5Dclose6 succeeded");
+    ret = H5Dclose(dataset7);
+    VRFY((ret >= 0), "H5Dclose7 succeeded");
+
+    /* release the datatype copy used for dataset2 (was leaked) */
+    ret = H5Tclose(datatype);
+    VRFY((ret >= 0), "H5Tclose succeeded");
+
+    /* close the file collectively */
+    H5Fclose(fid);
+
+    /* release data buffers */
+    if (coords)
+        HDfree(coords);
+    if (data_array1)
+        HDfree(data_array1);
+}
+
+/*
+ * Example of using the parallel HDF5 library to read two datasets
+ * in one HDF5 file with collective parallel access support.
+ * The Datasets are of sizes (number-of-mpi-processes x dim0) x dim1.
+ * Each process controls only a slab of size dim0 x dim1 within each
+ * dataset. [Note: not so yet. Datasets are of sizes dim0xdim1 and
+ * each process controls a hyperslab within.]
+ */
+
+void
+dataset_readAll(void)
+{
+ hid_t fid; /* HDF5 file ID */
+ hid_t acc_tpl; /* File access templates */
+ hid_t xfer_plist; /* Dataset transfer properties list */
+ hid_t file_dataspace; /* File dataspace ID */
+ hid_t mem_dataspace; /* memory dataspace ID */
+ hid_t dataset1, dataset2, dataset5, dataset6, dataset7; /* Dataset ID */
+ DATATYPE *data_array1 = NULL; /* data buffer */
+ DATATYPE *data_origin1 = NULL; /* expected data buffer */
+ const char *filename;
+
+ hsize_t start[RANK]; /* for hyperslab setting */
+ hsize_t count[RANK], stride[RANK]; /* for hyperslab setting */
+ hsize_t block[RANK]; /* for hyperslab setting */
+
+ size_t num_points; /* for point selection */
+ hsize_t *coords = NULL; /* for point selection */
+ int i, j, k;
+
+ herr_t ret; /* Generic return value */
+ int mpi_size, mpi_rank;
+
+ MPI_Comm comm = MPI_COMM_WORLD;
+ MPI_Info info = MPI_INFO_NULL;
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+ if (VERBOSE_MED)
+ HDprintf("Collective read test on file %s\n", filename);
+
+ /* set up MPI parameters */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, basic dataset, or more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* set up the coords array selection */
+ num_points = (size_t)dim1;
+ coords = (hsize_t *)HDmalloc((size_t)dim0 * (size_t)dim1 * RANK * sizeof(hsize_t));
+ VRFY((coords != NULL), "coords malloc succeeded");
+
+ /* allocate memory for data buffer */
+ data_array1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_array1 != NULL), "data_array1 HDmalloc succeeded");
+ data_origin1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_origin1 != NULL), "data_origin1 HDmalloc succeeded");
+
+ /* -------------------
+ * OPEN AN HDF5 FILE
+ * -------------------*/
+ /* setup file access template */
+ acc_tpl = create_faccess_plist(comm, info, facc_type);
+ VRFY((acc_tpl >= 0), "");
+
+ /* open the file collectively */
+ fid = H5Fopen(filename, H5F_ACC_RDONLY, acc_tpl);
+ VRFY((fid >= 0), "H5Fopen succeeded");
+
+ /* Release file-access template */
+ ret = H5Pclose(acc_tpl);
+ VRFY((ret >= 0), "");
+
+ /* --------------------------
+ * Open the datasets in it
+ * ------------------------- */
+ /* open the dataset1 collectively */
+ dataset1 = H5Dopen2(fid, DATASETNAME1, H5P_DEFAULT);
+ VRFY((dataset1 >= 0), "H5Dopen2 succeeded");
+
+ /* open another dataset collectively */
+ dataset2 = H5Dopen2(fid, DATASETNAME2, H5P_DEFAULT);
+ VRFY((dataset2 >= 0), "H5Dopen2 2 succeeded");
+
+ /* open another dataset collectively */
+ dataset5 = H5Dopen2(fid, DATASETNAME7, H5P_DEFAULT);
+ VRFY((dataset5 >= 0), "H5Dopen2 5 succeeded");
+ dataset6 = H5Dopen2(fid, DATASETNAME8, H5P_DEFAULT);
+ VRFY((dataset6 >= 0), "H5Dopen2 6 succeeded");
+ dataset7 = H5Dopen2(fid, DATASETNAME9, H5P_DEFAULT);
+ VRFY((dataset7 >= 0), "H5Dopen2 7 succeeded");
+
+ /*
+ * Set up dimensions of the slab this process accesses.
+ */
+
+ /* Dataset1: each process takes a block of columns. */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYCOL);
+
+ /* create a file dataspace independently */
+ file_dataspace = H5Dget_space(dataset1);
+ VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ /* create a memory dataspace independently */
+ mem_dataspace = H5Screate_simple(RANK, block, NULL);
+ VRFY((mem_dataspace >= 0), "");
+
+ /* fill dataset with test data */
+ dataset_fill(start, block, data_origin1);
+ MESG("data_array initialized");
+ if (VERBOSE_MED) {
+ MESG("data_array created");
+ dataset_print(start, block, data_origin1);
+ }
+
+ /* set up the collective transfer properties list */
+ xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((xfer_plist >= 0), "");
+ ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pcreate xfer succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "set independent IO collectively succeeded");
+ }
+
+ /* read data collectively */
+ ret = H5Dread(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+ VRFY((ret >= 0), "H5Dread dataset1 succeeded");
+
+ /* verify the read data with original expected data */
+ ret = dataset_vrfy(start, count, stride, block, data_array1, data_origin1);
+ if (ret)
+ nerrors++;
+
+ /* setup dimensions again to readAll with zero columns for process 0 */
+ if (VERBOSE_MED)
+ HDprintf("readAll by some with zero col\n");
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, ZCOL);
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+ /* need to make mem_dataspace to match for process 0 */
+ if (MAINPROCESS) {
+ ret = H5Sselect_hyperslab(mem_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab mem_dataspace succeeded");
+ }
+ MESG("readAll by some with zero col");
+ ret = H5Dread(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+ VRFY((ret >= 0), "H5Dread dataset1 by ZCOL succeeded");
+
+ /* verify the read data with original expected data */
+ ret = dataset_vrfy(start, count, stride, block, data_array1, data_origin1);
+ if (ret)
+ nerrors++;
+
+ /* release all temporary handles. */
+ /* Could have used them for dataset2 but it is cleaner */
+ /* to create them again.*/
+ H5Sclose(file_dataspace);
+ H5Sclose(mem_dataspace);
+ H5Pclose(xfer_plist);
+
+ /* Dataset2: each process takes a block of rows. */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+ /* create a file dataspace independently */
+ file_dataspace = H5Dget_space(dataset1);
+ VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ /* create a memory dataspace independently */
+ mem_dataspace = H5Screate_simple(RANK, block, NULL);
+ VRFY((mem_dataspace >= 0), "");
+
+ /* fill dataset with test data */
+ dataset_fill(start, block, data_origin1);
+ MESG("data_array initialized");
+ if (VERBOSE_MED) {
+ MESG("data_array created");
+ dataset_print(start, block, data_origin1);
+ }
+
+ /* set up the collective transfer properties list */
+ xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((xfer_plist >= 0), "");
+ ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pcreate xfer succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "set independent IO collectively succeeded");
+ }
+
+ /* read data collectively */
+ ret = H5Dread(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+ VRFY((ret >= 0), "H5Dread dataset2 succeeded");
+
+ /* verify the read data with original expected data */
+ ret = dataset_vrfy(start, count, stride, block, data_array1, data_origin1);
+ if (ret)
+ nerrors++;
+
+ /* setup dimensions again to readAll with zero rows for process 0 */
+ if (VERBOSE_MED)
+ HDprintf("readAll by some with zero row\n");
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, ZROW);
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+ /* need to make mem_dataspace to match for process 0 */
+ if (MAINPROCESS) {
+ ret = H5Sselect_hyperslab(mem_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab mem_dataspace succeeded");
+ }
+ MESG("readAll by some with zero row");
+ ret = H5Dread(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+ VRFY((ret >= 0), "H5Dread dataset1 by ZROW succeeded");
+
+ /* verify the read data with original expected data */
+ ret = dataset_vrfy(start, count, stride, block, data_array1, data_origin1);
+ if (ret)
+ nerrors++;
+
+ /* release all temporary handles. */
+ H5Sclose(file_dataspace);
+ H5Sclose(mem_dataspace);
+ H5Pclose(xfer_plist);
+
+ if (data_array1)
+ free(data_array1);
+ if (data_origin1)
+ free(data_origin1);
+ data_array1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_array1 != NULL), "data_array1 malloc succeeded");
+ data_origin1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_origin1 != NULL), "data_origin1 malloc succeeded");
+
+ block[0] = 1;
+ block[1] = (hsize_t)dim1;
+ stride[0] = 1;
+ stride[1] = (hsize_t)dim1;
+ count[0] = 1;
+ count[1] = 1;
+ start[0] = (hsize_t)(dim0 / mpi_size * mpi_rank);
+ start[1] = 0;
+
+ dataset_fill(start, block, data_origin1);
+ MESG("data_array initialized");
+ if (VERBOSE_MED) {
+ MESG("data_array created");
+ dataset_print(start, block, data_origin1);
+ }
+
+ /* Dataset5: point selection in memory - Hyperslab selection in file*/
+ /* create a file dataspace independently */
+ file_dataspace = H5Dget_space(dataset5);
+ VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ start[0] = 0;
+ start[1] = 0;
+ point_set(start, count, stride, block, num_points, coords, OUT_OF_ORDER);
+ mem_dataspace = H5Dget_space(dataset5);
+ VRFY((mem_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_elements(mem_dataspace, H5S_SELECT_SET, num_points, coords);
+ VRFY((ret >= 0), "H5Sselect_elements succeeded");
+
+ /* set up the collective transfer properties list */
+ xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((xfer_plist >= 0), "");
+ ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pcreate xfer succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "set independent IO collectively succeeded");
+ }
+
+ /* read data collectively */
+ ret = H5Dread(dataset5, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+ VRFY((ret >= 0), "H5Dread dataset5 succeeded");
+
+ ret = dataset_vrfy(start, count, stride, block, data_array1, data_origin1);
+ if (ret)
+ nerrors++;
+
+ /* release all temporary handles. */
+ H5Sclose(file_dataspace);
+ H5Sclose(mem_dataspace);
+ H5Pclose(xfer_plist);
+
+ if (data_array1)
+ free(data_array1);
+ data_array1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_array1 != NULL), "data_array1 malloc succeeded");
+
+ /* Dataset6: point selection in File - Point selection in Memory*/
+ /* create a file dataspace independently */
+ start[0] = (hsize_t)(dim0 / mpi_size * mpi_rank);
+ start[1] = 0;
+ point_set(start, count, stride, block, num_points, coords, IN_ORDER);
+ file_dataspace = H5Dget_space(dataset6);
+ VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_elements(file_dataspace, H5S_SELECT_SET, num_points, coords);
+ VRFY((ret >= 0), "H5Sselect_elements succeeded");
+
+ start[0] = 0;
+ start[1] = 0;
+ point_set(start, count, stride, block, num_points, coords, OUT_OF_ORDER);
+ mem_dataspace = H5Dget_space(dataset6);
+ VRFY((mem_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_elements(mem_dataspace, H5S_SELECT_SET, num_points, coords);
+ VRFY((ret >= 0), "H5Sselect_elements succeeded");
+
+ /* set up the collective transfer properties list */
+ xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((xfer_plist >= 0), "");
+ ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pcreate xfer succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "set independent IO collectively succeeded");
+ }
+
+ /* read data collectively */
+ ret = H5Dread(dataset6, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+ VRFY((ret >= 0), "H5Dread dataset6 succeeded");
+
+ ret = dataset_vrfy(start, count, stride, block, data_array1, data_origin1);
+ if (ret)
+ nerrors++;
+
+ /* release all temporary handles. */
+ H5Sclose(file_dataspace);
+ H5Sclose(mem_dataspace);
+ H5Pclose(xfer_plist);
+
+ if (data_array1)
+ free(data_array1);
+ data_array1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_array1 != NULL), "data_array1 malloc succeeded");
+
+ /* Dataset7: point selection in memory - All selection in file*/
+ /* create a file dataspace independently */
+ file_dataspace = H5Dget_space(dataset7);
+ VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_all(file_dataspace);
+ VRFY((ret >= 0), "H5Sselect_all succeeded");
+
+ num_points = (size_t)(dim0 * dim1);
+ k = 0;
+ for (i = 0; i < dim0; i++) {
+ for (j = 0; j < dim1; j++) {
+ coords[k++] = (hsize_t)i;
+ coords[k++] = (hsize_t)j;
+ }
+ }
+ mem_dataspace = H5Dget_space(dataset7);
+ VRFY((mem_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_elements(mem_dataspace, H5S_SELECT_SET, num_points, coords);
+ VRFY((ret >= 0), "H5Sselect_elements succeeded");
+
+ /* set up the collective transfer properties list */
+ xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((xfer_plist >= 0), "");
+ ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pcreate xfer succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "set independent IO collectively succeeded");
+ }
+
+ /* read data collectively */
+ ret = H5Dread(dataset7, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+ VRFY((ret >= 0), "H5Dread dataset7 succeeded");
+
+ start[0] = (hsize_t)(dim0 / mpi_size * mpi_rank);
+ start[1] = 0;
+ ret = dataset_vrfy(start, count, stride, block, data_array1 + (dim0 / mpi_size * dim1 * mpi_rank),
+ data_origin1);
+ if (ret)
+ nerrors++;
+
+ /* release all temporary handles. */
+ H5Sclose(file_dataspace);
+ H5Sclose(mem_dataspace);
+ H5Pclose(xfer_plist);
+
+ /*
+ * All reads completed. Close datasets collectively
+ */
+ ret = H5Dclose(dataset1);
+ VRFY((ret >= 0), "H5Dclose1 succeeded");
+ ret = H5Dclose(dataset2);
+ VRFY((ret >= 0), "H5Dclose2 succeeded");
+ ret = H5Dclose(dataset5);
+ VRFY((ret >= 0), "H5Dclose5 succeeded");
+ ret = H5Dclose(dataset6);
+ VRFY((ret >= 0), "H5Dclose6 succeeded");
+ ret = H5Dclose(dataset7);
+ VRFY((ret >= 0), "H5Dclose7 succeeded");
+
+ /* close the file collectively */
+ H5Fclose(fid);
+
+ /* release data buffers */
+ if (coords)
+ HDfree(coords);
+ if (data_array1)
+ HDfree(data_array1);
+ if (data_origin1)
+ HDfree(data_origin1);
+}
+
+/*
+ * Part 2--Independent read/write for extendible datasets.
+ */
+
+/*
+ * Example of using the parallel HDF5 library to create two extendible
+ * datasets in one HDF5 file with independent parallel MPIO access support.
+ * The Datasets are of sizes (number-of-mpi-processes x dim0) x dim1.
+ * Each process controls only a slab of size dim0 x dim1 within each
+ * dataset.
+ */
+
+void
+extend_writeInd(void)
+{
+ /* Creates two chunked, extendible datasets (created with 0x0 extent and
+ * unlimited max dims); extends dataset1 before an independent write, then
+ * verifies that writing to the not-yet-extended dataset2 fails until it
+ * is extended. */
+ hid_t fid; /* HDF5 file ID */
+ hid_t acc_tpl; /* File access templates */
+ hid_t sid; /* Dataspace ID */
+ hid_t file_dataspace; /* File dataspace ID */
+ hid_t mem_dataspace; /* memory dataspace ID */
+ hid_t dataset1, dataset2; /* Dataset ID */
+ const char *filename;
+ hsize_t dims[RANK]; /* dataset dim sizes */
+ hsize_t max_dims[RANK] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* dataset maximum dim sizes */
+ DATATYPE *data_array1 = NULL; /* data buffer */
+ hsize_t chunk_dims[RANK]; /* chunk sizes */
+ hid_t dataset_pl; /* dataset create prop. list */
+
+ hsize_t start[RANK]; /* for hyperslab setting */
+ hsize_t count[RANK]; /* for hyperslab setting */
+ hsize_t stride[RANK]; /* for hyperslab setting */
+ hsize_t block[RANK]; /* for hyperslab setting */
+
+ herr_t ret; /* Generic return value */
+ int mpi_size, mpi_rank;
+
+ MPI_Comm comm = MPI_COMM_WORLD;
+ MPI_Info info = MPI_INFO_NULL;
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+ if (VERBOSE_MED)
+ HDprintf("Extend independent write test on file %s\n", filename);
+
+ /* set up MPI parameters */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, basic dataset, or more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* setup chunk-size. Make sure sizes are > 0 */
+ chunk_dims[0] = (hsize_t)chunkdim0;
+ chunk_dims[1] = (hsize_t)chunkdim1;
+
+ /* allocate memory for data buffer */
+ data_array1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_array1 != NULL), "data_array1 HDmalloc succeeded");
+
+ /* -------------------
+ * START AN HDF5 FILE
+ * -------------------*/
+ /* setup file access template */
+ acc_tpl = create_faccess_plist(comm, info, facc_type);
+ VRFY((acc_tpl >= 0), "");
+
+ /* Reduce the number of metadata cache slots, so that there are cache
+ * collisions during the raw data I/O on the chunked dataset. This stresses
+ * the metadata cache and tests for cache bugs. -QAK
+ */
+ {
+ int mdc_nelmts;
+ size_t rdcc_nelmts;
+ size_t rdcc_nbytes;
+ double rdcc_w0;
+
+ ret = H5Pget_cache(acc_tpl, &mdc_nelmts, &rdcc_nelmts, &rdcc_nbytes, &rdcc_w0);
+ VRFY((ret >= 0), "H5Pget_cache succeeded");
+ mdc_nelmts = 4;
+ ret = H5Pset_cache(acc_tpl, mdc_nelmts, rdcc_nelmts, rdcc_nbytes, rdcc_w0);
+ VRFY((ret >= 0), "H5Pset_cache succeeded");
+ }
+
+ /* create the file collectively */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl);
+ VRFY((fid >= 0), "H5Fcreate succeeded");
+
+ /* Release file-access template */
+ ret = H5Pclose(acc_tpl);
+ VRFY((ret >= 0), "");
+
+ /* --------------------------------------------------------------
+ * Define the dimensions of the overall datasets and create them.
+ * ------------------------------------------------------------- */
+
+ /* set up dataset storage chunk sizes and creation property list */
+ if (VERBOSE_MED)
+ HDprintf("chunks[]=%lu,%lu\n", (unsigned long)chunk_dims[0], (unsigned long)chunk_dims[1]);
+ dataset_pl = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((dataset_pl >= 0), "H5Pcreate succeeded");
+ ret = H5Pset_chunk(dataset_pl, RANK, chunk_dims);
+ VRFY((ret >= 0), "H5Pset_chunk succeeded");
+
+ /* setup dimensionality object */
+ /* start out with no rows, extend it later. */
+ dims[0] = dims[1] = 0;
+ sid = H5Screate_simple(RANK, dims, max_dims);
+ VRFY((sid >= 0), "H5Screate_simple succeeded");
+
+ /* create an extendible dataset collectively */
+ dataset1 = H5Dcreate2(fid, DATASETNAME1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dataset_pl, H5P_DEFAULT);
+ VRFY((dataset1 >= 0), "H5Dcreate2 succeeded");
+
+ /* create another extendible dataset collectively */
+ dataset2 = H5Dcreate2(fid, DATASETNAME2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dataset_pl, H5P_DEFAULT);
+ VRFY((dataset2 >= 0), "H5Dcreate2 succeeded");
+
+ /* release resource */
+ H5Sclose(sid);
+ H5Pclose(dataset_pl);
+
+ /* -------------------------
+ * Test writing to dataset1
+ * -------------------------*/
+ /* set up dimensions of the slab this process accesses */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+ /* put some trivial data in the data_array */
+ dataset_fill(start, block, data_array1);
+ MESG("data_array initialized");
+ if (VERBOSE_MED) {
+ MESG("data_array created");
+ dataset_print(start, block, data_array1);
+ }
+
+ /* create a memory dataspace independently */
+ mem_dataspace = H5Screate_simple(RANK, block, NULL);
+ VRFY((mem_dataspace >= 0), "");
+
+ /* Extend its current dim sizes before writing */
+ dims[0] = (hsize_t)dim0;
+ dims[1] = (hsize_t)dim1;
+ ret = H5Dset_extent(dataset1, dims);
+ VRFY((ret >= 0), "H5Dset_extent succeeded");
+
+ /* create a file dataspace independently */
+ file_dataspace = H5Dget_space(dataset1);
+ VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ /* write data independently */
+ /* (H5P_DEFAULT transfer property list => independent, non-collective I/O) */
+ ret = H5Dwrite(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1);
+ VRFY((ret >= 0), "H5Dwrite succeeded");
+
+ /* release resource */
+ H5Sclose(file_dataspace);
+ H5Sclose(mem_dataspace);
+
+ /* -------------------------
+ * Test writing to dataset2
+ * -------------------------*/
+ /* set up dimensions of the slab this process accesses */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYCOL);
+
+ /* put some trivial data in the data_array */
+ dataset_fill(start, block, data_array1);
+ MESG("data_array initialized");
+ if (VERBOSE_MED) {
+ MESG("data_array created");
+ dataset_print(start, block, data_array1);
+ }
+
+ /* create a memory dataspace independently */
+ mem_dataspace = H5Screate_simple(RANK, block, NULL);
+ VRFY((mem_dataspace >= 0), "");
+
+ /* Try write to dataset2 beyond its current dim sizes. Should fail. */
+ /* (dataset2 still has a 0x0 extent here; the selection is legal but the
+ * write must be rejected) */
+
+ /* create a file dataspace independently */
+ file_dataspace = H5Dget_space(dataset2);
+ VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ /* write data independently. Should fail. */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dwrite(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1);
+ }
+ H5E_END_TRY
+ VRFY((ret < 0), "H5Dwrite failed as expected");
+
+ H5Sclose(file_dataspace);
+
+ /* Extend dataset2 and try again. Should succeed. */
+ dims[0] = (hsize_t)dim0;
+ dims[1] = (hsize_t)dim1;
+ ret = H5Dset_extent(dataset2, dims);
+ VRFY((ret >= 0), "H5Dset_extent succeeded");
+
+ /* create a file dataspace independently */
+ /* (re-fetch the dataspace: the pre-extend one is stale) */
+ file_dataspace = H5Dget_space(dataset2);
+ VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ /* write data independently */
+ ret = H5Dwrite(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1);
+ VRFY((ret >= 0), "H5Dwrite succeeded");
+
+ /* release resource */
+ ret = H5Sclose(file_dataspace);
+ VRFY((ret >= 0), "H5Sclose succeeded");
+ ret = H5Sclose(mem_dataspace);
+ VRFY((ret >= 0), "H5Sclose succeeded");
+
+ /* close dataset collectively */
+ ret = H5Dclose(dataset1);
+ VRFY((ret >= 0), "H5Dclose1 succeeded");
+ ret = H5Dclose(dataset2);
+ VRFY((ret >= 0), "H5Dclose2 succeeded");
+
+ /* close the file collectively */
+ H5Fclose(fid);
+
+ /* release data buffers */
+ if (data_array1)
+ HDfree(data_array1);
+}
+
+/*
+ * Example of using the parallel HDF5 library to create an extendible dataset
+ * and perform I/O on it in a way that verifies that the chunk cache is
+ * bypassed for parallel I/O.
+ */
+
+void
+extend_writeInd2(void)
+{
+ /* Creates a 1-D chunked, extendible dataset; writes and reads back the
+ * first orig_size (10) elements, extends the dataset to new_size (20),
+ * then writes and reads back the second half via a hyperslab at offset
+ * orig_size. Verifies each read against the data just written. */
+ const char *filename;
+ hid_t fid; /* HDF5 file ID */
+ hid_t fapl; /* File access templates */
+ hid_t fs; /* File dataspace ID */
+ hid_t ms; /* Memory dataspace ID */
+ hid_t dataset; /* Dataset ID */
+ hsize_t orig_size = 10; /* Original dataset dim size */
+ hsize_t new_size = 20; /* Extended dataset dim size */
+ hsize_t one = 1;
+ hsize_t max_size = H5S_UNLIMITED; /* dataset maximum dim size */
+ hsize_t chunk_size = 16384; /* chunk size */
+ hid_t dcpl; /* dataset create prop. list */
+ int written[10], /* Data to write */
+ retrieved[10]; /* Data read in */
+ int mpi_size, mpi_rank; /* MPI settings */
+ int i; /* Local index variable */
+ herr_t ret; /* Generic return value */
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+ if (VERBOSE_MED)
+ HDprintf("Extend independent write test #2 on file %s\n", filename);
+
+ /* set up MPI parameters */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, basic dataset, or more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* -------------------
+ * START AN HDF5 FILE
+ * -------------------*/
+ /* setup file access template */
+ fapl = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ VRFY((fapl >= 0), "create_faccess_plist succeeded");
+
+ /* create the file collectively */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ VRFY((fid >= 0), "H5Fcreate succeeded");
+
+ /* Release file-access template */
+ ret = H5Pclose(fapl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /* --------------------------------------------------------------
+ * Define the dimensions of the overall datasets and create them.
+ * ------------------------------------------------------------- */
+
+ /* set up dataset storage chunk sizes and creation property list */
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((dcpl >= 0), "H5Pcreate succeeded");
+ ret = H5Pset_chunk(dcpl, 1, &chunk_size);
+ VRFY((ret >= 0), "H5Pset_chunk succeeded");
+
+ /* setup dimensionality object */
+ fs = H5Screate_simple(1, &orig_size, &max_size);
+ VRFY((fs >= 0), "H5Screate_simple succeeded");
+
+ /* create an extendible dataset collectively */
+ /* NOTE(review): the VRFY message below contains the typo "H5Dcreat2e";
+ * left unchanged here since it is a runtime string. */
+ dataset = H5Dcreate2(fid, DATASETNAME1, H5T_NATIVE_INT, fs, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ VRFY((dataset >= 0), "H5Dcreat2e succeeded");
+
+ /* release resource */
+ ret = H5Pclose(dcpl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /* -------------------------
+ * Test writing to dataset
+ * -------------------------*/
+ /* create a memory dataspace independently */
+ /* (fixed at orig_size elements; reused unchanged for both halves below) */
+ ms = H5Screate_simple(1, &orig_size, &max_size);
+ VRFY((ms >= 0), "H5Screate_simple succeeded");
+
+ /* put some trivial data in the data_array */
+ for (i = 0; i < (int)orig_size; i++)
+ written[i] = i;
+ MESG("data array initialized");
+ if (VERBOSE_MED) {
+ MESG("writing at offset zero: ");
+ for (i = 0; i < (int)orig_size; i++)
+ HDprintf("%s%d", i ? ", " : "", written[i]);
+ HDprintf("\n");
+ }
+ ret = H5Dwrite(dataset, H5T_NATIVE_INT, ms, fs, H5P_DEFAULT, written);
+ VRFY((ret >= 0), "H5Dwrite succeeded");
+
+ /* -------------------------
+ * Read initial data from dataset.
+ * -------------------------*/
+ ret = H5Dread(dataset, H5T_NATIVE_INT, ms, fs, H5P_DEFAULT, retrieved);
+ VRFY((ret >= 0), "H5Dread succeeded");
+ for (i = 0; i < (int)orig_size; i++)
+ if (written[i] != retrieved[i]) {
+ HDprintf("Line #%d: written!=retrieved: written[%d]=%d, retrieved[%d]=%d\n", __LINE__, i,
+ written[i], i, retrieved[i]);
+ nerrors++;
+ }
+ if (VERBOSE_MED) {
+ MESG("read at offset zero: ");
+ for (i = 0; i < (int)orig_size; i++)
+ HDprintf("%s%d", i ? ", " : "", retrieved[i]);
+ HDprintf("\n");
+ }
+
+ /* -------------------------
+ * Extend the dataset & retrieve new dataspace
+ * -------------------------*/
+ /* (the old file dataspace is stale after H5Dset_extent, so close and
+ * re-fetch it) */
+ ret = H5Dset_extent(dataset, &new_size);
+ VRFY((ret >= 0), "H5Dset_extent succeeded");
+ ret = H5Sclose(fs);
+ VRFY((ret >= 0), "H5Sclose succeeded");
+ fs = H5Dget_space(dataset);
+ VRFY((fs >= 0), "H5Dget_space succeeded");
+
+ /* -------------------------
+ * Write to the second half of the dataset
+ * -------------------------*/
+ for (i = 0; i < (int)orig_size; i++)
+ written[i] = (int)orig_size + i;
+ MESG("data array re-initialized");
+ if (VERBOSE_MED) {
+ MESG("writing at offset 10: ");
+ for (i = 0; i < (int)orig_size; i++)
+ HDprintf("%s%d", i ? ", " : "", written[i]);
+ HDprintf("\n");
+ }
+ /* select elements [orig_size, 2*orig_size) in the file */
+ ret = H5Sselect_hyperslab(fs, H5S_SELECT_SET, &orig_size, NULL, &one, &orig_size);
+ VRFY((ret >= 0), "H5Sselect_hyperslab succeeded");
+ ret = H5Dwrite(dataset, H5T_NATIVE_INT, ms, fs, H5P_DEFAULT, written);
+ VRFY((ret >= 0), "H5Dwrite succeeded");
+
+ /* -------------------------
+ * Read the new data
+ * -------------------------*/
+ ret = H5Dread(dataset, H5T_NATIVE_INT, ms, fs, H5P_DEFAULT, retrieved);
+ VRFY((ret >= 0), "H5Dread succeeded");
+ for (i = 0; i < (int)orig_size; i++)
+ if (written[i] != retrieved[i]) {
+ HDprintf("Line #%d: written!=retrieved: written[%d]=%d, retrieved[%d]=%d\n", __LINE__, i,
+ written[i], i, retrieved[i]);
+ nerrors++;
+ }
+ if (VERBOSE_MED) {
+ MESG("read at offset 10: ");
+ for (i = 0; i < (int)orig_size; i++)
+ HDprintf("%s%d", i ? ", " : "", retrieved[i]);
+ HDprintf("\n");
+ }
+
+ /* Close dataset collectively */
+ ret = H5Dclose(dataset);
+ VRFY((ret >= 0), "H5Dclose succeeded");
+
+ /* Close the file collectively */
+ ret = H5Fclose(fid);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+}
+
+/* Example of using the parallel HDF5 library to read an extendible dataset */
+void
+extend_readInd(void)
+{
+ /* Opens the file (written by extend_writeInd) read-only, verifies that
+ * H5Dset_extent fails on a read-only file, then reads and verifies the
+ * data with BYROW and BYCOL hyperslab patterns using independent I/O. */
+ hid_t fid; /* HDF5 file ID */
+ hid_t acc_tpl; /* File access templates */
+ hid_t file_dataspace; /* File dataspace ID */
+ hid_t mem_dataspace; /* memory dataspace ID */
+ hid_t dataset1, dataset2; /* Dataset ID */
+ hsize_t dims[RANK]; /* dataset dim sizes */
+ DATATYPE *data_array1 = NULL; /* data buffer */
+ DATATYPE *data_array2 = NULL; /* data buffer */
+ DATATYPE *data_origin1 = NULL; /* expected data buffer */
+ const char *filename;
+
+ hsize_t start[RANK]; /* for hyperslab setting */
+ hsize_t count[RANK], stride[RANK]; /* for hyperslab setting */
+ hsize_t block[RANK]; /* for hyperslab setting */
+
+ herr_t ret; /* Generic return value */
+ int mpi_size, mpi_rank;
+
+ MPI_Comm comm = MPI_COMM_WORLD;
+ MPI_Info info = MPI_INFO_NULL;
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+ if (VERBOSE_MED)
+ HDprintf("Extend independent read test on file %s\n", filename);
+
+ /* set up MPI parameters */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, basic dataset, or more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* allocate memory for data buffer */
+ data_array1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_array1 != NULL), "data_array1 HDmalloc succeeded");
+ /* NOTE(review): data_array2 is allocated and freed but never written or
+ * read in this test. */
+ data_array2 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_array2 != NULL), "data_array2 HDmalloc succeeded");
+ data_origin1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_origin1 != NULL), "data_origin1 HDmalloc succeeded");
+
+ /* -------------------
+ * OPEN AN HDF5 FILE
+ * -------------------*/
+ /* setup file access template */
+ acc_tpl = create_faccess_plist(comm, info, facc_type);
+ VRFY((acc_tpl >= 0), "");
+
+ /* open the file collectively */
+ fid = H5Fopen(filename, H5F_ACC_RDONLY, acc_tpl);
+ VRFY((fid >= 0), "");
+
+ /* Release file-access template */
+ ret = H5Pclose(acc_tpl);
+ VRFY((ret >= 0), "");
+
+ /* open the dataset1 collectively */
+ dataset1 = H5Dopen2(fid, DATASETNAME1, H5P_DEFAULT);
+ VRFY((dataset1 >= 0), "");
+
+ /* open another dataset collectively */
+ /* NOTE(review): this opens DATASETNAME1 a second time rather than
+ * DATASETNAME2 -- verify whether that is intentional. */
+ dataset2 = H5Dopen2(fid, DATASETNAME1, H5P_DEFAULT);
+ VRFY((dataset2 >= 0), "");
+
+ /* Try extend dataset1 which is open RDONLY. Should fail. */
+
+ file_dataspace = H5Dget_space(dataset1);
+ VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sget_simple_extent_dims(file_dataspace, dims, NULL);
+ VRFY((ret > 0), "H5Sget_simple_extent_dims succeeded");
+ dims[0]++;
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dset_extent(dataset1, dims);
+ }
+ H5E_END_TRY
+ VRFY((ret < 0), "H5Dset_extent failed as expected");
+
+ H5Sclose(file_dataspace);
+
+ /* Read dataset1 using BYROW pattern */
+ /* set up dimensions of the slab this process accesses */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+ /* create a file dataspace independently */
+ file_dataspace = H5Dget_space(dataset1);
+ VRFY((file_dataspace >= 0), "");
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "");
+
+ /* create a memory dataspace independently */
+ mem_dataspace = H5Screate_simple(RANK, block, NULL);
+ VRFY((mem_dataspace >= 0), "");
+
+ /* fill dataset with test data */
+ dataset_fill(start, block, data_origin1);
+ if (VERBOSE_MED) {
+ MESG("data_array created");
+ /* NOTE(review): prints data_array1, which has not been read yet at
+ * this point; the buffer just filled is data_origin1. */
+ dataset_print(start, block, data_array1);
+ }
+
+ /* read data independently */
+ ret = H5Dread(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1);
+ VRFY((ret >= 0), "H5Dread succeeded");
+
+ /* verify the read data with original expected data */
+ ret = dataset_vrfy(start, count, stride, block, data_array1, data_origin1);
+ VRFY((ret == 0), "dataset1 read verified correct");
+ if (ret)
+ nerrors++;
+
+ H5Sclose(mem_dataspace);
+ H5Sclose(file_dataspace);
+
+ /* Read dataset2 using BYCOL pattern */
+ /* set up dimensions of the slab this process accesses */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYCOL);
+
+ /* create a file dataspace independently */
+ file_dataspace = H5Dget_space(dataset2);
+ VRFY((file_dataspace >= 0), "");
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "");
+
+ /* create a memory dataspace independently */
+ mem_dataspace = H5Screate_simple(RANK, block, NULL);
+ VRFY((mem_dataspace >= 0), "");
+
+ /* fill dataset with test data */
+ dataset_fill(start, block, data_origin1);
+ if (VERBOSE_MED) {
+ MESG("data_array created");
+ dataset_print(start, block, data_array1);
+ }
+
+ /* read data independently */
+ ret = H5Dread(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array1);
+ VRFY((ret >= 0), "H5Dread succeeded");
+
+ /* verify the read data with original expected data */
+ ret = dataset_vrfy(start, count, stride, block, data_array1, data_origin1);
+ VRFY((ret == 0), "dataset2 read verified correct");
+ if (ret)
+ nerrors++;
+
+ H5Sclose(mem_dataspace);
+ H5Sclose(file_dataspace);
+
+ /* close dataset collectively */
+ ret = H5Dclose(dataset1);
+ VRFY((ret >= 0), "");
+ ret = H5Dclose(dataset2);
+ VRFY((ret >= 0), "");
+
+ /* close the file collectively */
+ H5Fclose(fid);
+
+ /* release data buffers */
+ if (data_array1)
+ HDfree(data_array1);
+ if (data_array2)
+ HDfree(data_array2);
+ if (data_origin1)
+ HDfree(data_origin1);
+}
+
+/*
+ * Part 3--Collective read/write for extendible datasets.
+ */
+
+/*
+ * Example of using the parallel HDF5 library to create two extendible
+ * datasets in one HDF5 file with collective parallel MPIO access support.
+ * The Datasets are of sizes (number-of-mpi-processes x dim0) x dim1.
+ * Each process controls only a slab of size dim0 x dim1 within each
+ * dataset.
+ */
+
<ctrl46>+void
+extend_writeAll(void)
+{
+ /* Same structure as extend_writeInd, but every H5Dwrite uses a transfer
+ * property list set to H5FD_MPIO_COLLECTIVE (optionally downgraded to
+ * H5FD_MPIO_INDIVIDUAL_IO when dxfer_coll_type requests it). */
+ hid_t fid; /* HDF5 file ID */
+ hid_t acc_tpl; /* File access templates */
+ hid_t xfer_plist; /* Dataset transfer properties list */
+ hid_t sid; /* Dataspace ID */
+ hid_t file_dataspace; /* File dataspace ID */
+ hid_t mem_dataspace; /* memory dataspace ID */
+ hid_t dataset1, dataset2; /* Dataset ID */
+ const char *filename;
+ hsize_t dims[RANK]; /* dataset dim sizes */
+ hsize_t max_dims[RANK] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* dataset maximum dim sizes */
+ DATATYPE *data_array1 = NULL; /* data buffer */
+ hsize_t chunk_dims[RANK]; /* chunk sizes */
+ hid_t dataset_pl; /* dataset create prop. list */
+
+ hsize_t start[RANK]; /* for hyperslab setting */
+ hsize_t count[RANK]; /* for hyperslab setting */
+ hsize_t stride[RANK]; /* for hyperslab setting */
+ hsize_t block[RANK]; /* for hyperslab setting */
+
+ herr_t ret; /* Generic return value */
+ int mpi_size, mpi_rank;
+
+ MPI_Comm comm = MPI_COMM_WORLD;
+ MPI_Info info = MPI_INFO_NULL;
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+ if (VERBOSE_MED)
+ /* NOTE(review): message says "independent" although this is the
+ * collective write test (runtime string left unchanged). */
+ HDprintf("Extend independent write test on file %s\n", filename);
+
+ /* set up MPI parameters */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, basic dataset, or more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* setup chunk-size. Make sure sizes are > 0 */
+ chunk_dims[0] = (hsize_t)chunkdim0;
+ chunk_dims[1] = (hsize_t)chunkdim1;
+
+ /* allocate memory for data buffer */
+ data_array1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+ VRFY((data_array1 != NULL), "data_array1 HDmalloc succeeded");
+
+ /* -------------------
+ * START AN HDF5 FILE
+ * -------------------*/
+ /* setup file access template */
+ acc_tpl = create_faccess_plist(comm, info, facc_type);
+ VRFY((acc_tpl >= 0), "");
+
+ /* Reduce the number of metadata cache slots, so that there are cache
+ * collisions during the raw data I/O on the chunked dataset. This stresses
+ * the metadata cache and tests for cache bugs. -QAK
+ */
+ {
+ int mdc_nelmts;
+ size_t rdcc_nelmts;
+ size_t rdcc_nbytes;
+ double rdcc_w0;
+
+ ret = H5Pget_cache(acc_tpl, &mdc_nelmts, &rdcc_nelmts, &rdcc_nbytes, &rdcc_w0);
+ VRFY((ret >= 0), "H5Pget_cache succeeded");
+ mdc_nelmts = 4;
+ ret = H5Pset_cache(acc_tpl, mdc_nelmts, rdcc_nelmts, rdcc_nbytes, rdcc_w0);
+ VRFY((ret >= 0), "H5Pset_cache succeeded");
+ }
+
+ /* create the file collectively */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl);
+ VRFY((fid >= 0), "H5Fcreate succeeded");
+
+ /* Release file-access template */
+ ret = H5Pclose(acc_tpl);
+ VRFY((ret >= 0), "");
+
+ /* --------------------------------------------------------------
+ * Define the dimensions of the overall datasets and create them.
+ * ------------------------------------------------------------- */
+
+ /* set up dataset storage chunk sizes and creation property list */
+ if (VERBOSE_MED)
+ HDprintf("chunks[]=%lu,%lu\n", (unsigned long)chunk_dims[0], (unsigned long)chunk_dims[1]);
+ dataset_pl = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((dataset_pl >= 0), "H5Pcreate succeeded");
+ ret = H5Pset_chunk(dataset_pl, RANK, chunk_dims);
+ VRFY((ret >= 0), "H5Pset_chunk succeeded");
+
+ /* setup dimensionality object */
+ /* start out with no rows, extend it later. */
+ dims[0] = dims[1] = 0;
+ sid = H5Screate_simple(RANK, dims, max_dims);
+ VRFY((sid >= 0), "H5Screate_simple succeeded");
+
+ /* create an extendible dataset collectively */
+ dataset1 = H5Dcreate2(fid, DATASETNAME1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dataset_pl, H5P_DEFAULT);
+ VRFY((dataset1 >= 0), "H5Dcreate2 succeeded");
+
+ /* create another extendible dataset collectively */
+ dataset2 = H5Dcreate2(fid, DATASETNAME2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dataset_pl, H5P_DEFAULT);
+ VRFY((dataset2 >= 0), "H5Dcreate2 succeeded");
+
+ /* release resource */
+ H5Sclose(sid);
+ H5Pclose(dataset_pl);
+
+ /* -------------------------
+ * Test writing to dataset1
+ * -------------------------*/
+ /* set up dimensions of the slab this process accesses */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+ /* put some trivial data in the data_array */
+ dataset_fill(start, block, data_array1);
+ MESG("data_array initialized");
+ if (VERBOSE_MED) {
+ MESG("data_array created");
+ dataset_print(start, block, data_array1);
+ }
+
+ /* create a memory dataspace independently */
+ mem_dataspace = H5Screate_simple(RANK, block, NULL);
+ VRFY((mem_dataspace >= 0), "");
+
+ /* Extend its current dim sizes before writing */
+ dims[0] = (hsize_t)dim0;
+ dims[1] = (hsize_t)dim1;
+ ret = H5Dset_extent(dataset1, dims);
+ VRFY((ret >= 0), "H5Dset_extent succeeded");
+
+ /* create a file dataspace independently */
+ file_dataspace = H5Dget_space(dataset1);
+ VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ /* set up the collective transfer properties list */
+ xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((xfer_plist >= 0), "H5Pcreate xfer succeeded");
+ ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "set independent IO collectively succeeded");
+ }
+
+ /* write data collectively */
+ ret = H5Dwrite(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+ VRFY((ret >= 0), "H5Dwrite succeeded");
+
+ /* release resource */
+ H5Sclose(file_dataspace);
+ H5Sclose(mem_dataspace);
+ H5Pclose(xfer_plist);
+
+ /* -------------------------
+ * Test writing to dataset2
+ * -------------------------*/
+ /* set up dimensions of the slab this process accesses */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYCOL);
+
+ /* put some trivial data in the data_array */
+ dataset_fill(start, block, data_array1);
+ MESG("data_array initialized");
+ if (VERBOSE_MED) {
+ MESG("data_array created");
+ dataset_print(start, block, data_array1);
+ }
+
+ /* create a memory dataspace independently */
+ mem_dataspace = H5Screate_simple(RANK, block, NULL);
+ VRFY((mem_dataspace >= 0), "");
+
+ /* set up the collective transfer properties list */
+ xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((xfer_plist >= 0), "H5Pcreate xfer succeeded");
+ ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "set independent IO collectively succeeded");
+ }
+
+ /* Try write to dataset2 beyond its current dim sizes. Should fail. */
+
+ /* create a file dataspace independently */
+ file_dataspace = H5Dget_space(dataset2);
+ VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ /* write data independently. Should fail. */
+ /* NOTE(review): despite the comment above, this write uses the collective
+ * xfer_plist set up earlier. */
+ H5E_BEGIN_TRY
+ {
+ ret = H5Dwrite(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+ }
+ H5E_END_TRY
+ VRFY((ret < 0), "H5Dwrite failed as expected");
+
+ H5Sclose(file_dataspace);
+
+ /* Extend dataset2 and try again. Should succeed. */
+ dims[0] = (hsize_t)dim0;
+ dims[1] = (hsize_t)dim1;
+ ret = H5Dset_extent(dataset2, dims);
+ VRFY((ret >= 0), "H5Dset_extent succeeded");
+
+ /* create a file dataspace independently */
+ file_dataspace = H5Dget_space(dataset2);
+ VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+ ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ /* write data independently */
+ ret = H5Dwrite(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+ VRFY((ret >= 0), "H5Dwrite succeeded");
+
+ /* release resource */
+ ret = H5Sclose(file_dataspace);
+ VRFY((ret >= 0), "H5Sclose succeeded");
+ ret = H5Sclose(mem_dataspace);
+ VRFY((ret >= 0), "H5Sclose succeeded");
+ ret = H5Pclose(xfer_plist);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /* close dataset collectively */
+ ret = H5Dclose(dataset1);
+ VRFY((ret >= 0), "H5Dclose1 succeeded");
+ ret = H5Dclose(dataset2);
+ VRFY((ret >= 0), "H5Dclose2 succeeded");
+
+ /* close the file collectively */
+ H5Fclose(fid);
+
+ /* release data buffers */
+ if (data_array1)
+ HDfree(data_array1);
+}
+
+/*
+ * Example of using the parallel HDF5 library to read an extendible dataset.
+ *
+ * The file is opened read-only: the test first checks that H5Dset_extent
+ * fails through a read-only handle, then reads the dataset back with both
+ * a BYROW and a BYCOL hyperslab pattern, comparing each read against the
+ * deterministic position-based values produced by dataset_fill().
+ */
+void
+extend_readAll(void)
+{
+    hid_t       fid;                /* HDF5 file ID */
+    hid_t       acc_tpl;            /* File access templates */
+    hid_t       xfer_plist;         /* Dataset transfer properties list */
+    hid_t       file_dataspace;     /* File dataspace ID */
+    hid_t       mem_dataspace;      /* memory dataspace ID */
+    hid_t       dataset1, dataset2; /* Dataset ID */
+    const char *filename;
+    hsize_t     dims[RANK];          /* dataset dim sizes */
+    DATATYPE   *data_array1 = NULL;  /* read buffer */
+    DATATYPE   *data_array2 = NULL;  /* data buffer (allocated/freed but never read into here) */
+    DATATYPE   *data_origin1 = NULL; /* expected data buffer */
+
+    hsize_t start[RANK];               /* for hyperslab setting */
+    hsize_t count[RANK], stride[RANK]; /* for hyperslab setting */
+    hsize_t block[RANK];               /* for hyperslab setting */
+
+    herr_t ret; /* Generic return value */
+    int    mpi_size, mpi_rank;
+
+    MPI_Comm comm = MPI_COMM_WORLD;
+    MPI_Info info = MPI_INFO_NULL;
+
+    filename = PARATESTFILE /* GetTestParameters() */;
+    if (VERBOSE_MED)
+        HDprintf("Extend independent read test on file %s\n", filename);
+
+    /* set up MPI parameters */
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file, basic dataset, or more aren't supported with this "
+                   "connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    /* allocate memory for data buffer */
+    data_array1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+    VRFY((data_array1 != NULL), "data_array1 HDmalloc succeeded");
+    data_array2 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+    VRFY((data_array2 != NULL), "data_array2 HDmalloc succeeded");
+    data_origin1 = (DATATYPE *)HDmalloc((size_t)dim0 * (size_t)dim1 * sizeof(DATATYPE));
+    VRFY((data_origin1 != NULL), "data_origin1 HDmalloc succeeded");
+
+    /* -------------------
+     * OPEN AN HDF5 FILE
+     * -------------------*/
+    /* setup file access template */
+    acc_tpl = create_faccess_plist(comm, info, facc_type);
+    VRFY((acc_tpl >= 0), "");
+
+    /* open the file collectively (read-only, so the extend below must fail) */
+    fid = H5Fopen(filename, H5F_ACC_RDONLY, acc_tpl);
+    VRFY((fid >= 0), "");
+
+    /* Release file-access template */
+    ret = H5Pclose(acc_tpl);
+    VRFY((ret >= 0), "");
+
+    /* open the dataset1 collectively */
+    dataset1 = H5Dopen2(fid, DATASETNAME1, H5P_DEFAULT);
+    VRFY((dataset1 >= 0), "");
+
+    /* open a second dataset handle collectively.
+     * NOTE(review): this opens DATASETNAME1 again rather than DATASETNAME2;
+     * both datasets hold the same position-based fill values, so the BYCOL
+     * verification below still passes -- confirm this is intentional. */
+    dataset2 = H5Dopen2(fid, DATASETNAME1, H5P_DEFAULT);
+    VRFY((dataset2 >= 0), "");
+
+    /* Try extend dataset1 which is open RDONLY. Should fail. */
+
+    file_dataspace = H5Dget_space(dataset1);
+    VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+    /* H5Sget_simple_extent_dims returns the dataspace rank (> 0) on success */
+    ret = H5Sget_simple_extent_dims(file_dataspace, dims, NULL);
+    VRFY((ret > 0), "H5Sget_simple_extent_dims succeeded");
+    dims[0]++;
+    H5E_BEGIN_TRY
+    {
+        ret = H5Dset_extent(dataset1, dims);
+    }
+    H5E_END_TRY
+    VRFY((ret < 0), "H5Dset_extent failed as expected");
+
+    H5Sclose(file_dataspace);
+
+    /* Read dataset1 using BYROW pattern */
+    /* set up dimensions of the slab this process accesses */
+    slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+    /* create a file dataspace independently */
+    file_dataspace = H5Dget_space(dataset1);
+    VRFY((file_dataspace >= 0), "");
+    ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "");
+
+    /* create a memory dataspace independently */
+    mem_dataspace = H5Screate_simple(RANK, block, NULL);
+    VRFY((mem_dataspace >= 0), "");
+
+    /* compute the expected values for this slab (fills the buffer, not the dataset) */
+    dataset_fill(start, block, data_origin1);
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+        dataset_print(start, block, data_array1);
+    }
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((xfer_plist >= 0), "H5Pcreate xfer succeeded");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        /* collective call, but each rank does its I/O independently */
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* read data collectively */
+    ret = H5Dread(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((ret >= 0), "H5Dread succeeded");
+
+    /* verify the read data with original expected data */
+    ret = dataset_vrfy(start, count, stride, block, data_array1, data_origin1);
+    VRFY((ret == 0), "dataset1 read verified correct");
+    if (ret)
+        nerrors++;
+
+    H5Sclose(mem_dataspace);
+    H5Sclose(file_dataspace);
+    H5Pclose(xfer_plist);
+
+    /* Read dataset2 using BYCOL pattern */
+    /* set up dimensions of the slab this process accesses */
+    slab_set(mpi_rank, mpi_size, start, count, stride, block, BYCOL);
+
+    /* create a file dataspace independently */
+    file_dataspace = H5Dget_space(dataset2);
+    VRFY((file_dataspace >= 0), "");
+    ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "");
+
+    /* create a memory dataspace independently */
+    mem_dataspace = H5Screate_simple(RANK, block, NULL);
+    VRFY((mem_dataspace >= 0), "");
+
+    /* compute the expected values for this slab (fills the buffer, not the dataset) */
+    dataset_fill(start, block, data_origin1);
+    if (VERBOSE_MED) {
+        MESG("data_array created");
+        dataset_print(start, block, data_array1);
+    }
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((xfer_plist >= 0), "H5Pcreate xfer succeeded");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        /* collective call, but each rank does its I/O independently */
+        ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* read data collectively */
+    ret = H5Dread(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_array1);
+    VRFY((ret >= 0), "H5Dread succeeded");
+
+    /* verify the read data with original expected data */
+    ret = dataset_vrfy(start, count, stride, block, data_array1, data_origin1);
+    VRFY((ret == 0), "dataset2 read verified correct");
+    if (ret)
+        nerrors++;
+
+    H5Sclose(mem_dataspace);
+    H5Sclose(file_dataspace);
+    H5Pclose(xfer_plist);
+
+    /* close dataset collectively */
+    ret = H5Dclose(dataset1);
+    VRFY((ret >= 0), "");
+    ret = H5Dclose(dataset2);
+    VRFY((ret >= 0), "");
+
+    /* close the file collectively */
+    H5Fclose(fid);
+
+    /* release data buffers */
+    if (data_array1)
+        HDfree(data_array1);
+    if (data_array2)
+        HDfree(data_array2);
+    if (data_origin1)
+        HDfree(data_origin1);
+}
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+/* Strip an optional "prefix:" (e.g. a file-driver prefix) from a file name.
+ * Returns a pointer just past the first ':' when one is present, otherwise
+ * the original pointer unchanged.  No allocation; the result aliases the
+ * caller's string. */
+static const char *
+h5_rmprefix(const char *filename)
+{
+    const char *sep = HDstrstr(filename, ":");
+
+    return (sep == NULL) ? filename : sep + 1;
+}
+
+/*
+ * Example of using the parallel HDF5 library to read a compressed
+ * dataset in an HDF5 file with collective parallel access support.
+ *
+ * Process 0 first creates the file serially with a deflate-compressed,
+ * chunked 1-D dataset; after an MPI_Barrier all processes open the file
+ * in parallel, read the dataset collectively and verify the values.
+ * The sequence runs twice: once with filters applied to partial chunks
+ * and once with H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS set.
+ */
+void
+compress_readAll(void)
+{
+    hid_t       fid;                 /* HDF5 file ID */
+    hid_t       acc_tpl;             /* File access templates */
+    hid_t       dcpl;                /* Dataset creation property list */
+    hid_t       xfer_plist;          /* Dataset transfer properties list */
+    hid_t       dataspace;           /* Dataspace ID */
+    hid_t       dataset;             /* Dataset ID */
+    int         rank = 1;            /* Dataspace rank */
+    hsize_t     dim = (hsize_t)dim0; /* Dataspace dimensions */
+    unsigned    u;                   /* Local index variable */
+    unsigned    chunk_opts;          /* Chunk options */
+    unsigned    disable_partial_chunk_filters; /* Whether filters are disabled on partial chunks */
+    DATATYPE   *data_read = NULL;    /* data buffer */
+    DATATYPE   *data_orig = NULL;    /* expected data buffer */
+    const char *filename;
+    MPI_Comm    comm = MPI_COMM_WORLD;
+    MPI_Info    info = MPI_INFO_NULL;
+    int         mpi_size, mpi_rank;
+    herr_t      ret; /* Generic return value */
+
+    filename = PARATESTFILE /* GetTestParameters() */;
+    if (VERBOSE_MED)
+        HDprintf("Collective chunked dataset read test on file %s\n", filename);
+
+    /* Retrieve MPI parameters */
+    MPI_Comm_size(comm, &mpi_size);
+    MPI_Comm_rank(comm, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file or dataset aren't supported with this connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    /* Allocate data buffer */
+    data_orig = (DATATYPE *)HDmalloc((size_t)dim * sizeof(DATATYPE));
+    VRFY((data_orig != NULL), "data_origin1 HDmalloc succeeded");
+    data_read = (DATATYPE *)HDmalloc((size_t)dim * sizeof(DATATYPE));
+    VRFY((data_read != NULL), "data_array1 HDmalloc succeeded");
+
+    /* Initialize expected data: value equals index */
+    for (u = 0; u < dim; u++)
+        data_orig[u] = (DATATYPE)u;
+
+    /* Run test both with and without filters disabled on partial chunks */
+    for (disable_partial_chunk_filters = 0; disable_partial_chunk_filters <= 1;
+         disable_partial_chunk_filters++) {
+        /* Process zero creates the file with a compressed, chunked dataset */
+        if (mpi_rank == 0) {
+            hsize_t chunk_dim; /* Chunk dimensions */
+
+            /* Create the file (serial access, so strip any driver prefix from the name) */
+            fid = H5Fcreate(h5_rmprefix(filename), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+            VRFY((fid > 0), "H5Fcreate succeeded");
+
+            /* Create property list for chunking and compression */
+            dcpl = H5Pcreate(H5P_DATASET_CREATE);
+            VRFY((dcpl > 0), "H5Pcreate succeeded");
+
+            ret = H5Pset_layout(dcpl, H5D_CHUNKED);
+            VRFY((ret >= 0), "H5Pset_layout succeeded");
+
+            /* Use eight chunks (the last chunk is partial unless dim is a multiple of 8) */
+            chunk_dim = dim / 8;
+            ret = H5Pset_chunk(dcpl, rank, &chunk_dim);
+            VRFY((ret >= 0), "H5Pset_chunk succeeded");
+
+            /* Set chunk options appropriately */
+            if (disable_partial_chunk_filters) {
+                ret = H5Pget_chunk_opts(dcpl, &chunk_opts);
+                VRFY((ret >= 0), "H5Pget_chunk_opts succeeded");
+
+                chunk_opts |= H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS;
+
+                ret = H5Pset_chunk_opts(dcpl, chunk_opts);
+                VRFY((ret >= 0), "H5Pset_chunk_opts succeeded");
+            } /* end if */
+
+            /* Maximum deflate (zlib) compression level */
+            ret = H5Pset_deflate(dcpl, 9);
+            VRFY((ret >= 0), "H5Pset_deflate succeeded");
+
+            /* Create dataspace */
+            dataspace = H5Screate_simple(rank, &dim, NULL);
+            VRFY((dataspace > 0), "H5Screate_simple succeeded");
+
+            /* Create dataset */
+            dataset =
+                H5Dcreate2(fid, "compressed_data", H5T_NATIVE_INT, dataspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+            VRFY((dataset > 0), "H5Dcreate2 succeeded");
+
+            /* Write compressed data */
+            ret = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data_orig);
+            VRFY((ret >= 0), "H5Dwrite succeeded");
+
+            /* Close objects */
+            ret = H5Pclose(dcpl);
+            VRFY((ret >= 0), "H5Pclose succeeded");
+            ret = H5Sclose(dataspace);
+            VRFY((ret >= 0), "H5Sclose succeeded");
+            ret = H5Dclose(dataset);
+            VRFY((ret >= 0), "H5Dclose succeeded");
+            ret = H5Fclose(fid);
+            VRFY((ret >= 0), "H5Fclose succeeded");
+        }
+
+        /* Wait for file to be created before any rank opens it */
+        MPI_Barrier(comm);
+
+        /* -------------------
+         * OPEN AN HDF5 FILE
+         * -------------------*/
+
+        /* setup file access template */
+        acc_tpl = create_faccess_plist(comm, info, facc_type);
+        VRFY((acc_tpl >= 0), "");
+
+        /* open the file collectively (read-write so the optional filtered write below can run) */
+        fid = H5Fopen(filename, H5F_ACC_RDWR, acc_tpl);
+        VRFY((fid > 0), "H5Fopen succeeded");
+
+        /* Release file-access template */
+        ret = H5Pclose(acc_tpl);
+        VRFY((ret >= 0), "H5Pclose succeeded");
+
+        /* Open dataset with compressed chunks */
+        dataset = H5Dopen2(fid, "compressed_data", H5P_DEFAULT);
+        VRFY((dataset > 0), "H5Dopen2 succeeded");
+
+        /* Try reading & writing data */
+        if (dataset > 0) {
+            /* Create dataset transfer property list */
+            xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+            VRFY((xfer_plist > 0), "H5Pcreate succeeded");
+
+            ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+            VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+            if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+                /* collective call, but each rank does its I/O independently */
+                ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+                VRFY((ret >= 0), "set independent IO collectively succeeded");
+            }
+
+            /* Try reading the data */
+            ret = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer_plist, data_read);
+            VRFY((ret >= 0), "H5Dread succeeded");
+
+            /* Verify data read */
+            for (u = 0; u < dim; u++)
+                if (data_orig[u] != data_read[u]) {
+                    HDprintf("Line #%d: written!=retrieved: data_orig[%u]=%d, data_read[%u]=%d\n", __LINE__,
+                             (unsigned)u, data_orig[u], (unsigned)u, data_read[u]);
+                    nerrors++;
+                }
+
+#ifdef H5_HAVE_PARALLEL_FILTERED_WRITES
+            /* Parallel writes to filtered datasets need library support */
+            ret = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer_plist, data_read);
+            VRFY((ret >= 0), "H5Dwrite succeeded");
+#endif
+
+            ret = H5Pclose(xfer_plist);
+            VRFY((ret >= 0), "H5Pclose succeeded");
+            ret = H5Dclose(dataset);
+            VRFY((ret >= 0), "H5Dclose succeeded");
+        } /* end if */
+
+        /* Close file */
+        ret = H5Fclose(fid);
+        VRFY((ret >= 0), "H5Fclose succeeded");
+    } /* end for */
+
+    /* release data buffers */
+    if (data_read)
+        HDfree(data_read);
+    if (data_orig)
+        HDfree(data_orig);
+}
+#endif /* H5_HAVE_FILTER_DEFLATE */
+
+/*
+ * Part 4--Non-selection for chunked dataset
+ */
+
+/*
+ * Example of using the parallel HDF5 library to create chunked
+ * dataset in one HDF5 file with collective and independent parallel
+ * MPIO access support. The Datasets are of sizes dim0 x dim1.
+ * Each process controls only a slab of size dim0 x dim1 within the
+ * dataset with the exception that one processor (rank 0) selects no
+ * element via H5Sselect_none and participates with NULL data buffers.
+ */
+
+void
+none_selection_chunk(void)
+{
+    hid_t       fid;                /* HDF5 file ID */
+    hid_t       acc_tpl;            /* File access templates */
+    hid_t       xfer_plist;         /* Dataset transfer properties list */
+    hid_t       sid;                /* Dataspace ID */
+    hid_t       file_dataspace;     /* File dataspace ID */
+    hid_t       mem_dataspace;      /* memory dataspace ID */
+    hid_t       dataset1, dataset2; /* Dataset ID */
+    const char *filename;
+    hsize_t     dims[RANK];         /* dataset dim sizes */
+    DATATYPE   *data_origin = NULL; /* expected data buffer (NULL on rank 0) */
+    DATATYPE   *data_array = NULL;  /* read-back buffer (NULL on rank 0) */
+    hsize_t     chunk_dims[RANK];   /* chunk sizes */
+    hid_t       dataset_pl;         /* dataset create prop. list */
+
+    hsize_t start[RANK];  /* for hyperslab setting */
+    hsize_t count[RANK];  /* for hyperslab setting */
+    hsize_t stride[RANK]; /* for hyperslab setting */
+    hsize_t block[RANK];  /* for hyperslab setting */
+    hsize_t mstart[RANK]; /* for data buffer in memory */
+
+    herr_t ret; /* Generic return value */
+    int    mpi_size, mpi_rank;
+
+    MPI_Comm comm = MPI_COMM_WORLD;
+    MPI_Info info = MPI_INFO_NULL;
+
+    filename = PARATESTFILE /* GetTestParameters() */;
+    if (VERBOSE_MED)
+        /* NOTE(review): message text appears copied from the extend tests */
+        HDprintf("Extend independent write test on file %s\n", filename);
+
+    /* set up MPI parameters */
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file or dataset aren't supported with this connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    /* setup chunk-size. Make sure sizes are > 0 */
+    chunk_dims[0] = (hsize_t)chunkdim0;
+    chunk_dims[1] = (hsize_t)chunkdim1;
+
+    /* -------------------
+     * START AN HDF5 FILE
+     * -------------------*/
+    /* setup file access template */
+    acc_tpl = create_faccess_plist(comm, info, facc_type);
+    VRFY((acc_tpl >= 0), "");
+
+    /* create the file collectively */
+    fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl);
+    VRFY((fid >= 0), "H5Fcreate succeeded");
+
+    /* Release file-access template */
+    ret = H5Pclose(acc_tpl);
+    VRFY((ret >= 0), "");
+
+    /* --------------------------------------------------------------
+     * Define the dimensions of the overall datasets and create them.
+     * ------------------------------------------------------------- */
+
+    /* set up dataset storage chunk sizes and creation property list */
+    if (VERBOSE_MED)
+        HDprintf("chunks[]=%lu,%lu\n", (unsigned long)chunk_dims[0], (unsigned long)chunk_dims[1]);
+    dataset_pl = H5Pcreate(H5P_DATASET_CREATE);
+    VRFY((dataset_pl >= 0), "H5Pcreate succeeded");
+    ret = H5Pset_chunk(dataset_pl, RANK, chunk_dims);
+    VRFY((ret >= 0), "H5Pset_chunk succeeded");
+
+    /* setup dimensionality object */
+    dims[0] = (hsize_t)dim0;
+    dims[1] = (hsize_t)dim1;
+    sid = H5Screate_simple(RANK, dims, NULL);
+    VRFY((sid >= 0), "H5Screate_simple succeeded");
+
+    /* create an extendible dataset collectively */
+    dataset1 = H5Dcreate2(fid, DATASETNAME1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dataset_pl, H5P_DEFAULT);
+    VRFY((dataset1 >= 0), "H5Dcreate2 succeeded");
+
+    /* create another extendible dataset collectively */
+    dataset2 = H5Dcreate2(fid, DATASETNAME2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dataset_pl, H5P_DEFAULT);
+    VRFY((dataset2 >= 0), "H5Dcreate2 succeeded");
+
+    /* release resource */
+    H5Sclose(sid);
+    H5Pclose(dataset_pl);
+
+    /* -------------------------
+     * Test collective writing to dataset1
+     * -------------------------*/
+    /* set up dimensions of the slab this process accesses */
+    slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+    /* allocate memory for data buffer. Only allocate enough buffer for
+     * each processor's data.  Rank 0 keeps NULL buffers: it participates
+     * in the collective calls below with an empty (none) selection. */
+    if (mpi_rank) {
+        data_origin = (DATATYPE *)HDmalloc(block[0] * block[1] * sizeof(DATATYPE));
+        VRFY((data_origin != NULL), "data_origin HDmalloc succeeded");
+
+        data_array = (DATATYPE *)HDmalloc(block[0] * block[1] * sizeof(DATATYPE));
+        VRFY((data_array != NULL), "data_array HDmalloc succeeded");
+
+        /* fill data_origin with deterministic values; the memory slab
+         * starts at offset (0,0) because the buffer holds only this
+         * rank's block */
+        mstart[0] = mstart[1] = 0;
+        dataset_fill(mstart, block, data_origin);
+        MESG("data_array initialized");
+        if (VERBOSE_MED) {
+            MESG("data_array created");
+            dataset_print(mstart, block, data_origin);
+        }
+    }
+
+    /* create a memory dataspace independently */
+    mem_dataspace = H5Screate_simple(RANK, block, NULL);
+    VRFY((mem_dataspace >= 0), "");
+
+    /* Process 0 has no selection */
+    if (!mpi_rank) {
+        ret = H5Sselect_none(mem_dataspace);
+        VRFY((ret >= 0), "H5Sselect_none succeeded");
+    }
+
+    /* create a file dataspace independently */
+    file_dataspace = H5Dget_space(dataset1);
+    VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+    /* Process 0 has no selection */
+    if (!mpi_rank) {
+        ret = H5Sselect_none(file_dataspace);
+        VRFY((ret >= 0), "H5Sselect_none succeeded");
+    }
+
+    /* set up the collective transfer properties list */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((xfer_plist >= 0), "H5Pcreate xfer succeeded");
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+
+    /* write data collectively (rank 0 contributes nothing) */
+    ret = H5Dwrite(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_origin);
+    VRFY((ret >= 0), "H5Dwrite succeeded");
+
+    /* read data independently */
+    ret = H5Dread(dataset1, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array);
+    VRFY((ret >= 0), "");
+
+    /* verify the read data with original expected data (rank 0 has no data) */
+    if (mpi_rank) {
+        ret = dataset_vrfy(mstart, count, stride, block, data_array, data_origin);
+        if (ret)
+            nerrors++;
+    }
+
+    /* -------------------------
+     * Test independent writing to dataset2
+     * -------------------------*/
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_INDEPENDENT);
+    VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+
+    /* write data using independent transfer mode (xfer_plist now set to
+     * H5FD_MPIO_INDEPENDENT) */
+    ret = H5Dwrite(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, xfer_plist, data_origin);
+    VRFY((ret >= 0), "H5Dwrite succeeded");
+
+    /* read data independently */
+    ret = H5Dread(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, data_array);
+    VRFY((ret >= 0), "");
+
+    /* verify the read data with original expected data (rank 0 has no data) */
+    if (mpi_rank) {
+        ret = dataset_vrfy(mstart, count, stride, block, data_array, data_origin);
+        if (ret)
+            nerrors++;
+    }
+
+    /* release resource */
+    ret = H5Sclose(file_dataspace);
+    VRFY((ret >= 0), "H5Sclose succeeded");
+    ret = H5Sclose(mem_dataspace);
+    VRFY((ret >= 0), "H5Sclose succeeded");
+    ret = H5Pclose(xfer_plist);
+    VRFY((ret >= 0), "H5Pclose succeeded");
+
+    /* close dataset collectively */
+    ret = H5Dclose(dataset1);
+    VRFY((ret >= 0), "H5Dclose1 succeeded");
+    ret = H5Dclose(dataset2);
+    VRFY((ret >= 0), "H5Dclose2 succeeded");
+
+    /* close the file collectively */
+    H5Fclose(fid);
+
+    /* release data buffers */
+    if (data_origin)
+        HDfree(data_origin);
+    if (data_array)
+        HDfree(data_array);
+}
+
+/* Function: test_actual_io_mode
+ *
+ * Purpose: tests one specific case of collective I/O and checks that the
+ * actual_chunk_opt_mode property and the actual_io_mode
+ * properties in the DXPL have the correct values.
+ *
+ * Input: selection_mode: changes the way processes select data from the space, as well
+ * as some dxpl flags to get collective I/O to break in different ways.
+ *
+ * The relevant I/O function and expected response for each mode:
+ * TEST_ACTUAL_IO_MULTI_CHUNK_IND:
+ * H5D_mpi_chunk_collective_io, each process reports independent I/O
+ *
+ * TEST_ACTUAL_IO_MULTI_CHUNK_COL:
+ * H5D_mpi_chunk_collective_io, each process reports collective I/O
+ *
+ * TEST_ACTUAL_IO_MULTI_CHUNK_MIX:
+ * H5D_mpi_chunk_collective_io, each process reports mixed I/O
+ *
+ * TEST_ACTUAL_IO_MULTI_CHUNK_MIX_DISAGREE:
+ * H5D_mpi_chunk_collective_io, processes disagree. The root reports
+ * collective, the rest report independent I/O
+ *
+ * TEST_ACTUAL_IO_DIRECT_MULTI_CHUNK_IND:
+ * Same test TEST_ACTUAL_IO_MULTI_CHUNK_IND.
+ * Set directly go to multi-chunk-io without num threshold calc.
+ * TEST_ACTUAL_IO_DIRECT_MULTI_CHUNK_COL:
+ * Same test TEST_ACTUAL_IO_MULTI_CHUNK_COL.
+ * Set directly go to multi-chunk-io without num threshold calc.
+ *
+ * TEST_ACTUAL_IO_LINK_CHUNK:
+ * H5D_link_chunk_collective_io, processes report linked chunk I/O
+ *
+ * TEST_ACTUAL_IO_CONTIGUOUS:
+ * H5D__contig_collective_write or H5D__contig_collective_read
+ * each process reports contiguous collective I/O
+ *
+ * TEST_ACTUAL_IO_NO_COLLECTIVE:
+ * Simple independent I/O. This tests that the defaults are properly set.
+ *
+ * TEST_ACTUAL_IO_RESET:
+ * Performs collective and then independent I/O with the same dxpl to
+ * make sure the property is correctly reset to the default on each use.
+ * Specifically, this test runs TEST_ACTUAL_IO_MULTI_CHUNK_NO_OPT_MIX_DISAGREE
+ * (The most complex case that works on all builds) and then performs
+ * an independent read and write with the same dxpls.
+ *
+ * Note: DIRECT_MULTI_CHUNK_MIX and DIRECT_MULTI_CHUNK_MIX_DISAGREE
+ * is not needed as they are covered by DIRECT_CHUNK_MIX and
+ * MULTI_CHUNK_MIX_DISAGREE cases. _DIRECT_ cases are only for testing
+ * path way to multi-chunk-io by H5FD_MPIO_CHUNK_MULTI_IO instead of num-threshold.
+ *
+ * Modification:
+ * - Refactor to remove multi-chunk-without-optimization test and update for
+ * testing direct to multi-chunk-io
+ * Programmer: Jonathan Kim
+ * Date: 2012-10-10
+ *
+ *
+ * Programmer: Jacob Gruber
+ * Date: 2011-04-06
+ */
+static void
+test_actual_io_mode(int selection_mode)
+{
+ H5D_mpio_actual_chunk_opt_mode_t actual_chunk_opt_mode_write = H5D_MPIO_NO_CHUNK_OPTIMIZATION;
+ H5D_mpio_actual_chunk_opt_mode_t actual_chunk_opt_mode_read = H5D_MPIO_NO_CHUNK_OPTIMIZATION;
+ H5D_mpio_actual_chunk_opt_mode_t actual_chunk_opt_mode_expected = H5D_MPIO_NO_CHUNK_OPTIMIZATION;
+ H5D_mpio_actual_io_mode_t actual_io_mode_write = H5D_MPIO_NO_COLLECTIVE;
+ H5D_mpio_actual_io_mode_t actual_io_mode_read = H5D_MPIO_NO_COLLECTIVE;
+ H5D_mpio_actual_io_mode_t actual_io_mode_expected = H5D_MPIO_NO_COLLECTIVE;
+ const char *filename;
+ const char *test_name;
+ hbool_t direct_multi_chunk_io;
+ hbool_t multi_chunk_io;
+ hbool_t is_chunked;
+ hbool_t is_collective;
+ int mpi_size = -1;
+ int mpi_rank = -1;
+ int length;
+ int *buffer;
+ int i;
+ MPI_Comm mpi_comm = MPI_COMM_NULL;
+ MPI_Info mpi_info = MPI_INFO_NULL;
+ hid_t fid = -1;
+ hid_t sid = -1;
+ hid_t dataset = -1;
+ hid_t data_type = H5T_NATIVE_INT;
+ hid_t fapl = -1;
+ hid_t mem_space = -1;
+ hid_t file_space = -1;
+ hid_t dcpl = -1;
+ hid_t dxpl_write = -1;
+ hid_t dxpl_read = -1;
+ hsize_t dims[RANK];
+ hsize_t chunk_dims[RANK];
+ hsize_t start[RANK];
+ hsize_t stride[RANK];
+ hsize_t count[RANK];
+ hsize_t block[RANK];
+ char message[256];
+ herr_t ret;
+
+ /* Set up some flags to make some future if statements slightly more readable */
+ direct_multi_chunk_io = (selection_mode == TEST_ACTUAL_IO_DIRECT_MULTI_CHUNK_IND ||
+ selection_mode == TEST_ACTUAL_IO_DIRECT_MULTI_CHUNK_COL);
+
+ /* Note: RESET performs the same tests as MULTI_CHUNK_MIX_DISAGREE and then
+ * tests independent I/O
+ */
+ multi_chunk_io =
+ (selection_mode == TEST_ACTUAL_IO_MULTI_CHUNK_IND ||
+ selection_mode == TEST_ACTUAL_IO_MULTI_CHUNK_COL ||
+ selection_mode == TEST_ACTUAL_IO_MULTI_CHUNK_MIX ||
+ selection_mode == TEST_ACTUAL_IO_MULTI_CHUNK_MIX_DISAGREE || selection_mode == TEST_ACTUAL_IO_RESET);
+
+ is_chunked =
+ (selection_mode != TEST_ACTUAL_IO_CONTIGUOUS && selection_mode != TEST_ACTUAL_IO_NO_COLLECTIVE);
+
+ is_collective = selection_mode != TEST_ACTUAL_IO_NO_COLLECTIVE;
+
+ /* Set up MPI parameters */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file or dataset aren't supported with this connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ HDassert(mpi_size >= 1);
+
+ mpi_comm = MPI_COMM_WORLD;
+ mpi_info = MPI_INFO_NULL;
+
+ filename = (const char *)PARATESTFILE /* GetTestParameters() */;
+ HDassert(filename != NULL);
+
+ /* Setup the file access template */
+ fapl = create_faccess_plist(mpi_comm, mpi_info, facc_type);
+ VRFY((fapl >= 0), "create_faccess_plist() succeeded");
+
+ /* Create the file */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ VRFY((fid >= 0), "H5Fcreate succeeded");
+
+ /* Create the basic Space */
+ dims[0] = (hsize_t)dim0;
+ dims[1] = (hsize_t)dim1;
+ sid = H5Screate_simple(RANK, dims, NULL);
+ VRFY((sid >= 0), "H5Screate_simple succeeded");
+
+ /* Create the dataset creation plist */
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((dcpl >= 0), "dataset creation plist created successfully");
+
+ /* If we are not testing contiguous datasets */
+ if (is_chunked) {
+ /* Set up chunk information. */
+ chunk_dims[0] = dims[0] / (hsize_t)mpi_size;
+ chunk_dims[1] = dims[1];
+ ret = H5Pset_chunk(dcpl, 2, chunk_dims);
+ VRFY((ret >= 0), "chunk creation property list succeeded");
+ }
+
+ /* Create the dataset */
+ dataset = H5Dcreate2(fid, "actual_io", data_type, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ VRFY((dataset >= 0), "H5Dcreate2() dataset succeeded");
+
+ /* Create the file dataspace */
+ file_space = H5Dget_space(dataset);
+ VRFY((file_space >= 0), "H5Dget_space succeeded");
+
+ /* Choose a selection method based on the type of I/O we want to occur,
+ * and also set up some selection-dependeent test info. */
+ switch (selection_mode) {
+
+ /* Independent I/O with optimization */
+ case TEST_ACTUAL_IO_MULTI_CHUNK_IND:
+ case TEST_ACTUAL_IO_DIRECT_MULTI_CHUNK_IND:
+ /* Since the dataset is chunked by row and each process selects a row,
+ * each process writes to a different chunk. This forces all I/O to be
+ * independent.
+ */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+ test_name = "Multi Chunk - Independent";
+ actual_chunk_opt_mode_expected = H5D_MPIO_MULTI_CHUNK;
+ actual_io_mode_expected = H5D_MPIO_CHUNK_INDEPENDENT;
+ break;
+
+ /* Collective I/O with optimization */
+ case TEST_ACTUAL_IO_MULTI_CHUNK_COL:
+ case TEST_ACTUAL_IO_DIRECT_MULTI_CHUNK_COL:
+ /* The dataset is chunked by rows, so each process takes a column which
+ * spans all chunks. Since the processes write non-overlapping regular
+ * selections to each chunk, the operation is purely collective.
+ */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYCOL);
+
+ test_name = "Multi Chunk - Collective";
+ actual_chunk_opt_mode_expected = H5D_MPIO_MULTI_CHUNK;
+ if (mpi_size > 1)
+ actual_io_mode_expected = H5D_MPIO_CHUNK_COLLECTIVE;
+ else
+ actual_io_mode_expected = H5D_MPIO_CHUNK_INDEPENDENT;
+ break;
+
+ /* Mixed I/O with optimization */
+ case TEST_ACTUAL_IO_MULTI_CHUNK_MIX:
+ /* A chunk will be assigned collective I/O only if it is selected by each
+ * process. To get mixed I/O, have the root select all chunks and each
+ * subsequent process select the first and nth chunk. The first chunk,
+ * accessed by all, will be assigned collective I/O while each other chunk
+ * will be accessed only by the root and the nth process and will be
+ * assigned independent I/O. Each process will access one chunk collectively
+ * and at least one chunk independently, reporting mixed I/O.
+ */
+
+ if (mpi_rank == 0) {
+ /* Select the first column */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYCOL);
+ }
+ else {
+ /* Select the first and the nth chunk in the nth column */
+ block[0] = (hsize_t)(dim0 / mpi_size);
+ block[1] = (hsize_t)(dim1 / mpi_size);
+ count[0] = 2;
+ count[1] = 1;
+ stride[0] = (hsize_t)mpi_rank * block[0];
+ stride[1] = 1;
+ start[0] = 0;
+ start[1] = (hsize_t)mpi_rank * block[1];
+ }
+
+ test_name = "Multi Chunk - Mixed";
+ actual_chunk_opt_mode_expected = H5D_MPIO_MULTI_CHUNK;
+ actual_io_mode_expected = H5D_MPIO_CHUNK_MIXED;
+ break;
+
+ /* RESET tests that the properties are properly reset to defaults each time I/O is
+ * performed. To achieve this, we have RESET perform collective I/O (which would change
+ * the values from the defaults) followed by independent I/O (which should report the
+ * default values). RESET doesn't need to have a unique selection, so we reuse
+ * MULTI_CHUMK_MIX_DISAGREE, which was chosen because it is a complex case that works
+ * on all builds. The independent section of RESET can be found at the end of this function.
+ */
+ case TEST_ACTUAL_IO_RESET:
+
+ /* Mixed I/O with optimization and internal disagreement */
+ case TEST_ACTUAL_IO_MULTI_CHUNK_MIX_DISAGREE:
+ /* A chunk will be assigned collective I/O only if it is selected by each
+ * process. To get mixed I/O with disagreement, assign process n to the
+ * first chunk and the nth chunk. The first chunk, selected by all, is
+ * assgigned collective I/O, while each other process gets independent I/O.
+ * Since the root process with only access the first chunk, it will report
+ * collective I/O. The subsequent processes will access the first chunk
+ * collectively, and their other chunk independently, reporting mixed I/O.
+ */
+
+ if (mpi_rank == 0) {
+ /* Select the first chunk in the first column */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYCOL);
+ block[0] = block[0] / (hsize_t)mpi_size;
+ }
+ else {
+ /* Select the first and the nth chunk in the nth column */
+ block[0] = (hsize_t)(dim0 / mpi_size);
+ block[1] = (hsize_t)(dim1 / mpi_size);
+ count[0] = 2;
+ count[1] = 1;
+ stride[0] = (hsize_t)mpi_rank * block[0];
+ stride[1] = 1;
+ start[0] = 0;
+ start[1] = (hsize_t)mpi_rank * block[1];
+ }
+
+ /* If the testname was not already set by the RESET case */
+ if (selection_mode == TEST_ACTUAL_IO_RESET)
+ test_name = "RESET";
+ else
+ test_name = "Multi Chunk - Mixed (Disagreement)";
+
+ actual_chunk_opt_mode_expected = H5D_MPIO_MULTI_CHUNK;
+ if (mpi_size > 1) {
+ if (mpi_rank == 0)
+ actual_io_mode_expected = H5D_MPIO_CHUNK_COLLECTIVE;
+ else
+ actual_io_mode_expected = H5D_MPIO_CHUNK_MIXED;
+ }
+ else
+ actual_io_mode_expected = H5D_MPIO_CHUNK_INDEPENDENT;
+
+ break;
+
+ /* Linked Chunk I/O */
+ case TEST_ACTUAL_IO_LINK_CHUNK:
+ /* Nothing special; link chunk I/O is forced in the dxpl settings. */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+ test_name = "Link Chunk";
+ actual_chunk_opt_mode_expected = H5D_MPIO_LINK_CHUNK;
+ actual_io_mode_expected = H5D_MPIO_CHUNK_COLLECTIVE;
+ break;
+
+ /* Contiguous Dataset */
+ case TEST_ACTUAL_IO_CONTIGUOUS:
+ /* A non overlapping, regular selection in a contiguous dataset leads to
+ * collective I/O */
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+ test_name = "Contiguous";
+ actual_chunk_opt_mode_expected = H5D_MPIO_NO_CHUNK_OPTIMIZATION;
+ actual_io_mode_expected = H5D_MPIO_CONTIGUOUS_COLLECTIVE;
+ break;
+
+ case TEST_ACTUAL_IO_NO_COLLECTIVE:
+ slab_set(mpi_rank, mpi_size, start, count, stride, block, BYROW);
+
+ test_name = "Independent";
+ actual_chunk_opt_mode_expected = H5D_MPIO_NO_CHUNK_OPTIMIZATION;
+ actual_io_mode_expected = H5D_MPIO_NO_COLLECTIVE;
+ break;
+
+ default:
+ test_name = "Undefined Selection Mode";
+ actual_chunk_opt_mode_expected = H5D_MPIO_NO_CHUNK_OPTIMIZATION;
+ actual_io_mode_expected = H5D_MPIO_NO_COLLECTIVE;
+ break;
+ }
+
+ ret = H5Sselect_hyperslab(file_space, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ /* Create a memory dataspace mirroring the dataset and select the same hyperslab
+ * as in the file space.
+ */
+ mem_space = H5Screate_simple(RANK, dims, NULL);
+ VRFY((mem_space >= 0), "mem_space created");
+
+ ret = H5Sselect_hyperslab(mem_space, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ /* Get the number of elements in the selection */
+ length = dim0 * dim1;
+
+ /* Allocate and initialize the buffer */
+ buffer = (int *)HDmalloc(sizeof(int) * (size_t)length);
+ VRFY((buffer != NULL), "HDmalloc of buffer succeeded");
+ for (i = 0; i < length; i++)
+ buffer[i] = i;
+
+ /* Set up the dxpl for the write */
+ dxpl_write = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((dxpl_write >= 0), "H5Pcreate(H5P_DATASET_XFER) succeeded");
+
+ /* Set collective I/O properties in the dxpl. */
+ if (is_collective) {
+ /* Request collective I/O */
+ ret = H5Pset_dxpl_mpio(dxpl_write, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+
+ /* Set the threshold number of processes per chunk to twice mpi_size.
+ * This will prevent the threshold from ever being met, thus forcing
+ * multi chunk io instead of link chunk io.
+ * This is via default.
+ */
+ if (multi_chunk_io) {
+ /* force multi-chunk-io by threshold */
+ ret = H5Pset_dxpl_mpio_chunk_opt_num(dxpl_write, (unsigned)mpi_size * 2);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio_chunk_opt_num succeeded");
+
+ /* set this to manipulate testing scenario about allocating processes
+ * to chunks */
+ ret = H5Pset_dxpl_mpio_chunk_opt_ratio(dxpl_write, (unsigned)99);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio_chunk_opt_ratio succeeded");
+ }
+
+ /* Set directly go to multi-chunk-io without threshold calc. */
+ if (direct_multi_chunk_io) {
+ /* set for multi chunk io by property*/
+ ret = H5Pset_dxpl_mpio_chunk_opt(dxpl_write, H5FD_MPIO_CHUNK_MULTI_IO);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+ }
+ }
+
+ /* Make a copy of the dxpl to test the read operation */
+ dxpl_read = H5Pcopy(dxpl_write);
+ VRFY((dxpl_read >= 0), "H5Pcopy succeeded");
+
+ /* Write */
+ ret = H5Dwrite(dataset, data_type, mem_space, file_space, dxpl_write, buffer);
+ if (ret < 0)
+ H5Eprint2(H5E_DEFAULT, stdout);
+ VRFY((ret >= 0), "H5Dwrite() dataset multichunk write succeeded");
+
+ /* Retrieve Actual io values */
+ ret = H5Pget_mpio_actual_io_mode(dxpl_write, &actual_io_mode_write);
+ VRFY((ret >= 0), "retrieving actual io mode succeeded");
+
+ ret = H5Pget_mpio_actual_chunk_opt_mode(dxpl_write, &actual_chunk_opt_mode_write);
+ VRFY((ret >= 0), "retrieving actual chunk opt mode succeeded");
+
+ /* Read */
+ ret = H5Dread(dataset, data_type, mem_space, file_space, dxpl_read, buffer);
+ if (ret < 0)
+ H5Eprint2(H5E_DEFAULT, stdout);
+ VRFY((ret >= 0), "H5Dread() dataset multichunk read succeeded");
+
+ /* Retrieve Actual io values */
+ ret = H5Pget_mpio_actual_io_mode(dxpl_read, &actual_io_mode_read);
+ VRFY((ret >= 0), "retrieving actual io mode succeeded");
+
+ ret = H5Pget_mpio_actual_chunk_opt_mode(dxpl_read, &actual_chunk_opt_mode_read);
+ VRFY((ret >= 0), "retrieving actual chunk opt mode succeeded");
+
+ /* Check write vs read */
+ VRFY((actual_io_mode_read == actual_io_mode_write),
+ "reading and writing are the same for actual_io_mode");
+ VRFY((actual_chunk_opt_mode_read == actual_chunk_opt_mode_write),
+ "reading and writing are the same for actual_chunk_opt_mode");
+
+ /* Test values */
+ if (actual_chunk_opt_mode_expected != (H5D_mpio_actual_chunk_opt_mode_t)-1 &&
+ actual_io_mode_expected != (H5D_mpio_actual_io_mode_t)-1) {
+ HDsnprintf(message, sizeof(message), "Actual Chunk Opt Mode has the correct value for %s.\n",
+ test_name);
+ VRFY((actual_chunk_opt_mode_write == actual_chunk_opt_mode_expected), message);
+ HDsnprintf(message, sizeof(message), "Actual IO Mode has the correct value for %s.\n", test_name);
+ VRFY((actual_io_mode_write == actual_io_mode_expected), message);
+ }
+ else {
+ HDfprintf(stderr, "%s %d -> (%d,%d)\n", test_name, mpi_rank, actual_chunk_opt_mode_write,
+ actual_io_mode_write);
+ }
+
+ /* To test that the property is successfully reset to the default, we perform some
+ * independent I/O after the collective I/O
+ */
+ if (selection_mode == TEST_ACTUAL_IO_RESET) {
+ if (mpi_rank == 0) {
+ /* Switch to independent io */
+ ret = H5Pset_dxpl_mpio(dxpl_write, H5FD_MPIO_INDEPENDENT);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+ ret = H5Pset_dxpl_mpio(dxpl_read, H5FD_MPIO_INDEPENDENT);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+
+ /* Write */
+ ret = H5Dwrite(dataset, data_type, H5S_ALL, H5S_ALL, dxpl_write, buffer);
+ VRFY((ret >= 0), "H5Dwrite() dataset multichunk write succeeded");
+
+ /* Check Properties */
+ ret = H5Pget_mpio_actual_io_mode(dxpl_write, &actual_io_mode_write);
+ VRFY((ret >= 0), "retrieving actual io mode succeeded");
+ ret = H5Pget_mpio_actual_chunk_opt_mode(dxpl_write, &actual_chunk_opt_mode_write);
+ VRFY((ret >= 0), "retrieving actual chunk opt mode succeeded");
+
+ VRFY(actual_chunk_opt_mode_write == H5D_MPIO_NO_CHUNK_OPTIMIZATION,
+ "actual_chunk_opt_mode has correct value for reset write (independent)");
+ VRFY(actual_io_mode_write == H5D_MPIO_NO_COLLECTIVE,
+ "actual_io_mode has correct value for reset write (independent)");
+
+ /* Read */
+ ret = H5Dread(dataset, data_type, H5S_ALL, H5S_ALL, dxpl_read, buffer);
+ VRFY((ret >= 0), "H5Dwrite() dataset multichunk write succeeded");
+
+ /* Check Properties */
+ ret = H5Pget_mpio_actual_io_mode(dxpl_read, &actual_io_mode_read);
+ VRFY((ret >= 0), "retrieving actual io mode succeeded");
+ ret = H5Pget_mpio_actual_chunk_opt_mode(dxpl_read, &actual_chunk_opt_mode_read);
+ VRFY((ret >= 0), "retrieving actual chunk opt mode succeeded");
+
+ VRFY(actual_chunk_opt_mode_read == H5D_MPIO_NO_CHUNK_OPTIMIZATION,
+ "actual_chunk_opt_mode has correct value for reset read (independent)");
+ VRFY(actual_io_mode_read == H5D_MPIO_NO_COLLECTIVE,
+ "actual_io_mode has correct value for reset read (independent)");
+ }
+ }
+
+ /* Release some resources */
+ ret = H5Sclose(sid);
+ VRFY((ret >= 0), "H5Sclose succeeded");
+ ret = H5Pclose(fapl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+ ret = H5Pclose(dcpl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+ ret = H5Pclose(dxpl_write);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+ ret = H5Pclose(dxpl_read);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+ ret = H5Dclose(dataset);
+ VRFY((ret >= 0), "H5Dclose succeeded");
+ ret = H5Sclose(mem_space);
+ VRFY((ret >= 0), "H5Sclose succeeded");
+ ret = H5Sclose(file_space);
+ VRFY((ret >= 0), "H5Sclose succeeded");
+ ret = H5Fclose(fid);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+ HDfree(buffer);
+ return;
+}
+
+/* Function: actual_io_mode_tests
+ *
+ * Purpose: Tests all possible cases of the actual_io_mode property.
+ *
+ * Each call below exercises one branch of test_actual_io_mode(), which
+ * performs the I/O and verifies the values reported by
+ * H5Pget_mpio_actual_io_mode / H5Pget_mpio_actual_chunk_opt_mode.
+ *
+ * Programmer: Jacob Gruber
+ * Date: 2011-04-06
+ */
+void
+actual_io_mode_tests(void)
+{
+    int mpi_size = -1;
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+    /* Only run these tests if selection I/O is not being used - selection I/O
+     * bypasses this IO mode decision - it's effectively always multi chunk
+     * currently */
+    if (/* !H5_use_selection_io_g */ TRUE) {
+        /* Baseline: plain independent I/O, no chunk optimization expected */
+        test_actual_io_mode(TEST_ACTUAL_IO_NO_COLLECTIVE);
+
+        /*
+         * Test multi-chunk-io via proc_num threshold
+         */
+        test_actual_io_mode(TEST_ACTUAL_IO_MULTI_CHUNK_IND);
+        test_actual_io_mode(TEST_ACTUAL_IO_MULTI_CHUNK_COL);
+
+        /* The Multi Chunk Mixed test requires at least three processes. */
+        if (mpi_size > 2)
+            test_actual_io_mode(TEST_ACTUAL_IO_MULTI_CHUNK_MIX);
+        else
+            HDfprintf(stdout, "Multi Chunk Mixed test requires 3 processes minimum\n");
+
+        test_actual_io_mode(TEST_ACTUAL_IO_MULTI_CHUNK_MIX_DISAGREE);
+
+        /*
+         * Test multi-chunk-io via setting direct property
+         */
+        test_actual_io_mode(TEST_ACTUAL_IO_DIRECT_MULTI_CHUNK_IND);
+        test_actual_io_mode(TEST_ACTUAL_IO_DIRECT_MULTI_CHUNK_COL);
+
+        test_actual_io_mode(TEST_ACTUAL_IO_LINK_CHUNK);
+        test_actual_io_mode(TEST_ACTUAL_IO_CONTIGUOUS);
+
+        /* Verifies the dxpl properties return to their defaults after I/O
+         * (collective I/O followed by independent I/O inside the test) */
+        test_actual_io_mode(TEST_ACTUAL_IO_RESET);
+    }
+
+    return;
+}
+
+/*
+ * Function: test_no_collective_cause_mode
+ *
+ * Purpose:
+ *    tests cases for broken collective I/O and checks that the
+ *    H5Pget_mpio_no_collective_cause properties in the DXPL have the correct values.
+ *
+ * Input:
+ *    selection_mode: various mode to cause broken collective I/O
+ *    Note: Originally, each TEST case is supposed to be used alone.
+ *    After some discussion, this is updated to take multiple TEST cases
+ *    with '|'. However there is no error check for any of combined
+ *    test cases, so a tester is responsible to understand and feed
+ *    proper combination of TESTs if needed.
+ *
+ *
+ *    TEST_COLLECTIVE:
+ *        Test for regular collective I/O without cause of breaking.
+ *        Just to test normal behavior.
+ *
+ *    TEST_SET_INDEPENDENT:
+ *        Test for Independent I/O as the cause of breaking collective I/O.
+ *
+ *    TEST_DATATYPE_CONVERSION:
+ *        Test for Data Type Conversion as the cause of breaking collective I/O.
+ *
+ *    TEST_DATA_TRANSFORMS:
+ *        Test for Data Transform feature as the cause of breaking collective I/O.
+ *
+ *    TEST_NOT_SIMPLE_OR_SCALAR_DATASPACES:
+ *        Test for NULL dataspace as the cause of breaking collective I/O.
+ *
+ *    TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_COMPACT:
+ *        Test for Compact layout as the cause of breaking collective I/O.
+ *
+ *    TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_EXTERNAL:
+ *        Test for External-File storage as the cause of breaking collective I/O.
+ *
+ * Programmer: Jonathan Kim
+ * Date: Aug, 2012
+ */
+#ifdef LATER
+#define DSET_NOCOLCAUSE "nocolcause"
+#endif
+#define FILE_EXTERNAL "nocolcause_extern.data"
+static void
+test_no_collective_cause_mode(int selection_mode)
+{
+    uint32_t no_collective_cause_local_write = 0;
+    uint32_t no_collective_cause_local_read = 0;
+    uint32_t no_collective_cause_local_expected = 0;
+    uint32_t no_collective_cause_global_write = 0;
+    uint32_t no_collective_cause_global_read = 0;
+    uint32_t no_collective_cause_global_expected = 0;
+
+    const char *filename;
+    /* NOTE(review): test_name is only assigned when selection_mode contains a
+     * recognized TEST_* flag; an unrecognized mode would leave it
+     * uninitialized when formatted into `message` below -- confirm callers
+     * always pass at least one known flag. */
+    const char *test_name;
+    hbool_t is_chunked = 1;
+    hbool_t is_independent = 0;
+    int mpi_size = -1;
+    int mpi_rank = -1;
+    int length;
+    int *buffer;
+    int i;
+    MPI_Comm mpi_comm;
+    MPI_Info mpi_info;
+    hid_t fid = -1;
+    hid_t sid = -1;
+    hid_t dataset = -1;
+    hid_t data_type = H5T_NATIVE_INT;
+    hid_t fapl = -1;
+    hid_t dcpl = -1;
+    hid_t dxpl_write = -1;
+    hid_t dxpl_read = -1;
+    hsize_t dims[RANK];
+    hid_t mem_space = -1;
+    hid_t file_space = -1;
+    hsize_t chunk_dims[RANK];
+    herr_t ret;
+    /* set to global value as default */
+    int l_facc_type = facc_type;
+    char message[256];
+
+    /* Set up MPI parameters */
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    MPI_Barrier(MPI_COMM_WORLD);
+
+    HDassert(mpi_size >= 1);
+
+    mpi_comm = MPI_COMM_WORLD;
+    mpi_info = MPI_INFO_NULL;
+
+    /* Create the dataset creation plist */
+    dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    VRFY((dcpl >= 0), "dataset creation plist created successfully");
+
+    /* Compact layout is neither contiguous nor chunked, so it breaks
+     * collective I/O */
+    if (selection_mode & TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_COMPACT) {
+        ret = H5Pset_layout(dcpl, H5D_COMPACT);
+        VRFY((ret >= 0), "set COMPACT layout succeeded");
+        is_chunked = 0;
+    }
+
+    /* External storage likewise breaks collective I/O */
+    if (selection_mode & TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_EXTERNAL) {
+        ret = H5Pset_external(dcpl, FILE_EXTERNAL, (off_t)0, H5F_UNLIMITED);
+        VRFY((ret >= 0), "set EXTERNAL file layout succeeded");
+        is_chunked = 0;
+    }
+
+    if (selection_mode & TEST_NOT_SIMPLE_OR_SCALAR_DATASPACES) {
+        /* A NULL dataspace (not simple, not scalar) breaks collective I/O */
+        sid = H5Screate(H5S_NULL);
+        VRFY((sid >= 0), "H5Screate_simple succeeded");
+        is_chunked = 0;
+    }
+    else {
+        /* Create the basic Space */
+        /* if this is a compact dataset, create a small dataspace that does not exceed 64K */
+        if (selection_mode & TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_COMPACT) {
+            dims[0] = ROW_FACTOR * 6;
+            dims[1] = COL_FACTOR * 6;
+        }
+        else {
+            dims[0] = (hsize_t)dim0;
+            dims[1] = (hsize_t)dim1;
+        }
+        sid = H5Screate_simple(RANK, dims, NULL);
+        VRFY((sid >= 0), "H5Screate_simple succeeded");
+    }
+
+    filename = (const char *)PARATESTFILE /* GetTestParameters() */;
+    HDassert(filename != NULL);
+
+    /* Setup the file access template */
+    fapl = create_faccess_plist(mpi_comm, mpi_info, l_facc_type);
+    VRFY((fapl >= 0), "create_faccess_plist() succeeded");
+
+    /* Create the file */
+    fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+
+    VRFY((fid >= 0), "H5Fcreate succeeded");
+
+    /* If we are not testing contiguous datasets */
+    if (is_chunked) {
+        /* Set up chunk information. */
+        chunk_dims[0] = dims[0] / (hsize_t)mpi_size;
+        chunk_dims[1] = dims[1];
+        ret = H5Pset_chunk(dcpl, 2, chunk_dims);
+        VRFY((ret >= 0), "chunk creation property list succeeded");
+    }
+
+    /* Create the dataset */
+    dataset = H5Dcreate2(fid, "nocolcause", data_type, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+    VRFY((dataset >= 0), "H5Dcreate2() dataset succeeded");
+
+    /*
+     * Set expected causes and some tweaks based on the type of test.
+     * Causes accumulate with |= so combined TEST flags expect combined
+     * cause bits; COLLECTIVE and SET_INDEPENDENT overwrite with = since
+     * they represent a single definitive state.
+     */
+    if (selection_mode & TEST_DATATYPE_CONVERSION) {
+        test_name = "Broken Collective I/O - Datatype Conversion";
+        no_collective_cause_local_expected |= H5D_MPIO_DATATYPE_CONVERSION;
+        no_collective_cause_global_expected |= H5D_MPIO_DATATYPE_CONVERSION;
+        /* set different sign to trigger type conversion */
+        data_type = H5T_NATIVE_UINT;
+    }
+
+    if (selection_mode & TEST_DATA_TRANSFORMS) {
+        test_name = "Broken Collective I/O - DATA Transforms";
+        no_collective_cause_local_expected |= H5D_MPIO_DATA_TRANSFORMS;
+        no_collective_cause_global_expected |= H5D_MPIO_DATA_TRANSFORMS;
+    }
+
+    if (selection_mode & TEST_NOT_SIMPLE_OR_SCALAR_DATASPACES) {
+        test_name = "Broken Collective I/O - No Simple or Scalar DataSpace";
+        no_collective_cause_local_expected |= H5D_MPIO_NOT_SIMPLE_OR_SCALAR_DATASPACES;
+        no_collective_cause_global_expected |= H5D_MPIO_NOT_SIMPLE_OR_SCALAR_DATASPACES;
+    }
+
+    if (selection_mode & TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_COMPACT ||
+        selection_mode & TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_EXTERNAL) {
+        test_name = "Broken Collective I/O - No CONTI or CHUNKED Dataset";
+        no_collective_cause_local_expected |= H5D_MPIO_NOT_CONTIGUOUS_OR_CHUNKED_DATASET;
+        no_collective_cause_global_expected |= H5D_MPIO_NOT_CONTIGUOUS_OR_CHUNKED_DATASET;
+    }
+
+    if (selection_mode & TEST_COLLECTIVE) {
+        test_name = "Broken Collective I/O - Not Broken";
+        no_collective_cause_local_expected = H5D_MPIO_COLLECTIVE;
+        no_collective_cause_global_expected = H5D_MPIO_COLLECTIVE;
+    }
+
+    if (selection_mode & TEST_SET_INDEPENDENT) {
+        test_name = "Broken Collective I/O - Independent";
+        no_collective_cause_local_expected = H5D_MPIO_SET_INDEPENDENT;
+        no_collective_cause_global_expected = H5D_MPIO_SET_INDEPENDENT;
+        /* switch to independent io */
+        is_independent = 1;
+    }
+
+    /* use all spaces for certain tests */
+    if (selection_mode & TEST_NOT_SIMPLE_OR_SCALAR_DATASPACES ||
+        selection_mode & TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_EXTERNAL) {
+        file_space = H5S_ALL;
+        mem_space = H5S_ALL;
+    }
+    else {
+        /* Get the file dataspace */
+        file_space = H5Dget_space(dataset);
+        VRFY((file_space >= 0), "H5Dget_space succeeded");
+
+        /* Create the memory dataspace */
+        mem_space = H5Screate_simple(RANK, dims, NULL);
+        VRFY((mem_space >= 0), "mem_space created");
+    }
+
+    /* Get the number of elements in the selection */
+    length = (int)(dims[0] * dims[1]);
+
+    /* Allocate and initialize the buffer */
+    buffer = (int *)HDmalloc(sizeof(int) * (size_t)length);
+    VRFY((buffer != NULL), "HDmalloc of buffer succeeded");
+    for (i = 0; i < length; i++)
+        buffer[i] = i;
+
+    /* Set up the dxpl for the write */
+    dxpl_write = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((dxpl_write >= 0), "H5Pcreate(H5P_DATASET_XFER) succeeded");
+
+    if (is_independent) {
+        /* Set Independent I/O */
+        ret = H5Pset_dxpl_mpio(dxpl_write, H5FD_MPIO_INDEPENDENT);
+        VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    }
+    else {
+        /* Set Collective I/O */
+        ret = H5Pset_dxpl_mpio(dxpl_write, H5FD_MPIO_COLLECTIVE);
+        VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+    }
+
+    if (selection_mode & TEST_DATA_TRANSFORMS) {
+        ret = H5Pset_data_transform(dxpl_write, "x+1");
+        VRFY((ret >= 0), "H5Pset_data_transform succeeded");
+    }
+
+    /*---------------------
+     * Test Write access
+     *---------------------*/
+
+    /* Write */
+    ret = H5Dwrite(dataset, data_type, mem_space, file_space, dxpl_write, buffer);
+    if (ret < 0)
+        H5Eprint2(H5E_DEFAULT, stdout);
+    VRFY((ret >= 0), "H5Dwrite() dataset multichunk write succeeded");
+
+    /* Get the cause of broken collective I/O */
+    ret = H5Pget_mpio_no_collective_cause(dxpl_write, &no_collective_cause_local_write,
+                                          &no_collective_cause_global_write);
+    VRFY((ret >= 0), "retrieving no collective cause succeeded");
+
+    /*---------------------
+     * Test Read access
+     *---------------------*/
+
+    /* Make a copy of the dxpl to test the read operation */
+    dxpl_read = H5Pcopy(dxpl_write);
+    VRFY((dxpl_read >= 0), "H5Pcopy succeeded");
+
+    /* Read */
+    ret = H5Dread(dataset, data_type, mem_space, file_space, dxpl_read, buffer);
+
+    if (ret < 0)
+        H5Eprint2(H5E_DEFAULT, stdout);
+    VRFY((ret >= 0), "H5Dread() dataset multichunk read succeeded");
+
+    /* Get the cause of broken collective I/O */
+    ret = H5Pget_mpio_no_collective_cause(dxpl_read, &no_collective_cause_local_read,
+                                          &no_collective_cause_global_read);
+    VRFY((ret >= 0), "retrieving no collective cause succeeded");
+
+    /* Check write vs read */
+    VRFY((no_collective_cause_local_read == no_collective_cause_local_write),
+         "reading and writing are the same for local cause of Broken Collective I/O");
+    VRFY((no_collective_cause_global_read == no_collective_cause_global_write),
+         "reading and writing are the same for global cause of Broken Collective I/O");
+
+    /* Test values */
+    HDmemset(message, 0, sizeof(message));
+    HDsnprintf(message, sizeof(message),
+               "Local cause of Broken Collective I/O has the correct value for %s.\n", test_name);
+    VRFY((no_collective_cause_local_write == no_collective_cause_local_expected), message);
+    HDmemset(message, 0, sizeof(message));
+    HDsnprintf(message, sizeof(message),
+               "Global cause of Broken Collective I/O has the correct value for %s.\n", test_name);
+    VRFY((no_collective_cause_global_write == no_collective_cause_global_expected), message);
+
+    /* Release some resources.
+     * NOTE(review): the ids are initialized to -1 (nonzero), so these
+     * `if (id)` guards are always taken even when the id was never opened;
+     * the H5*close calls on -1 simply fail.  Guards of the form
+     * `if (id >= 0)` would match intent -- confirm before changing. */
+    if (sid)
+        H5Sclose(sid);
+    if (dcpl)
+        H5Pclose(dcpl);
+    if (dxpl_write)
+        H5Pclose(dxpl_write);
+    if (dxpl_read)
+        H5Pclose(dxpl_read);
+    if (dataset)
+        H5Dclose(dataset);
+    if (mem_space)
+        H5Sclose(mem_space);
+    if (file_space)
+        H5Sclose(file_space);
+    if (fid)
+        H5Fclose(fid);
+    HDfree(buffer);
+
+    /* clean up external file */
+    if (selection_mode & TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_EXTERNAL)
+        H5Fdelete(FILE_EXTERNAL, fapl);
+
+    if (fapl)
+        H5Pclose(fapl);
+
+    return;
+}
+
<imports>
</imports>
+/* Function: no_collective_cause_tests
+ *
+ * Purpose: Tests cases for broken collective IO.
+ *
+ * Drives test_no_collective_cause_mode() once per individual cause and
+ * then with several OR-ed combinations of causes.
+ *
+ * Programmer: Jonathan Kim
+ * Date: Aug, 2012
+ */
+void
+no_collective_cause_tests(void)
+{
+    /*
+     * Test individual cause
+     */
+    test_no_collective_cause_mode(TEST_COLLECTIVE);
+    test_no_collective_cause_mode(TEST_SET_INDEPENDENT);
+    test_no_collective_cause_mode(TEST_DATATYPE_CONVERSION);
+    test_no_collective_cause_mode(TEST_DATA_TRANSFORMS);
+    test_no_collective_cause_mode(TEST_NOT_SIMPLE_OR_SCALAR_DATASPACES);
+    test_no_collective_cause_mode(TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_COMPACT);
+    test_no_collective_cause_mode(TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_EXTERNAL);
+
+    /*
+     * Test combined causes
+     */
+    test_no_collective_cause_mode(TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_EXTERNAL | TEST_DATATYPE_CONVERSION);
+    test_no_collective_cause_mode(TEST_DATATYPE_CONVERSION | TEST_DATA_TRANSFORMS);
+    test_no_collective_cause_mode(TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_EXTERNAL | TEST_DATATYPE_CONVERSION |
+                                  TEST_DATA_TRANSFORMS);
+
+    return;
+}
+
+/*
+ * Test consistency semantics of atomic mode
+ */
+
+/*
+ * Example of using the parallel HDF5 library to create a dataset,
+ * where process 0 writes and the other processes read at the same
+ * time. If atomic mode is set correctly, the other processes should
+ * read the old values in the dataset or the new ones.
+ */
+
+void
+dataset_atomicity(void)
+{
+    hid_t fid;             /* HDF5 file ID */
+    hid_t acc_tpl;         /* File access templates */
+    hid_t sid;             /* Dataspace ID */
+    hid_t dataset1;        /* Dataset IDs */
+    hsize_t dims[RANK];    /* dataset dim sizes */
+    int *write_buf = NULL; /* data buffer */
+    int *read_buf = NULL;  /* data buffer */
+    int buf_size;
+    hid_t dataset2;
+    hid_t file_dataspace; /* File dataspace ID */
+    hid_t mem_dataspace;  /* Memory dataspace ID */
+    hsize_t start[RANK];
+    hsize_t stride[RANK];
+    hsize_t count[RANK];
+    hsize_t block[RANK];
+    const char *filename;
+    herr_t ret; /* Generic return value */
+    int mpi_size, mpi_rank;
+    int i, j, k;
+    hbool_t atomicity = FALSE;
+    MPI_Comm comm = MPI_COMM_WORLD;
+    MPI_Info info = MPI_INFO_NULL;
+
+    dim0 = 64;
+    dim1 = 32;
+    filename = PARATESTFILE /* GetTestParameters() */;
+    if (facc_type != FACC_MPIO) {
+        HDprintf("Atomicity tests will not work without the MPIO VFD\n");
+        return;
+    }
+    if (VERBOSE_MED)
+        HDprintf("atomic writes to file %s\n", filename);
+
+    /* set up MPI parameters */
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file, basic dataset, or more aren't supported with this "
+                   "connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    buf_size = dim0 * dim1;
+    /* allocate memory for data buffer */
+    write_buf = (int *)HDcalloc((size_t)buf_size, sizeof(int));
+    VRFY((write_buf != NULL), "write_buf HDcalloc succeeded");
+    /* allocate memory for data buffer */
+    read_buf = (int *)HDcalloc((size_t)buf_size, sizeof(int));
+    VRFY((read_buf != NULL), "read_buf HDcalloc succeeded");
+
+    /* setup file access template */
+    acc_tpl = create_faccess_plist(comm, info, facc_type);
+    VRFY((acc_tpl >= 0), "");
+
+    /* create the file collectively */
+    fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl);
+    VRFY((fid >= 0), "H5Fcreate succeeded");
+
+    /* Release file-access template */
+    ret = H5Pclose(acc_tpl);
+    VRFY((ret >= 0), "H5Pclose succeeded");
+
+    /* setup dimensionality object */
+    dims[0] = (hsize_t)dim0;
+    dims[1] = (hsize_t)dim1;
+    sid = H5Screate_simple(RANK, dims, NULL);
+    VRFY((sid >= 0), "H5Screate_simple succeeded");
+
+    /* create datasets */
+    dataset1 = H5Dcreate2(fid, DATASETNAME5, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY((dataset1 >= 0), "H5Dcreate2 succeeded");
+
+    dataset2 = H5Dcreate2(fid, DATASETNAME6, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY((dataset2 >= 0), "H5Dcreate2 succeeded");
+
+    /* initialize datasets to 0s (write_buf is calloc'd, hence all zeros) */
+    if (0 == mpi_rank) {
+        ret = H5Dwrite(dataset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, write_buf);
+        VRFY((ret >= 0), "H5Dwrite dataset1 succeeded");
+
+        ret = H5Dwrite(dataset2, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, write_buf);
+        VRFY((ret >= 0), "H5Dwrite dataset2 succeeded");
+    }
+
+    ret = H5Dclose(dataset1);
+    VRFY((ret >= 0), "H5Dclose succeeded");
+    ret = H5Dclose(dataset2);
+    VRFY((ret >= 0), "H5Dclose succeeded");
+    ret = H5Sclose(sid);
+    VRFY((ret >= 0), "H5Sclose succeeded");
+    ret = H5Fclose(fid);
+    VRFY((ret >= 0), "H5Fclose succeeded");
+
+    MPI_Barrier(comm);
+
+    /* make sure setting atomicity fails on a serial file ID */
+    /* file locking allows only one file open (serial) for writing */
+    if (MAINPROCESS) {
+        fid = H5Fopen(filename, H5F_ACC_RDWR, H5P_DEFAULT);
+        VRFY((fid >= 0), "H5Fopen succeeded");
+
+        /* should fail */
+        H5E_BEGIN_TRY
+        {
+            ret = H5Fset_mpi_atomicity(fid, TRUE);
+        }
+        H5E_END_TRY
+        VRFY((ret == FAIL), "H5Fset_mpi_atomicity failed");
+
+        ret = H5Fclose(fid);
+        VRFY((ret >= 0), "H5Fclose succeeded");
+    }
+
+    MPI_Barrier(comm);
+
+    /* setup file access template */
+    acc_tpl = create_faccess_plist(comm, info, facc_type);
+    VRFY((acc_tpl >= 0), "");
+
+    /* open the file collectively */
+    fid = H5Fopen(filename, H5F_ACC_RDWR, acc_tpl);
+    VRFY((fid >= 0), "H5Fopen succeeded");
+
+    /* Release file-access template */
+    ret = H5Pclose(acc_tpl);
+    VRFY((ret >= 0), "H5Pclose succeeded");
+
+    ret = H5Fset_mpi_atomicity(fid, TRUE);
+    VRFY((ret >= 0), "H5Fset_mpi_atomicity succeeded");
+
+    /* open dataset1 (contiguous case) */
+    dataset1 = H5Dopen2(fid, DATASETNAME5, H5P_DEFAULT);
+    VRFY((dataset1 >= 0), "H5Dopen2 succeeded");
+
+    /* rank 0 will write 5s; other ranks pre-fill read_buf with a sentinel (8)
+     * so a partial read is detectable */
+    if (0 == mpi_rank) {
+        for (i = 0; i < buf_size; i++) {
+            write_buf[i] = 5;
+        }
+    }
+    else {
+        for (i = 0; i < buf_size; i++) {
+            read_buf[i] = 8;
+        }
+    }
+
+    /* check that the atomicity flag is set */
+    ret = H5Fget_mpi_atomicity(fid, &atomicity);
+    VRFY((ret >= 0), "atomcity get failed");
+    VRFY((atomicity == TRUE), "atomcity set failed");
+
+    MPI_Barrier(comm);
+
+    /* Process 0 writes contiguously to the entire dataset */
+    if (0 == mpi_rank) {
+        ret = H5Dwrite(dataset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, write_buf);
+        VRFY((ret >= 0), "H5Dwrite dataset1 succeeded");
+    }
+    /* The other processes read the entire dataset */
+    else {
+        /* NOTE(review): message text says "H5Dwrite" for a read --
+         * looks copy-pasted from the write path */
+        ret = H5Dread(dataset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_buf);
+        VRFY((ret >= 0), "H5Dwrite() dataset multichunk write succeeded");
+    }
+
+    if (VERBOSE_MED) {
+        i = 0;
+        j = 0;
+        k = 0;
+        for (i = 0; i < dim0; i++) {
+            HDprintf("\n");
+            for (j = 0; j < dim1; j++)
+                HDprintf("%d ", read_buf[k++]);
+        }
+    }
+
+    /* The processes that read the dataset must either read all values
+       as 0 (read happened before process 0 wrote to dataset 1), or 5
+       (read happened after process 0 wrote to dataset 1) */
+    if (0 != mpi_rank) {
+        int compare = read_buf[0];
+
+        /* NOTE(review): the message below contains a "%d" that is never
+         * substituted -- VRFY prints the string verbatim */
+        VRFY((compare == 0 || compare == 5),
+             "Atomicity Test Failed Process %d: Value read should be 0 or 5\n");
+        for (i = 1; i < buf_size; i++) {
+            if (read_buf[i] != compare) {
+                HDprintf("Atomicity Test Failed Process %d: read_buf[%d] is %d, should be %d\n", mpi_rank, i,
+                         read_buf[i], compare);
+                nerrors++;
+            }
+        }
+    }
+
+    ret = H5Dclose(dataset1);
+    VRFY((ret >= 0), "H5D close succeeded");
+
+    /* release data buffers */
+    if (write_buf)
+        HDfree(write_buf);
+    if (read_buf)
+        HDfree(read_buf);
+
+    /* open dataset2 (non-contiguous case) */
+    dataset2 = H5Dopen2(fid, DATASETNAME6, H5P_DEFAULT);
+    VRFY((dataset2 >= 0), "H5Dopen2 succeeded");
+
+    /* allocate memory for data buffer */
+    write_buf = (int *)HDcalloc((size_t)buf_size, sizeof(int));
+    VRFY((write_buf != NULL), "write_buf HDcalloc succeeded");
+    /* allocate memory for data buffer */
+    read_buf = (int *)HDcalloc((size_t)buf_size, sizeof(int));
+    VRFY((read_buf != NULL), "read_buf HDcalloc succeeded");
+
+    for (i = 0; i < buf_size; i++) {
+        write_buf[i] = 5;
+    }
+    for (i = 0; i < buf_size; i++) {
+        read_buf[i] = 8;
+    }
+
+    atomicity = FALSE;
+    /* check that the atomicity flag is set */
+    ret = H5Fget_mpi_atomicity(fid, &atomicity);
+    VRFY((ret >= 0), "atomcity get failed");
+    VRFY((atomicity == TRUE), "atomcity set failed");
+
+    /* Non-contiguous selection: one block per rank in each dimension,
+     * with one-element gaps between blocks (stride = block + 1) */
+    block[0] = (hsize_t)(dim0 / mpi_size - 1);
+    block[1] = (hsize_t)(dim1 / mpi_size - 1);
+    stride[0] = block[0] + 1;
+    stride[1] = block[1] + 1;
+    count[0] = (hsize_t)mpi_size;
+    count[1] = (hsize_t)mpi_size;
+    start[0] = 0;
+    start[1] = 0;
+
+    /* create a file dataspace */
+    file_dataspace = H5Dget_space(dataset2);
+    VRFY((file_dataspace >= 0), "H5Dget_space succeeded");
+    ret = H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+    /* create a memory dataspace */
+    mem_dataspace = H5Screate_simple(RANK, dims, NULL);
+    VRFY((mem_dataspace >= 0), "");
+
+    ret = H5Sselect_hyperslab(mem_dataspace, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+    MPI_Barrier(comm);
+
+    /* Process 0 writes to the dataset */
+    if (0 == mpi_rank) {
+        ret = H5Dwrite(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, write_buf);
+        VRFY((ret >= 0), "H5Dwrite dataset2 succeeded");
+    }
+    /* All processes wait for the write to finish. This works because
+       atomicity is set to true */
+    MPI_Barrier(comm);
+    /* The other processes read the entire dataset */
+    if (0 != mpi_rank) {
+        ret = H5Dread(dataset2, H5T_NATIVE_INT, mem_dataspace, file_dataspace, H5P_DEFAULT, read_buf);
+        VRFY((ret >= 0), "H5Dread dataset2 succeeded");
+    }
+
+    if (VERBOSE_MED) {
+        if (mpi_rank == 1) {
+            i = 0;
+            j = 0;
+            k = 0;
+            for (i = 0; i < dim0; i++) {
+                HDprintf("\n");
+                for (j = 0; j < dim1; j++)
+                    HDprintf("%d ", read_buf[k++]);
+            }
+            HDprintf("\n");
+        }
+    }
+
+    /* The processes that read the dataset must either read all values
+       as 5 (read happened after process 0 wrote to dataset 1) */
+    if (0 != mpi_rank) {
+        int compare;
+        i = 0;
+        j = 0;
+        k = 0;
+
+        compare = 5;
+
+        /* Walk the buffer, skipping positions outside the hyperslab
+         * selection (gap rows/columns and positions past this rank's
+         * blocks); only selected positions must equal `compare` */
+        for (i = 0; i < dim0; i++) {
+            if (i >= mpi_rank * ((int)block[0] + 1)) {
+                break;
+            }
+            if ((i + 1) % ((int)block[0] + 1) == 0) {
+                k += dim1;
+                continue;
+            }
+            for (j = 0; j < dim1; j++) {
+                if (j >= mpi_rank * ((int)block[1] + 1)) {
+                    k += dim1 - mpi_rank * ((int)block[1] + 1);
+                    break;
+                }
+                if ((j + 1) % ((int)block[1] + 1) == 0) {
+                    k++;
+                    continue;
+                }
+                else if (compare != read_buf[k]) {
+                    HDprintf("Atomicity Test Failed Process %d: read_buf[%d] is %d, should be %d\n", mpi_rank,
+                             k, read_buf[k], compare);
+                    nerrors++;
+                }
+                k++;
+            }
+        }
+    }
+
+    ret = H5Dclose(dataset2);
+    VRFY((ret >= 0), "H5Dclose succeeded");
+    ret = H5Sclose(file_dataspace);
+    VRFY((ret >= 0), "H5Sclose succeeded");
+    ret = H5Sclose(mem_dataspace);
+    VRFY((ret >= 0), "H5Sclose succeeded");
+
+    /* release data buffers */
+    if (write_buf)
+        HDfree(write_buf);
+    if (read_buf)
+        HDfree(read_buf);
+
+    ret = H5Fclose(fid);
+    VRFY((ret >= 0), "H5Fclose succeeded");
+}
+
+/* Function: test_dense_attr
+ *
+ * Purpose: Test cases for writing dense attributes in parallel.
+ *
+ * Setting the attribute phase-change thresholds to (0, 0) forces the
+ * group's attributes into dense storage immediately.
+ *
+ * Programmer: Quincey Koziol
+ * Date: April, 2013
+ */
+void
+test_dense_attr(void)
+{
+    int mpi_size, mpi_rank;
+    hid_t fpid, fid;
+    hid_t gid, gpid;
+    hid_t atFileSpace, atid;
+    hsize_t atDims[1] = {10000};
+    herr_t status;
+    const char *filename;
+
+    /* set up MPI parameters */
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file, group, dataset, or attribute aren't supported with "
+                   "this connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    /* get filename */
+    filename = (const char *)PARATESTFILE /* GetTestParameters() */;
+    HDassert(filename != NULL);
+
+    /* Create the file with the latest format so dense attribute storage
+     * is available */
+    fpid = H5Pcreate(H5P_FILE_ACCESS);
+    VRFY((fpid > 0), "H5Pcreate succeeded");
+    status = H5Pset_libver_bounds(fpid, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
+    VRFY((status >= 0), "H5Pset_libver_bounds succeeded");
+    status = H5Pset_fapl_mpio(fpid, MPI_COMM_WORLD, MPI_INFO_NULL);
+    VRFY((status >= 0), "H5Pset_fapl_mpio succeeded");
+    fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fpid);
+    VRFY((fid > 0), "H5Fcreate succeeded");
+    status = H5Pclose(fpid);
+    VRFY((status >= 0), "H5Pclose succeeded");
+
+    /* Phase change (0, 0) => attributes go straight to dense storage */
+    gpid = H5Pcreate(H5P_GROUP_CREATE);
+    VRFY((gpid > 0), "H5Pcreate succeeded");
+    status = H5Pset_attr_phase_change(gpid, 0, 0);
+    VRFY((status >= 0), "H5Pset_attr_phase_change succeeded");
+    gid = H5Gcreate2(fid, "foo", H5P_DEFAULT, gpid, H5P_DEFAULT);
+    VRFY((gid > 0), "H5Gcreate2 succeeded");
+    status = H5Pclose(gpid);
+    VRFY((status >= 0), "H5Pclose succeeded");
+
+    /* Create a large (10000-element) attribute on the group */
+    atFileSpace = H5Screate_simple(1, atDims, NULL);
+    VRFY((atFileSpace > 0), "H5Screate_simple succeeded");
+    atid = H5Acreate2(gid, "bar", H5T_STD_U64LE, atFileSpace, H5P_DEFAULT, H5P_DEFAULT);
+    VRFY((atid > 0), "H5Acreate succeeded");
+    status = H5Sclose(atFileSpace);
+    VRFY((status >= 0), "H5Sclose succeeded");
+
+    status = H5Aclose(atid);
+    VRFY((status >= 0), "H5Aclose succeeded");
+
+    status = H5Gclose(gid);
+    VRFY((status >= 0), "H5Gclose succeeded");
+    status = H5Fclose(fid);
+    VRFY((status >= 0), "H5Fclose succeeded");
+
+    return;
+}
diff --git a/testpar/API/t_file.c b/testpar/API/t_file.c
new file mode 100644
index 0000000..936454a
--- /dev/null
+++ b/testpar/API/t_file.c
@@ -0,0 +1,1032 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * Parallel tests for file operations
+ */
+
+#include "hdf5.h"
+#include "testphdf5.h"
+
+#if 0
+#include "H5CXprivate.h" /* API Contexts */
+#include "H5Iprivate.h"
+#include "H5PBprivate.h"
+
+/*
+ * This file needs to access private information from the H5F package.
+ */
+#define H5AC_FRIEND /*suppress error about including H5ACpkg */
+#include "H5ACpkg.h"
+#define H5C_FRIEND /*suppress error about including H5Cpkg */
+#include "H5Cpkg.h"
+#define H5F_FRIEND /*suppress error about including H5Fpkg */
+#define H5F_TESTING
+#include "H5Fpkg.h"
+#define H5MF_FRIEND /*suppress error about including H5MFpkg */
+#include "H5MFpkg.h"
+#endif
+
+#define NUM_DSETS 5
+
+int mpi_size, mpi_rank;
+
+#if 0
+static int create_file(const char *filename, hid_t fcpl, hid_t fapl, int metadata_write_strategy);
+static int open_file(const char *filename, hid_t fapl, int metadata_write_strategy, hsize_t page_size,
+ size_t page_buffer_size);
+#endif
+
+/*
+ * Test file access by a communicator besides COMM_WORLD.
+ * Split COMM_WORLD into two: one (even_comm) contains the original
+ * processes of even ranks; the other (odd_comm) contains the original
+ * processes of odd ranks. Processes in even_comm create a file, then
+ * close it, using even_comm. Processes in odd_comm just do a barrier
+ * using odd_comm. Then they all do a barrier using COMM_WORLD.
+ * If the file creation and close do not perform the correct collective
+ * action according to the communicator argument, the processes will
+ * freeze up sooner or later due to a barrier mix-up.
+ */
+void
+test_split_comm_access(void)
+{
+ MPI_Comm comm;
+ MPI_Info info = MPI_INFO_NULL;
+ int is_old, mrc;
+ int newrank, newprocs;
+ hid_t fid; /* file IDs */
+ hid_t acc_tpl; /* File access properties */
+ herr_t ret; /* generic return value */
+ const char *filename;
+
+ filename = (const char *)PARATESTFILE /* GetTestParameters()*/;
+ if (VERBOSE_MED)
+ HDprintf("Split Communicator access test on file %s\n", filename);
+
+ /* set up MPI parameters */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file aren't supported with this connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ is_old = mpi_rank % 2;
+ mrc = MPI_Comm_split(MPI_COMM_WORLD, is_old, mpi_rank, &comm);
+ VRFY((mrc == MPI_SUCCESS), "");
+ MPI_Comm_size(comm, &newprocs);
+ MPI_Comm_rank(comm, &newrank);
+
+ if (is_old) {
+ /* odd-rank processes */
+ mrc = MPI_Barrier(comm);
+ VRFY((mrc == MPI_SUCCESS), "");
+ }
+ else {
+ /* even-rank processes */
+ int sub_mpi_rank; /* rank in the sub-comm */
+ MPI_Comm_rank(comm, &sub_mpi_rank);
+
+ /* setup file access template */
+ acc_tpl = create_faccess_plist(comm, info, facc_type);
+ VRFY((acc_tpl >= 0), "");
+
+ /* create the file collectively */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl);
+ VRFY((fid >= 0), "H5Fcreate succeeded");
+
+ /* close the file */
+ ret = H5Fclose(fid);
+ VRFY((ret >= 0), "");
+
+ /* delete the test file */
+ ret = H5Fdelete(filename, acc_tpl);
+ VRFY((ret >= 0), "H5Fdelete succeeded");
+
+ /* Release file-access template */
+ ret = H5Pclose(acc_tpl);
+ VRFY((ret >= 0), "");
+ }
+ mrc = MPI_Comm_free(&comm);
+ VRFY((mrc == MPI_SUCCESS), "MPI_Comm_free succeeded");
+ mrc = MPI_Barrier(MPI_COMM_WORLD);
+ VRFY((mrc == MPI_SUCCESS), "final MPI_Barrier succeeded");
+}
+
+#if 0
+void
+test_page_buffer_access(void)
+{
+ hid_t file_id = -1; /* File ID */
+ hid_t fcpl, fapl;
+ size_t page_count = 0;
+ int i, num_elements = 200;
+ haddr_t raw_addr, meta_addr;
+ int *data;
+ H5F_t *f = NULL;
+ herr_t ret; /* generic return value */
+ const char *filename;
+ hbool_t api_ctx_pushed = FALSE; /* Whether API context pushed */
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ filename = (const char *)GetTestParameters();
+
+ if (VERBOSE_MED)
+ HDprintf("Page Buffer Usage in Parallel %s\n", filename);
+
+ fapl = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ VRFY((fapl >= 0), "create_faccess_plist succeeded");
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ VRFY((fcpl >= 0), "");
+
+ ret = H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_PAGE, 1, (hsize_t)0);
+ VRFY((ret == 0), "");
+ ret = H5Pset_file_space_page_size(fcpl, sizeof(int) * 128);
+ VRFY((ret == 0), "");
+ ret = H5Pset_page_buffer_size(fapl, sizeof(int) * 100000, 0, 0);
+ VRFY((ret == 0), "");
+
+ /* This should fail because collective metadata writes are not supported with page buffering */
+ H5E_BEGIN_TRY
+ {
+ file_id = H5Fcreate(filename, H5F_ACC_TRUNC, fcpl, fapl);
+ }
+ H5E_END_TRY;
+ VRFY((file_id < 0), "H5Fcreate failed");
+
+ /* disable collective metadata writes for page buffering to work */
+ ret = H5Pset_coll_metadata_write(fapl, FALSE);
+ VRFY((ret >= 0), "");
+
+ ret = create_file(filename, fcpl, fapl, H5AC_METADATA_WRITE_STRATEGY__DISTRIBUTED);
+ VRFY((ret == 0), "");
+ ret = open_file(filename, fapl, H5AC_METADATA_WRITE_STRATEGY__DISTRIBUTED, sizeof(int) * 100,
+ sizeof(int) * 100000);
+ VRFY((ret == 0), "");
+
+ ret = create_file(filename, fcpl, fapl, H5AC_METADATA_WRITE_STRATEGY__PROCESS_0_ONLY);
+ VRFY((ret == 0), "");
+ ret = open_file(filename, fapl, H5AC_METADATA_WRITE_STRATEGY__PROCESS_0_ONLY, sizeof(int) * 100,
+ sizeof(int) * 100000);
+ VRFY((ret == 0), "");
+
+ ret = H5Pset_file_space_page_size(fcpl, sizeof(int) * 100);
+ VRFY((ret == 0), "");
+
+ data = (int *)HDmalloc(sizeof(int) * (size_t)num_elements);
+
+ /* initialize all the elements to have a value of -1 */
+ for (i = 0; i < num_elements; i++)
+ data[i] = -1;
+ if (MAINPROCESS) {
+ hid_t fapl_self = H5I_INVALID_HID;
+ fapl_self = create_faccess_plist(MPI_COMM_SELF, MPI_INFO_NULL, facc_type);
+
+ ret = H5Pset_page_buffer_size(fapl_self, sizeof(int) * 1000, 0, 0);
+ VRFY((ret == 0), "");
+ /* collective metadata writes do not work with page buffering */
+ ret = H5Pset_coll_metadata_write(fapl_self, FALSE);
+ VRFY((ret >= 0), "");
+
+ file_id = H5Fcreate(filename, H5F_ACC_TRUNC, fcpl, fapl_self);
+ VRFY((file_id >= 0), "");
+
+ /* Push API context */
+ ret = H5CX_push();
+ VRFY((ret == 0), "H5CX_push()");
+ api_ctx_pushed = TRUE;
+
+ /* Get a pointer to the internal file object */
+ f = (H5F_t *)H5I_object(file_id);
+
+ VRFY((f->shared->page_buf != NULL), "Page Buffer created with 1 process");
+
+ /* allocate space for 200 raw elements */
+ raw_addr = H5MF_alloc(f, H5FD_MEM_DRAW, sizeof(int) * (size_t)num_elements);
+ VRFY((raw_addr != HADDR_UNDEF), "");
+
+ /* allocate space for 200 metadata elements */
+ meta_addr = H5MF_alloc(f, H5FD_MEM_SUPER, sizeof(int) * (size_t)num_elements);
+ VRFY((meta_addr != HADDR_UNDEF), "");
+
+ page_count = 0;
+
+ ret = H5F_block_write(f, H5FD_MEM_SUPER, meta_addr, sizeof(int) * (size_t)num_elements, data);
+ VRFY((ret == 0), "");
+ ret = H5F_block_write(f, H5FD_MEM_SUPER, meta_addr, sizeof(int) * (size_t)num_elements, data);
+ ret = H5F_block_write(f, H5FD_MEM_DRAW, raw_addr, sizeof(int) * (size_t)num_elements, data);
+ VRFY((ret == 0), "");
+
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+
+ /* update the first 50 elements */
+ for (i = 0; i < 50; i++)
+ data[i] = i;
+ ret = H5F_block_write(f, H5FD_MEM_DRAW, raw_addr, sizeof(int) * 50, data);
+ H5Eprint2(H5E_DEFAULT, stderr);
+ VRFY((ret == 0), "");
+ ret = H5F_block_write(f, H5FD_MEM_SUPER, meta_addr, sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ page_count += 2;
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+
+ /* update the second 50 elements */
+ for (i = 0; i < 50; i++)
+ data[i] = i + 50;
+ ret = H5F_block_write(f, H5FD_MEM_DRAW, raw_addr + (sizeof(int) * 50), sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ ret = H5F_block_write(f, H5FD_MEM_SUPER, meta_addr + (sizeof(int) * 50), sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+
+ /* update 100 - 200 */
+ for (i = 0; i < 100; i++)
+ data[i] = i + 100;
+ ret = H5F_block_write(f, H5FD_MEM_DRAW, raw_addr + (sizeof(int) * 100), sizeof(int) * 100, data);
+ VRFY((ret == 0), "");
+ ret = H5F_block_write(f, H5FD_MEM_SUPER, meta_addr + (sizeof(int) * 100), sizeof(int) * 100, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+
+ ret = H5PB_flush(f->shared);
+ VRFY((ret == 0), "");
+
+ /* read elements 0 - 200 */
+ ret = H5F_block_read(f, H5FD_MEM_DRAW, raw_addr, sizeof(int) * 200, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+ for (i = 0; i < 200; i++)
+ VRFY((data[i] == i), "Read different values than written");
+ ret = H5F_block_read(f, H5FD_MEM_SUPER, meta_addr, sizeof(int) * 200, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+ for (i = 0; i < 200; i++)
+ VRFY((data[i] == i), "Read different values than written");
+
+ /* read elements 0 - 50 */
+ ret = H5F_block_read(f, H5FD_MEM_DRAW, raw_addr, sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+ for (i = 0; i < 50; i++)
+ VRFY((data[i] == i), "Read different values than written");
+ ret = H5F_block_read(f, H5FD_MEM_SUPER, meta_addr, sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+ for (i = 0; i < 50; i++)
+ VRFY((data[i] == i), "Read different values than written");
+
+ /* close the file */
+ ret = H5Fclose(file_id);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+ ret = H5Pclose(fapl_self);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /* Pop API context */
+ if (api_ctx_pushed) {
+ ret = H5CX_pop(FALSE);
+ VRFY((ret == 0), "H5CX_pop()");
+ api_ctx_pushed = FALSE;
+ }
+ }
+
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ if (mpi_size > 1) {
+ ret = H5Pset_page_buffer_size(fapl, sizeof(int) * 1000, 0, 0);
+ VRFY((ret == 0), "");
+ /* collective metadata writes do not work with page buffering */
+ ret = H5Pset_coll_metadata_write(fapl, FALSE);
+ VRFY((ret >= 0), "");
+
+ file_id = H5Fcreate(filename, H5F_ACC_TRUNC, fcpl, fapl);
+ VRFY((file_id >= 0), "");
+
+ /* Push API context */
+ ret = H5CX_push();
+ VRFY((ret == 0), "H5CX_push()");
+ api_ctx_pushed = TRUE;
+
+ /* Get a pointer to the internal file object */
+ f = (H5F_t *)H5I_object(file_id);
+
+ VRFY((f->shared->page_buf != NULL), "Page Buffer created with 1 process");
+
+ /* allocate space for 200 raw elements */
+ raw_addr = H5MF_alloc(f, H5FD_MEM_DRAW, sizeof(int) * (size_t)num_elements);
+ VRFY((raw_addr != HADDR_UNDEF), "");
+ /* allocate space for 200 metadata elements */
+ meta_addr = H5MF_alloc(f, H5FD_MEM_SUPER, sizeof(int) * (size_t)num_elements);
+ VRFY((meta_addr != HADDR_UNDEF), "");
+
+ page_count = 0;
+
+ ret = H5F_block_write(f, H5FD_MEM_SUPER, meta_addr, sizeof(int) * (size_t)num_elements, data);
+ VRFY((ret == 0), "");
+ ret = H5F_block_write(f, H5FD_MEM_DRAW, raw_addr, sizeof(int) * (size_t)num_elements, data);
+ VRFY((ret == 0), "");
+
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+
+ /* update the first 50 elements */
+ for (i = 0; i < 50; i++)
+ data[i] = i;
+ ret = H5F_block_write(f, H5FD_MEM_DRAW, raw_addr, sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ ret = H5F_block_write(f, H5FD_MEM_SUPER, meta_addr, sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+
+ /* update the second 50 elements */
+ for (i = 0; i < 50; i++)
+ data[i] = i + 50;
+ ret = H5F_block_write(f, H5FD_MEM_DRAW, raw_addr + (sizeof(int) * 50), sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ ret = H5F_block_write(f, H5FD_MEM_SUPER, meta_addr + (sizeof(int) * 50), sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+
+ /* update 100 - 200 */
+ for (i = 0; i < 100; i++)
+ data[i] = i + 100;
+ ret = H5F_block_write(f, H5FD_MEM_DRAW, raw_addr + (sizeof(int) * 100), sizeof(int) * 100, data);
+ VRFY((ret == 0), "");
+ ret = H5F_block_write(f, H5FD_MEM_SUPER, meta_addr + (sizeof(int) * 100), sizeof(int) * 100, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+
+ ret = H5Fflush(file_id, H5F_SCOPE_GLOBAL);
+ VRFY((ret == 0), "");
+
+ /* read elements 0 - 200 */
+ ret = H5F_block_read(f, H5FD_MEM_DRAW, raw_addr, sizeof(int) * 200, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+ for (i = 0; i < 200; i++)
+ VRFY((data[i] == i), "Read different values than written");
+ ret = H5F_block_read(f, H5FD_MEM_SUPER, meta_addr, sizeof(int) * 200, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+ for (i = 0; i < 200; i++)
+ VRFY((data[i] == i), "Read different values than written");
+
+ /* read elements 0 - 50 */
+ ret = H5F_block_read(f, H5FD_MEM_DRAW, raw_addr, sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+ for (i = 0; i < 50; i++)
+ VRFY((data[i] == i), "Read different values than written");
+ ret = H5F_block_read(f, H5FD_MEM_SUPER, meta_addr, sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ page_count += 1;
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+ for (i = 0; i < 50; i++)
+ VRFY((data[i] == i), "Read different values than written");
+
+ MPI_Barrier(MPI_COMM_WORLD);
+ /* reset the first 50 elements to -1*/
+ for (i = 0; i < 50; i++)
+ data[i] = -1;
+ ret = H5F_block_write(f, H5FD_MEM_DRAW, raw_addr, sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+ ret = H5F_block_write(f, H5FD_MEM_SUPER, meta_addr, sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+
+ /* read elements 0 - 50 */
+ ret = H5F_block_read(f, H5FD_MEM_DRAW, raw_addr, sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+ for (i = 0; i < 50; i++)
+ VRFY((data[i] == -1), "Read different values than written");
+ ret = H5F_block_read(f, H5FD_MEM_SUPER, meta_addr, sizeof(int) * 50, data);
+ VRFY((ret == 0), "");
+ VRFY((H5SL_count(f->shared->page_buf->slist_ptr) == page_count), "Wrong number of pages in PB");
+ for (i = 0; i < 50; i++)
+ VRFY((data[i] == -1), "Read different values than written");
+
+ /* close the file */
+ ret = H5Fclose(file_id);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+ }
+
+ ret = H5Pclose(fapl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+ ret = H5Pclose(fcpl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /* Pop API context */
+ if (api_ctx_pushed) {
+ ret = H5CX_pop(FALSE);
+ VRFY((ret == 0), "H5CX_pop()");
+ api_ctx_pushed = FALSE;
+ }
+
+ HDfree(data);
+ data = NULL;
+ MPI_Barrier(MPI_COMM_WORLD);
+}
+
+static int
+create_file(const char *filename, hid_t fcpl, hid_t fapl, int metadata_write_strategy)
+{
+ hid_t file_id, dset_id, grp_id;
+ hid_t sid, mem_dataspace;
+ hsize_t start[RANK];
+ hsize_t count[RANK];
+ hsize_t stride[RANK];
+ hsize_t block[RANK];
+ DATATYPE *data_array = NULL;
+ hsize_t dims[RANK], i;
+ hsize_t num_elements;
+ int k;
+ char dset_name[20];
+ H5F_t *f = NULL;
+ H5C_t *cache_ptr = NULL;
+ H5AC_cache_config_t config;
+ hbool_t api_ctx_pushed = FALSE; /* Whether API context pushed */
+ herr_t ret;
+
+ file_id = H5Fcreate(filename, H5F_ACC_TRUNC, fcpl, fapl);
+ VRFY((file_id >= 0), "");
+
+ ret = H5Fflush(file_id, H5F_SCOPE_GLOBAL);
+ VRFY((ret == 0), "");
+
+ /* Push API context */
+ ret = H5CX_push();
+ VRFY((ret == 0), "H5CX_push()");
+ api_ctx_pushed = TRUE;
+
+ f = (H5F_t *)H5I_object(file_id);
+ VRFY((f != NULL), "");
+
+ cache_ptr = f->shared->cache;
+ VRFY((cache_ptr->magic == H5C__H5C_T_MAGIC), "");
+
+ cache_ptr->ignore_tags = TRUE;
+ H5C_stats__reset(cache_ptr);
+ config.version = H5AC__CURR_CACHE_CONFIG_VERSION;
+
+ ret = H5AC_get_cache_auto_resize_config(cache_ptr, &config);
+ VRFY((ret == 0), "");
+
+ config.metadata_write_strategy = metadata_write_strategy;
+
+ ret = H5AC_set_cache_auto_resize_config(cache_ptr, &config);
+ VRFY((ret == 0), "");
+
+ grp_id = H5Gcreate2(file_id, "GROUP", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((grp_id >= 0), "");
+
+ dims[0] = (hsize_t)(ROW_FACTOR * mpi_size);
+ dims[1] = (hsize_t)(COL_FACTOR * mpi_size);
+ sid = H5Screate_simple(RANK, dims, NULL);
+ VRFY((sid >= 0), "H5Screate_simple succeeded");
+
+ /* Each process takes a slabs of rows. */
+ block[0] = dims[0] / (hsize_t)mpi_size;
+ block[1] = dims[1];
+ stride[0] = block[0];
+ stride[1] = block[1];
+ count[0] = 1;
+ count[1] = 1;
+ start[0] = (hsize_t)mpi_rank * block[0];
+ start[1] = 0;
+
+ num_elements = block[0] * block[1];
+ /* allocate memory for data buffer */
+ data_array = (DATATYPE *)HDmalloc(num_elements * sizeof(DATATYPE));
+ VRFY((data_array != NULL), "data_array HDmalloc succeeded");
+ /* put some trivial data in the data_array */
+ for (i = 0; i < num_elements; i++)
+ data_array[i] = mpi_rank + 1;
+
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ /* create a memory dataspace independently */
+ mem_dataspace = H5Screate_simple(1, &num_elements, NULL);
+ VRFY((mem_dataspace >= 0), "");
+
+ for (k = 0; k < NUM_DSETS; k++) {
+ HDsnprintf(dset_name, sizeof(dset_name), "D1dset%d", k);
+ dset_id = H5Dcreate2(grp_id, dset_name, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((dset_id >= 0), "");
+ ret = H5Dclose(dset_id);
+ VRFY((ret == 0), "");
+
+ HDsnprintf(dset_name, sizeof(dset_name), "D2dset%d", k);
+ dset_id = H5Dcreate2(grp_id, dset_name, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((dset_id >= 0), "");
+ ret = H5Dclose(dset_id);
+ VRFY((ret == 0), "");
+
+ HDsnprintf(dset_name, sizeof(dset_name), "D3dset%d", k);
+ dset_id = H5Dcreate2(grp_id, dset_name, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((dset_id >= 0), "");
+ ret = H5Dclose(dset_id);
+ VRFY((ret == 0), "");
+
+ HDsnprintf(dset_name, sizeof(dset_name), "dset%d", k);
+ dset_id = H5Dcreate2(grp_id, dset_name, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((dset_id >= 0), "");
+
+ ret = H5Dwrite(dset_id, H5T_NATIVE_INT, mem_dataspace, sid, H5P_DEFAULT, data_array);
+ VRFY((ret == 0), "");
+
+ ret = H5Dclose(dset_id);
+ VRFY((ret == 0), "");
+
+ HDmemset(data_array, 0, num_elements * sizeof(DATATYPE));
+ dset_id = H5Dopen2(grp_id, dset_name, H5P_DEFAULT);
+ VRFY((dset_id >= 0), "");
+
+ ret = H5Dread(dset_id, H5T_NATIVE_INT, mem_dataspace, sid, H5P_DEFAULT, data_array);
+ VRFY((ret == 0), "");
+
+ ret = H5Dclose(dset_id);
+ VRFY((ret == 0), "");
+
+ for (i = 0; i < num_elements; i++)
+ VRFY((data_array[i] == mpi_rank + 1), "Dataset Verify failed");
+
+ HDsnprintf(dset_name, sizeof(dset_name), "D1dset%d", k);
+ ret = H5Ldelete(grp_id, dset_name, H5P_DEFAULT);
+ VRFY((ret == 0), "");
+ HDsnprintf(dset_name, sizeof(dset_name), "D2dset%d", k);
+ ret = H5Ldelete(grp_id, dset_name, H5P_DEFAULT);
+ VRFY((ret == 0), "");
+ HDsnprintf(dset_name, sizeof(dset_name), "D3dset%d", k);
+ ret = H5Ldelete(grp_id, dset_name, H5P_DEFAULT);
+ VRFY((ret == 0), "");
+ }
+
+ ret = H5Gclose(grp_id);
+ VRFY((ret == 0), "");
+ ret = H5Fclose(file_id);
+ VRFY((ret == 0), "");
+ ret = H5Sclose(sid);
+ VRFY((ret == 0), "");
+ ret = H5Sclose(mem_dataspace);
+ VRFY((ret == 0), "");
+
+ /* Pop API context */
+ if (api_ctx_pushed) {
+ ret = H5CX_pop(FALSE);
+ VRFY((ret == 0), "H5CX_pop()");
+ api_ctx_pushed = FALSE;
+ }
+
+ MPI_Barrier(MPI_COMM_WORLD);
+ HDfree(data_array);
+ return 0;
+} /* create_file */
+
+static int
+open_file(const char *filename, hid_t fapl, int metadata_write_strategy, hsize_t page_size,
+ size_t page_buffer_size)
+{
+ hid_t file_id, dset_id, grp_id, grp_id2;
+ hid_t sid, mem_dataspace;
+ DATATYPE *data_array = NULL;
+ hsize_t dims[RANK];
+ hsize_t start[RANK];
+ hsize_t count[RANK];
+ hsize_t stride[RANK];
+ hsize_t block[RANK];
+ int i, k, ndims;
+ hsize_t num_elements;
+ char dset_name[20];
+ H5F_t *f = NULL;
+ H5C_t *cache_ptr = NULL;
+ H5AC_cache_config_t config;
+ hbool_t api_ctx_pushed = FALSE; /* Whether API context pushed */
+ herr_t ret;
+
+ config.version = H5AC__CURR_CACHE_CONFIG_VERSION;
+ ret = H5Pget_mdc_config(fapl, &config);
+ VRFY((ret == 0), "");
+
+ config.metadata_write_strategy = metadata_write_strategy;
+
+ ret = H5Pget_mdc_config(fapl, &config);
+ VRFY((ret == 0), "");
+
+ file_id = H5Fopen(filename, H5F_ACC_RDWR, fapl);
+ H5Eprint2(H5E_DEFAULT, stderr);
+ VRFY((file_id >= 0), "");
+
+ /* Push API context */
+ ret = H5CX_push();
+ VRFY((ret == 0), "H5CX_push()");
+ api_ctx_pushed = TRUE;
+
+ ret = H5Fflush(file_id, H5F_SCOPE_GLOBAL);
+ VRFY((ret == 0), "");
+
+ f = (H5F_t *)H5I_object(file_id);
+ VRFY((f != NULL), "");
+
+ cache_ptr = f->shared->cache;
+ VRFY((cache_ptr->magic == H5C__H5C_T_MAGIC), "");
+
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ VRFY((f->shared->page_buf != NULL), "");
+ VRFY((f->shared->page_buf->page_size == page_size), "");
+ VRFY((f->shared->page_buf->max_size == page_buffer_size), "");
+
+ grp_id = H5Gopen2(file_id, "GROUP", H5P_DEFAULT);
+ VRFY((grp_id >= 0), "");
+
+ dims[0] = (hsize_t)(ROW_FACTOR * mpi_size);
+ dims[1] = (hsize_t)(COL_FACTOR * mpi_size);
+
+ /* Each process takes a slabs of rows. */
+ block[0] = dims[0] / (hsize_t)mpi_size;
+ block[1] = dims[1];
+ stride[0] = block[0];
+ stride[1] = block[1];
+ count[0] = 1;
+ count[1] = 1;
+ start[0] = (hsize_t)mpi_rank * block[0];
+ start[1] = 0;
+
+ num_elements = block[0] * block[1];
+ /* allocate memory for data buffer */
+ data_array = (DATATYPE *)HDmalloc(num_elements * sizeof(DATATYPE));
+ VRFY((data_array != NULL), "data_array HDmalloc succeeded");
+
+ /* create a memory dataspace independently */
+ mem_dataspace = H5Screate_simple(1, &num_elements, NULL);
+ VRFY((mem_dataspace >= 0), "");
+
+ for (k = 0; k < NUM_DSETS; k++) {
+ HDsnprintf(dset_name, sizeof(dset_name), "dset%d", k);
+ dset_id = H5Dopen2(grp_id, dset_name, H5P_DEFAULT);
+ VRFY((dset_id >= 0), "");
+
+ sid = H5Dget_space(dset_id);
+ VRFY((dset_id >= 0), "H5Dget_space succeeded");
+
+ ndims = H5Sget_simple_extent_dims(sid, dims, NULL);
+ VRFY((ndims == 2), "H5Sget_simple_extent_dims succeeded");
+ VRFY(dims[0] == (hsize_t)(ROW_FACTOR * mpi_size), "Wrong dataset dimensions");
+ VRFY(dims[1] == (hsize_t)(COL_FACTOR * mpi_size), "Wrong dataset dimensions");
+
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sset_hyperslab succeeded");
+
+ ret = H5Dread(dset_id, H5T_NATIVE_INT, mem_dataspace, sid, H5P_DEFAULT, data_array);
+ VRFY((ret >= 0), "");
+
+ ret = H5Dclose(dset_id);
+ VRFY((ret >= 0), "");
+ ret = H5Sclose(sid);
+ VRFY((ret == 0), "");
+
+ for (i = 0; i < (int)num_elements; i++)
+ VRFY((data_array[i] == mpi_rank + 1), "Dataset Verify failed");
+ }
+
+ grp_id2 = H5Gcreate2(file_id, "GROUP/GROUP2", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((grp_id2 >= 0), "");
+ ret = H5Gclose(grp_id2);
+ VRFY((ret == 0), "");
+
+ ret = H5Fflush(file_id, H5F_SCOPE_GLOBAL);
+ VRFY((ret == 0), "");
+
+ MPI_Barrier(MPI_COMM_WORLD);
+    /* Walk the metadata cache's hash table and expunge every clean,
+     * unpinned, unprotected entry.
+     */
+ for (i = 0; i < H5C__HASH_TABLE_LEN; i++) {
+ H5C_cache_entry_t *entry_ptr = NULL;
+
+ entry_ptr = cache_ptr->index[i];
+
+ while (entry_ptr != NULL) {
+ HDassert(entry_ptr->magic == H5C__H5C_CACHE_ENTRY_T_MAGIC);
+ HDassert(entry_ptr->is_dirty == FALSE);
+
+ if (!entry_ptr->is_pinned && !entry_ptr->is_protected) {
+ ret = H5AC_expunge_entry(f, entry_ptr->type, entry_ptr->addr, 0);
+ VRFY((ret == 0), "");
+ }
+
+ entry_ptr = entry_ptr->ht_next;
+ }
+ }
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ grp_id2 = H5Gopen2(file_id, "GROUP/GROUP2", H5P_DEFAULT);
+ H5Eprint2(H5E_DEFAULT, stderr);
+ VRFY((grp_id2 >= 0), "");
+ ret = H5Gclose(grp_id2);
+ H5Eprint2(H5E_DEFAULT, stderr);
+ VRFY((ret == 0), "");
+
+ ret = H5Gclose(grp_id);
+ VRFY((ret == 0), "");
+ ret = H5Fclose(file_id);
+ VRFY((ret == 0), "");
+ ret = H5Sclose(mem_dataspace);
+ VRFY((ret == 0), "");
+
+ /* Pop API context */
+ if (api_ctx_pushed) {
+ ret = H5CX_pop(FALSE);
+ VRFY((ret == 0), "H5CX_pop()");
+ api_ctx_pushed = FALSE;
+ }
+
+ HDfree(data_array);
+
+ return nerrors;
+}
+#endif
+
+/*
+ * NOTE: See HDFFV-10894 and add tests later to verify MPI-specific properties in the
+ * incoming fapl that could conflict with the existing values in H5F_shared_t on
+ * multiple opens of the same file.
+ */
+void
+test_file_properties(void)
+{
+ hid_t fid = H5I_INVALID_HID; /* HDF5 file ID */
+ hid_t fapl_id = H5I_INVALID_HID; /* File access plist */
+ hid_t fapl_copy_id = H5I_INVALID_HID; /* File access plist */
+ hbool_t is_coll;
+ htri_t are_equal;
+ const char *filename;
+ MPI_Comm comm = MPI_COMM_WORLD;
+ MPI_Info info = MPI_INFO_NULL;
+ MPI_Comm comm_out = MPI_COMM_NULL;
+ MPI_Info info_out = MPI_INFO_NULL;
+ herr_t ret; /* Generic return value */
+ int mpi_ret; /* MPI return value */
+ int cmp; /* Compare value */
+
+ /* set up MPI parameters */
+ mpi_ret = MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ VRFY((mpi_ret >= 0), "MPI_Comm_size succeeded");
+ mpi_ret = MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ VRFY((mpi_ret >= 0), "MPI_Comm_rank succeeded");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file aren't supported with this connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ filename = (const char *)PARATESTFILE /* GetTestParameters() */;
+
+ mpi_ret = MPI_Info_create(&info);
+ VRFY((mpi_ret >= 0), "MPI_Info_create succeeded");
+ mpi_ret = MPI_Info_set(info, "hdf_info_prop1", "xyz");
+ VRFY((mpi_ret == MPI_SUCCESS), "MPI_Info_set");
+
+ /* setup file access plist */
+ fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ VRFY((fapl_id != H5I_INVALID_HID), "H5Pcreate");
+ ret = H5Pset_fapl_mpio(fapl_id, comm, info);
+ VRFY((ret >= 0), "H5Pset_fapl_mpio");
+
+ /* Check getting and setting MPI properties
+ * (for use in VOL connectors, not the MPI-I/O VFD)
+ */
+ ret = H5Pset_mpi_params(fapl_id, comm, info);
+ VRFY((ret >= 0), "H5Pset_mpi_params succeeded");
+ ret = H5Pget_mpi_params(fapl_id, &comm_out, &info_out);
+ VRFY((ret >= 0), "H5Pget_mpi_params succeeded");
+
+ /* Check the communicator */
+ VRFY((comm != comm_out), "Communicators should not be bitwise identical");
+ cmp = MPI_UNEQUAL;
+ mpi_ret = MPI_Comm_compare(comm, comm_out, &cmp);
+ VRFY((ret >= 0), "MPI_Comm_compare succeeded");
+ VRFY((cmp == MPI_CONGRUENT), "Communicators should be congruent via MPI_Comm_compare");
+
+ /* Check the info object */
+ VRFY((info != info_out), "Info objects should not be bitwise identical");
+
+ /* Free the obtained comm and info object */
+ mpi_ret = MPI_Comm_free(&comm_out);
+ VRFY((mpi_ret >= 0), "MPI_Comm_free succeeded");
+ mpi_ret = MPI_Info_free(&info_out);
+ VRFY((mpi_ret >= 0), "MPI_Info_free succeeded");
+
+ /* Copy the fapl and ensure it's equal to the original */
+ fapl_copy_id = H5Pcopy(fapl_id);
+ VRFY((fapl_copy_id != H5I_INVALID_HID), "H5Pcopy");
+ are_equal = H5Pequal(fapl_id, fapl_copy_id);
+ VRFY((TRUE == are_equal), "H5Pequal");
+
+ /* Add a property to the copy and ensure it's different now */
+ mpi_ret = MPI_Info_set(info, "hdf_info_prop2", "abc");
+ VRFY((mpi_ret == MPI_SUCCESS), "MPI_Info_set");
+ ret = H5Pset_mpi_params(fapl_copy_id, comm, info);
+ VRFY((ret >= 0), "H5Pset_mpi_params succeeded");
+ are_equal = H5Pequal(fapl_id, fapl_copy_id);
+ VRFY((FALSE == are_equal), "H5Pequal");
+
+ /* Add a property with the same key but a different value to the original
+ * and ensure they are still different.
+ */
+ mpi_ret = MPI_Info_set(info, "hdf_info_prop2", "ijk");
+ VRFY((mpi_ret == MPI_SUCCESS), "MPI_Info_set");
+ ret = H5Pset_mpi_params(fapl_id, comm, info);
+ VRFY((ret >= 0), "H5Pset_mpi_params succeeded");
+ are_equal = H5Pequal(fapl_id, fapl_copy_id);
+ VRFY((FALSE == are_equal), "H5Pequal");
+
+ /* Set the second property in the original to the same
+ * value as the copy and ensure they are the same now.
+ */
+ mpi_ret = MPI_Info_set(info, "hdf_info_prop2", "abc");
+ VRFY((mpi_ret == MPI_SUCCESS), "MPI_Info_set");
+ ret = H5Pset_mpi_params(fapl_id, comm, info);
+ VRFY((ret >= 0), "H5Pset_mpi_params succeeded");
+ are_equal = H5Pequal(fapl_id, fapl_copy_id);
+ VRFY((TRUE == are_equal), "H5Pequal");
+
+ /* create the file */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ VRFY((fid != H5I_INVALID_HID), "H5Fcreate succeeded");
+
+ /* verify settings for file access properties */
+
+ /* Collective metadata writes */
+ ret = H5Pget_coll_metadata_write(fapl_id, &is_coll);
+ VRFY((ret >= 0), "H5Pget_coll_metadata_write succeeded");
+ VRFY((is_coll == FALSE), "Incorrect property setting for coll metadata writes");
+
+ /* Collective metadata read API calling requirement */
+ ret = H5Pget_all_coll_metadata_ops(fapl_id, &is_coll);
+ VRFY((ret >= 0), "H5Pget_all_coll_metadata_ops succeeded");
+ VRFY((is_coll == FALSE), "Incorrect property setting for coll metadata API calls requirement");
+
+ ret = H5Fclose(fid);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+
+ /* Open the file with the MPI-IO driver */
+ ret = H5Pset_fapl_mpio(fapl_id, comm, info);
+ VRFY((ret >= 0), "H5Pset_fapl_mpio failed");
+ fid = H5Fopen(filename, H5F_ACC_RDWR, fapl_id);
+ VRFY((fid != H5I_INVALID_HID), "H5Fcreate succeeded");
+
+ /* verify settings for file access properties */
+
+ /* Collective metadata writes */
+ ret = H5Pget_coll_metadata_write(fapl_id, &is_coll);
+ VRFY((ret >= 0), "H5Pget_coll_metadata_write succeeded");
+ VRFY((is_coll == FALSE), "Incorrect property setting for coll metadata writes");
+
+ /* Collective metadata read API calling requirement */
+ ret = H5Pget_all_coll_metadata_ops(fapl_id, &is_coll);
+ VRFY((ret >= 0), "H5Pget_all_coll_metadata_ops succeeded");
+ VRFY((is_coll == FALSE), "Incorrect property setting for coll metadata API calls requirement");
+
+ ret = H5Fclose(fid);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+
+ /* Open the file with the MPI-IO driver w/ collective settings */
+ ret = H5Pset_fapl_mpio(fapl_id, comm, info);
+ VRFY((ret >= 0), "H5Pset_fapl_mpio failed");
+ /* Collective metadata writes */
+ ret = H5Pset_coll_metadata_write(fapl_id, TRUE);
+ VRFY((ret >= 0), "H5Pget_coll_metadata_write succeeded");
+ /* Collective metadata read API calling requirement */
+ ret = H5Pset_all_coll_metadata_ops(fapl_id, TRUE);
+ VRFY((ret >= 0), "H5Pget_all_coll_metadata_ops succeeded");
+ fid = H5Fopen(filename, H5F_ACC_RDWR, fapl_id);
+ VRFY((fid != H5I_INVALID_HID), "H5Fcreate succeeded");
+
+ /* verify settings for file access properties */
+
+ /* Collective metadata writes */
+ ret = H5Pget_coll_metadata_write(fapl_id, &is_coll);
+ VRFY((ret >= 0), "H5Pget_coll_metadata_write succeeded");
+ VRFY((is_coll == TRUE), "Incorrect property setting for coll metadata writes");
+
+ /* Collective metadata read API calling requirement */
+ ret = H5Pget_all_coll_metadata_ops(fapl_id, &is_coll);
+ VRFY((ret >= 0), "H5Pget_all_coll_metadata_ops succeeded");
+ VRFY((is_coll == TRUE), "Incorrect property setting for coll metadata API calls requirement");
+
+ /* close fapl and retrieve it from file */
+ ret = H5Pclose(fapl_id);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+ fapl_id = H5I_INVALID_HID;
+
+ fapl_id = H5Fget_access_plist(fid);
+ VRFY((fapl_id != H5I_INVALID_HID), "H5P_FILE_ACCESS");
+
+ /* verify settings for file access properties */
+
+ /* Collective metadata writes */
+ ret = H5Pget_coll_metadata_write(fapl_id, &is_coll);
+ VRFY((ret >= 0), "H5Pget_coll_metadata_write succeeded");
+ VRFY((is_coll == TRUE), "Incorrect property setting for coll metadata writes");
+
+ /* Collective metadata read API calling requirement */
+ ret = H5Pget_all_coll_metadata_ops(fapl_id, &is_coll);
+ VRFY((ret >= 0), "H5Pget_all_coll_metadata_ops succeeded");
+ VRFY((is_coll == TRUE), "Incorrect property setting for coll metadata API calls requirement");
+
+ /* close file */
+ ret = H5Fclose(fid);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+
+ /* Release file-access plist */
+ ret = H5Pclose(fapl_id);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+ ret = H5Pclose(fapl_copy_id);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /* Free the MPI info object */
+ mpi_ret = MPI_Info_free(&info);
+ VRFY((mpi_ret >= 0), "MPI_Info_free succeeded");
+
+} /* end test_file_properties() */
+
+void
+test_delete(void)
+{
+ hid_t fid = H5I_INVALID_HID; /* HDF5 file ID */
+ hid_t fapl_id = H5I_INVALID_HID; /* File access plist */
+ const char *filename = NULL;
+ MPI_Comm comm = MPI_COMM_WORLD;
+ MPI_Info info = MPI_INFO_NULL;
+ htri_t is_hdf5 = FAIL; /* Whether a file is an HDF5 file */
+ herr_t ret; /* Generic return value */
+
+ filename = (const char *)PARATESTFILE /* GetTestParameters() */;
+
+ /* set up MPI parameters */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* setup file access plist */
+ fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ VRFY((fapl_id != H5I_INVALID_HID), "H5Pcreate");
+ ret = H5Pset_fapl_mpio(fapl_id, comm, info);
+ VRFY((SUCCEED == ret), "H5Pset_fapl_mpio");
+
+ /* create the file */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ VRFY((fid != H5I_INVALID_HID), "H5Fcreate");
+
+ /* close the file */
+ ret = H5Fclose(fid);
+ VRFY((SUCCEED == ret), "H5Fclose");
+
+ /* Verify that the file is an HDF5 file */
+ is_hdf5 = H5Fis_accessible(filename, fapl_id);
+ VRFY((TRUE == is_hdf5), "H5Fis_accessible");
+
+ /* Delete the file */
+ ret = H5Fdelete(filename, fapl_id);
+ VRFY((SUCCEED == ret), "H5Fdelete");
+
+ /* Verify that the file is NO LONGER an HDF5 file */
+ /* This should fail since there is no file */
+ H5E_BEGIN_TRY
+ {
+ is_hdf5 = H5Fis_accessible(filename, fapl_id);
+ }
+ H5E_END_TRY;
+ VRFY((is_hdf5 != SUCCEED), "H5Fis_accessible");
+
+ /* Release file-access plist */
+ ret = H5Pclose(fapl_id);
+ VRFY((SUCCEED == ret), "H5Pclose");
+
+} /* end test_delete() */
diff --git a/testpar/API/t_file_image.c b/testpar/API/t_file_image.c
new file mode 100644
index 0000000..4f4fa96
--- /dev/null
+++ b/testpar/API/t_file_image.c
@@ -0,0 +1,371 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * Parallel tests for file image operations
+ */
+
+#include "hdf5.h"
+#include "testphdf5.h"
+
+/* file_image_daisy_chain_test
+ *
+ * Process zero:
+ *
+ * 1) Creates a core file with an integer vector data set of
+ * length n (= mpi_size),
+ *
+ *      2) Initializes the vector to zero in location 0, and to -1
+ *         everywhere else.
+ *
+ * 3) Flushes the core file, and gets an image of it. Closes
+ * the core file.
+ *
+ * 4) Sends the image to process 1.
+ *
+ * 5) Awaits receipt on a file image from process n-1.
+ *
+ * 6) opens the image received from process n-1, verifies that
+ * it contains a vector of length equal to mpi_size, and
+ * that the vector contains (0, 1, 2, ... n-1)
+ *
+ * 7) closes the core file and exits.
+ *
+ * Process i (0 < i < n)
+ *
+ * 1) Await receipt of file image from process (i - 1).
+ *
+ *      2) Open the image with the core file driver, verify that it
+ *         contains a vector v of length n, and that v[j] == j for
+ * 0 <= j < i, and that v[j] == -1 for i <= j < n
+ *
+ * 3) Set v[i] = i in the core file.
+ *
+ * 4) Flush the core file and send it to process (i + 1) % n.
+ *
+ * 5) close the core file and exit.
+ *
+ * Test fails on a hang (if an image is not received), or on invalid data.
+ *
+ * JRM -- 11/28/11
+ */
+/*
+ * Daisy-chain a core-file image around all MPI ranks (see the algorithm
+ * description above).  Each rank receives the image from rank-1, verifies
+ * the "v" dataset, writes v[rank] = rank, and forwards the image to
+ * (rank + 1) % n; rank 0 creates the initial image and performs the final
+ * verification.  The test fails by hanging (image never arrives) or on
+ * invalid data.
+ */
+void
+file_image_daisy_chain_test(void)
+{
+    char file_name[1024] = "\0"; /* per-rank name for the in-memory core file */
+    int mpi_size, mpi_rank;      /* note: locals, not file-scope globals */
+    int mpi_result;
+    int i;
+    int space_ndims;
+    MPI_Status rcvstat;
+    int *vector_ptr = NULL; /* in-memory copy of the "v" dataset */
+    hid_t fapl_id = -1;
+    hid_t file_id; /* file IDs */
+    hid_t dset_id = -1;
+    hid_t dset_type_id = -1;
+    hid_t space_id = -1;
+    herr_t err;
+    hsize_t dims[1];
+    void *image_ptr = NULL; /* buffer holding the serialized file image */
+    ssize_t bytes_read;
+    ssize_t image_len;
+    hbool_t vector_ok = TRUE;
+    htri_t tri_result;
+
+    /* set up MPI parameters */
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* setup file name */
+    HDsnprintf(file_name, 1024, "file_image_daisy_chain_test_%05d.h5", (int)mpi_rank);
+
+    if (mpi_rank == 0) {
+
+        /* 1) Creates a core file with an integer vector data set
+         *    of length mpi_size,
+         */
+        fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+        VRFY((fapl_id >= 0), "creating fapl");
+
+        /* 64 KiB increments, backing store disabled: image stays in memory */
+        err = H5Pset_fapl_core(fapl_id, (size_t)(64 * 1024), FALSE);
+        VRFY((err >= 0), "setting core file driver in fapl.");
+
+        /* NOTE(review): flags argument is 0 rather than H5F_ACC_TRUNC or
+         * H5F_ACC_EXCL -- relies on H5Fcreate's default handling of an
+         * empty flags value; confirm this is intended. */
+        file_id = H5Fcreate(file_name, 0, H5P_DEFAULT, fapl_id);
+        VRFY((file_id >= 0), "created core file");
+
+        dims[0] = (hsize_t)mpi_size;
+        space_id = H5Screate_simple(1, dims, dims);
+        VRFY((space_id >= 0), "created data space");
+
+        dset_id = H5Dcreate2(file_id, "v", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+        VRFY((dset_id >= 0), "created data set");
+
+        /* 2) Initialize the vector to zero in location 0, and
+         *    to -1 everywhere else.
+         */
+
+        vector_ptr = (int *)HDmalloc((size_t)(mpi_size) * sizeof(int));
+        VRFY((vector_ptr != NULL), "allocated in memory representation of vector");
+
+        vector_ptr[0] = 0;
+        for (i = 1; i < mpi_size; i++)
+            vector_ptr[i] = -1;
+
+        err = H5Dwrite(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, (void *)vector_ptr);
+        VRFY((err >= 0), "wrote initial data to vector.");
+
+        HDfree(vector_ptr);
+        vector_ptr = NULL;
+
+        /* 3) Flush the core file, and get an image of it.  Close
+         *    the core file.
+         */
+        err = H5Fflush(file_id, H5F_SCOPE_GLOBAL);
+        VRFY((err >= 0), "flushed core file.");
+
+        /* First call with a NULL buffer returns the required image size */
+        image_len = H5Fget_file_image(file_id, NULL, (size_t)0);
+        VRFY((image_len > 0), "got image file size");
+
+        image_ptr = (void *)HDmalloc((size_t)image_len);
+        VRFY(image_ptr != NULL, "allocated file image buffer.");
+
+        bytes_read = H5Fget_file_image(file_id, image_ptr, (size_t)image_len);
+        VRFY(bytes_read == image_len, "wrote file into image buffer");
+
+        err = H5Sclose(space_id);
+        VRFY((err >= 0), "closed data space.");
+
+        err = H5Dclose(dset_id);
+        VRFY((err >= 0), "closed data set.");
+
+        err = H5Fclose(file_id);
+        VRFY((err >= 0), "closed core file(1).");
+
+        err = H5Pclose(fapl_id);
+        VRFY((err >= 0), "closed fapl(1).");
+
+        /* 4) Send the image to process 1.  The length is shipped first as
+         *    raw bytes (assumes homogeneous ranks), then the image itself. */
+
+        mpi_result = MPI_Ssend((void *)(&image_len), (int)sizeof(ssize_t), MPI_BYTE, 1, 0, MPI_COMM_WORLD);
+        VRFY((mpi_result == MPI_SUCCESS), "sent image size to process 1");
+
+        mpi_result = MPI_Ssend((void *)image_ptr, (int)image_len, MPI_BYTE, 1, 0, MPI_COMM_WORLD);
+        VRFY((mpi_result == MPI_SUCCESS), "sent image to process 1");
+
+        HDfree(image_ptr);
+        image_ptr = NULL;
+        image_len = 0;
+
+        /* 5) Await receipt on a file image from process n-1. */
+
+        mpi_result = MPI_Recv((void *)(&image_len), (int)sizeof(ssize_t), MPI_BYTE, mpi_size - 1, 0,
+                              MPI_COMM_WORLD, &rcvstat);
+        VRFY((mpi_result == MPI_SUCCESS), "received image len from process n-1");
+
+        image_ptr = (void *)HDmalloc((size_t)image_len);
+        VRFY(image_ptr != NULL, "allocated file image receive buffer.");
+
+        mpi_result =
+            MPI_Recv((void *)image_ptr, (int)image_len, MPI_BYTE, mpi_size - 1, 0, MPI_COMM_WORLD, &rcvstat);
+        VRFY((mpi_result == MPI_SUCCESS), "received file image from process n-1");
+
+        /* 6) open the image received from process n-1, verify that
+         *    it contains a vector of length equal to mpi_size, and
+         *    that the vector contains (0, 1, 2, ... n-1).
+         */
+        fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+        VRFY((fapl_id >= 0), "creating fapl");
+
+        err = H5Pset_fapl_core(fapl_id, (size_t)(64 * 1024), FALSE);
+        VRFY((err >= 0), "setting core file driver in fapl.");
+
+        /* Attach the received buffer as the initial file image */
+        err = H5Pset_file_image(fapl_id, image_ptr, (size_t)image_len);
+        VRFY((err >= 0), "set file image in fapl.");
+
+        file_id = H5Fopen(file_name, H5F_ACC_RDWR, fapl_id);
+        VRFY((file_id >= 0), "opened received file image file");
+
+        dset_id = H5Dopen2(file_id, "v", H5P_DEFAULT);
+        VRFY((dset_id >= 0), "opened data set");
+
+        dset_type_id = H5Dget_type(dset_id);
+        VRFY((dset_type_id >= 0), "obtained data set type");
+
+        tri_result = H5Tequal(dset_type_id, H5T_NATIVE_INT);
+        VRFY((tri_result == TRUE), "verified data set type");
+
+        space_id = H5Dget_space(dset_id);
+        VRFY((space_id >= 0), "opened data space");
+
+        space_ndims = H5Sget_simple_extent_ndims(space_id);
+        VRFY((space_ndims == 1), "verified data space num dims(1)");
+
+        space_ndims = H5Sget_simple_extent_dims(space_id, dims, NULL);
+        VRFY((space_ndims == 1), "verified data space num dims(2)");
+        VRFY((dims[0] == (hsize_t)mpi_size), "verified data space dims");
+
+        vector_ptr = (int *)HDmalloc((size_t)(mpi_size) * sizeof(int));
+        VRFY((vector_ptr != NULL), "allocated in memory rep of vector");
+
+        err = H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, (void *)vector_ptr);
+        VRFY((err >= 0), "read received vector.");
+
+        /* Every rank before us (i.e. all of them) must have stored its rank */
+        vector_ok = TRUE;
+        for (i = 0; i < mpi_size; i++)
+            if (vector_ptr[i] != i)
+                vector_ok = FALSE;
+        VRFY((vector_ok), "verified received vector.");
+
+        HDfree(vector_ptr);
+        vector_ptr = NULL;
+
+        /* 7) closes the core file and exit. */
+
+        err = H5Sclose(space_id);
+        VRFY((err >= 0), "closed data space.");
+
+        err = H5Dclose(dset_id);
+        VRFY((err >= 0), "closed data set.");
+
+        err = H5Fclose(file_id);
+        VRFY((err >= 0), "closed core file(1).");
+
+        err = H5Pclose(fapl_id);
+        VRFY((err >= 0), "closed fapl(1).");
+
+        HDfree(image_ptr);
+        image_ptr = NULL;
+        image_len = 0;
+    }
+    else {
+        /* 1) Await receipt of file image from process (i - 1). */
+
+        mpi_result = MPI_Recv((void *)(&image_len), (int)sizeof(ssize_t), MPI_BYTE, mpi_rank - 1, 0,
+                              MPI_COMM_WORLD, &rcvstat);
+        VRFY((mpi_result == MPI_SUCCESS), "received image size from process mpi_rank-1");
+
+        image_ptr = (void *)HDmalloc((size_t)image_len);
+        VRFY(image_ptr != NULL, "allocated file image receive buffer.");
+
+        mpi_result =
+            MPI_Recv((void *)image_ptr, (int)image_len, MPI_BYTE, mpi_rank - 1, 0, MPI_COMM_WORLD, &rcvstat);
+        VRFY((mpi_result == MPI_SUCCESS), "received file image from process mpi_rank-1");
+
+        /* 2) Open the image with the core file driver, verify that it
+         *    contains a vector v of length n, and that v[j] == j for
+         *    0 <= j < i, and that v[j] == -1 for i <= j < n
+         */
+        fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+        VRFY((fapl_id >= 0), "creating fapl");
+
+        err = H5Pset_fapl_core(fapl_id, (size_t)(64 * 1024), FALSE);
+        VRFY((err >= 0), "setting core file driver in fapl.");
+
+        err = H5Pset_file_image(fapl_id, image_ptr, (size_t)image_len);
+        VRFY((err >= 0), "set file image in fapl.");
+
+        file_id = H5Fopen(file_name, H5F_ACC_RDWR, fapl_id);
+        /* NOTE(review): unconditional error-stack dump -- looks like a
+         * debugging leftover; it prints even when H5Fopen succeeds. */
+        H5Eprint2(H5P_DEFAULT, stderr);
+        VRFY((file_id >= 0), "opened received file image file");
+
+        dset_id = H5Dopen2(file_id, "v", H5P_DEFAULT);
+        VRFY((dset_id >= 0), "opened data set");
+
+        dset_type_id = H5Dget_type(dset_id);
+        VRFY((dset_type_id >= 0), "obtained data set type");
+
+        tri_result = H5Tequal(dset_type_id, H5T_NATIVE_INT);
+        VRFY((tri_result == TRUE), "verified data set type");
+
+        space_id = H5Dget_space(dset_id);
+        VRFY((space_id >= 0), "opened data space");
+
+        space_ndims = H5Sget_simple_extent_ndims(space_id);
+        VRFY((space_ndims == 1), "verified data space num dims(1)");
+
+        space_ndims = H5Sget_simple_extent_dims(space_id, dims, NULL);
+        VRFY((space_ndims == 1), "verified data space num dims(2)");
+        VRFY((dims[0] == (hsize_t)mpi_size), "verified data space dims");
+
+        vector_ptr = (int *)HDmalloc((size_t)(mpi_size) * sizeof(int));
+        VRFY((vector_ptr != NULL), "allocated in memory rep of vector");
+
+        err = H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, (void *)vector_ptr);
+        VRFY((err >= 0), "read received vector.");
+
+        /* Ranks below us have stored their rank; the rest are still -1 */
+        vector_ok = TRUE;
+        for (i = 0; i < mpi_size; i++) {
+            if (i < mpi_rank) {
+                if (vector_ptr[i] != i)
+                    vector_ok = FALSE;
+            }
+            else {
+                if (vector_ptr[i] != -1)
+                    vector_ok = FALSE;
+            }
+        }
+        VRFY((vector_ok), "verified received vector.");
+
+        /* 3) Set v[i] = i in the core file. */
+
+        vector_ptr[mpi_rank] = mpi_rank;
+
+        err = H5Dwrite(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, (void *)vector_ptr);
+        VRFY((err >= 0), "wrote modified data to vector.");
+
+        HDfree(vector_ptr);
+        vector_ptr = NULL;
+
+        /* 4) Flush the core file and send it to process (mpi_rank + 1) % n. */
+
+        err = H5Fflush(file_id, H5F_SCOPE_GLOBAL);
+        VRFY((err >= 0), "flushed core file.");
+
+        image_len = H5Fget_file_image(file_id, NULL, (size_t)0);
+        VRFY((image_len > 0), "got (possibly modified) image file len");
+
+        /* image may have grown after the write; resize the buffer */
+        image_ptr = (void *)HDrealloc((void *)image_ptr, (size_t)image_len);
+        VRFY(image_ptr != NULL, "re-allocated file image buffer.");
+
+        bytes_read = H5Fget_file_image(file_id, image_ptr, (size_t)image_len);
+        VRFY(bytes_read == image_len, "wrote file into image buffer");
+
+        mpi_result = MPI_Ssend((void *)(&image_len), (int)sizeof(ssize_t), MPI_BYTE,
+                               (mpi_rank + 1) % mpi_size, 0, MPI_COMM_WORLD);
+        VRFY((mpi_result == MPI_SUCCESS), "sent image size to process (mpi_rank + 1) % mpi_size");
+
+        mpi_result = MPI_Ssend((void *)image_ptr, (int)image_len, MPI_BYTE, (mpi_rank + 1) % mpi_size, 0,
+                               MPI_COMM_WORLD);
+        VRFY((mpi_result == MPI_SUCCESS), "sent image to process (mpi_rank + 1) % mpi_size");
+
+        HDfree(image_ptr);
+        image_ptr = NULL;
+        image_len = 0;
+
+        /* 5) close the core file and exit. */
+
+        err = H5Sclose(space_id);
+        VRFY((err >= 0), "closed data space.");
+
+        err = H5Dclose(dset_id);
+        VRFY((err >= 0), "closed data set.");
+
+        err = H5Fclose(file_id);
+        VRFY((err >= 0), "closed core file(1).");
+
+        err = H5Pclose(fapl_id);
+        VRFY((err >= 0), "closed fapl(1).");
+    }
+
+    return;
+
+} /* file_image_daisy_chain_test() */
diff --git a/testpar/API/t_filter_read.c b/testpar/API/t_filter_read.c
new file mode 100644
index 0000000..f32c21b
--- /dev/null
+++ b/testpar/API/t_filter_read.c
@@ -0,0 +1,564 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * This verifies the correctness of parallel reading of a dataset that has been
+ * written serially using filters.
+ *
+ * Created by: Christian Chilan
+ * Date: 2007/05/15
+ */
+
+#include "hdf5.h"
+#include "testphdf5.h"
+
+#ifdef H5_HAVE_SZLIB_H
+#include "szlib.h"
+#endif
+
+static int mpi_size, mpi_rank;
+
+/* Chunk sizes */
+#define CHUNK_DIM1 7
+#define CHUNK_DIM2 27
+
+/* Sizes of the vertical hyperslabs. Total dataset size is
+ {HS_DIM1, HS_DIM2 * mpi_size } */
+#define HS_DIM1 200
+#define HS_DIM2 100
+
+/*
+ * Strip an optional "prefix:" (e.g. a file-driver prefix) from a file
+ * name.  Returns a pointer into the input string: the character after
+ * the first ':' if one is present, otherwise the whole name.  Never
+ * returns NULL for a valid input; does not allocate.
+ */
+const char *
+h5_rmprefix(const char *filename)
+{
+    const char *ret_ptr;
+
+    /* Searching for a single character: strchr, not strstr */
+    if ((ret_ptr = HDstrchr(filename, ':')) == NULL)
+        ret_ptr = filename;
+    else
+        ret_ptr++;
+
+    return (ret_ptr);
+}
+
+#ifdef H5_HAVE_FILTER_SZIP
+
+/*-------------------------------------------------------------------------
+ * Function: h5_szip_can_encode
+ *
+ * Purpose: Retrieve the filter config flags for szip, tell if
+ * encoder is available.
+ *
+ * Return: 1: decode+encode is enabled
+ * 0: only decode is enabled
+ * -1: other
+ *
+ * Programmer:
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+h5_szip_can_encode(void)
+{
+    unsigned int filter_config_flags = 0;
+    unsigned int config;
+
+    /* FIX: the original ignored H5Zget_filter_info's return value and
+     * could read filter_config_flags uninitialized on failure.  Treat a
+     * query failure as "other" (-1). */
+    if (H5Zget_filter_info(H5Z_FILTER_SZIP, &filter_config_flags) < 0)
+        return -1;
+
+    /* Only the encode/decode bits matter here; compute the mask once */
+    config = filter_config_flags & (H5Z_FILTER_CONFIG_ENCODE_ENABLED | H5Z_FILTER_CONFIG_DECODE_ENABLED);
+
+    if (config == (H5Z_FILTER_CONFIG_ENCODE_ENABLED | H5Z_FILTER_CONFIG_DECODE_ENABLED)) {
+        /* decode + encode is enabled */
+        return 1;
+    }
+    if (config == H5Z_FILTER_CONFIG_DECODE_ENABLED) {
+        /* decoder only: read but not write */
+        return 0;
+    }
+
+    /* filter present but neither bit set, or encode-only (???) */
+    return (-1);
+}
+#endif /* H5_HAVE_FILTER_SZIP */
+
+/*-------------------------------------------------------------------------
+ * Function: filter_read_internal
+ *
+ * Purpose: Tests parallel reading of a 2D dataset written serially using
+ * filters. During the parallel reading phase, the dataset is
+ * divided evenly among the processors in vertical hyperslabs.
+ *
+ * Programmer: Christian Chilan
+ * Tuesday, May 15, 2007
+ *
+ *-------------------------------------------------------------------------
+ */
+/* See the header comment above: rank 0 writes a 2-D dataset serially
+ * through `dcpl` (which carries the filter pipeline under test); then all
+ * ranks open the file with the MPI-IO VFD and each verifies its own
+ * vertical hyperslab.  `dset_size` is currently unused (storage-size
+ * checks are compiled out). */
+static void
+filter_read_internal(const char *filename, hid_t dcpl, hsize_t *dset_size)
+{
+    hid_t file, dataset;  /* HDF5 IDs */
+    hid_t access_plist;   /* Access property list ID */
+    hid_t sid, memspace;  /* Dataspace IDs */
+    hsize_t size[2];      /* Dataspace dimensions */
+    hsize_t hs_offset[2]; /* Hyperslab offset */
+    hsize_t hs_size[2];   /* Hyperslab size */
+    size_t i, j;          /* Local index variables */
+    char name[32] = "dataset";
+    herr_t hrc;         /* Error status */
+    int *points = NULL; /* Writing buffer for entire dataset */
+    int *check = NULL;  /* Reading buffer for selected hyperslab */
+
+    (void)dset_size; /* silence compiler; storage-size checks are #if 0'd out */
+
+    /* set up MPI parameters */
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* set sizes for dataset and hyperslabs: the dataset is
+     * HS_DIM1 x (HS_DIM2 * mpi_size); each rank owns one
+     * HS_DIM1 x HS_DIM2 vertical slab at column offset rank * HS_DIM2 */
+    hs_size[0] = size[0] = HS_DIM1;
+    hs_size[1] = HS_DIM2;
+
+    size[1] = hs_size[1] * (hsize_t)mpi_size;
+
+    hs_offset[0] = 0;
+    hs_offset[1] = hs_size[1] * (hsize_t)mpi_rank;
+
+    /* Create the data space */
+    sid = H5Screate_simple(2, size, NULL);
+    VRFY(sid >= 0, "H5Screate_simple");
+
+    /* Create buffers */
+    points = (int *)HDmalloc(size[0] * size[1] * sizeof(int));
+    VRFY(points != NULL, "HDmalloc");
+
+    check = (int *)HDmalloc(hs_size[0] * hs_size[1] * sizeof(int));
+    VRFY(check != NULL, "HDmalloc");
+
+    /* Initialize writing buffer with deterministic data (i + j + 7) so
+     * every rank can recompute the expected values independently */
+    for (i = 0; i < size[0]; i++)
+        for (j = 0; j < size[1]; j++)
+            points[i * size[1] + j] = (int)(i + j + 7);
+
+    VRFY(H5Pall_filters_avail(dcpl), "Incorrect filter availability");
+
+    /* Serial write phase: only rank 0 creates and writes the file */
+    if (MAINPROCESS) {
+
+        file = H5Fcreate(h5_rmprefix(filename), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+        VRFY(file >= 0, "H5Fcreate");
+
+        /* Create the dataset */
+        dataset = H5Dcreate2(file, name, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+        VRFY(dataset >= 0, "H5Dcreate2");
+
+        hrc = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points);
+        VRFY(hrc >= 0, "H5Dwrite");
+#if 0
+        *dset_size = H5Dget_storage_size(dataset);
+        VRFY(*dset_size > 0, "H5Dget_storage_size");
+#endif
+
+        hrc = H5Dclose(dataset);
+        VRFY(hrc >= 0, "H5Dclose");
+
+        hrc = H5Fclose(file);
+        VRFY(hrc >= 0, "H5Fclose");
+    }
+
+    /* All ranks wait for the serial write to finish before reading */
+    MPI_Barrier(MPI_COMM_WORLD);
+
+    /* Parallel read phase */
+    /* Set up MPIO file access property lists */
+    access_plist = H5Pcreate(H5P_FILE_ACCESS);
+    VRFY((access_plist >= 0), "H5Pcreate");
+
+    hrc = H5Pset_fapl_mpio(access_plist, MPI_COMM_WORLD, MPI_INFO_NULL);
+    VRFY((hrc >= 0), "H5Pset_fapl_mpio");
+
+    /* Open the file */
+    file = H5Fopen(filename, H5F_ACC_RDWR, access_plist);
+    VRFY((file >= 0), "H5Fopen");
+
+    dataset = H5Dopen2(file, name, H5P_DEFAULT);
+    VRFY((dataset >= 0), "H5Dopen2");
+
+    /* Select this rank's vertical hyperslab in the file space */
+    hrc = H5Sselect_hyperslab(sid, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL);
+    VRFY(hrc >= 0, "H5Sselect_hyperslab");
+
+    memspace = H5Screate_simple(2, hs_size, NULL);
+    VRFY(memspace >= 0, "H5Screate_simple");
+
+    hrc = H5Dread(dataset, H5T_NATIVE_INT, memspace, sid, H5P_DEFAULT, check);
+    VRFY(hrc >= 0, "H5Dread");
+
+    /* Check that the values read are the same as the values written */
+    for (i = 0; i < hs_size[0]; i++) {
+        for (j = 0; j < hs_size[1]; j++) {
+            if (points[i * size[1] + (size_t)hs_offset[1] + j] != check[i * hs_size[1] + j]) {
+                HDfprintf(stderr, "    Read different values than written.\n");
+                HDfprintf(stderr, "    At index %lu,%lu\n", (unsigned long)(i),
+                          (unsigned long)(hs_offset[1] + j));
+                HDfprintf(stderr, "    At original: %d\n",
+                          (int)points[i * size[1] + (size_t)hs_offset[1] + j]);
+                HDfprintf(stderr, "    At returned: %d\n", (int)check[i * hs_size[1] + j]);
+                VRFY(FALSE, "");
+            }
+        }
+    }
+#if 0
+    /* Get the storage size of the dataset */
+    *dset_size = H5Dget_storage_size(dataset);
+    VRFY(*dset_size != 0, "H5Dget_storage_size");
+#endif
+
+    /* Clean up objects used for this test */
+    hrc = H5Dclose(dataset);
+    VRFY(hrc >= 0, "H5Dclose");
+
+    hrc = H5Sclose(sid);
+    VRFY(hrc >= 0, "H5Sclose");
+
+    hrc = H5Sclose(memspace);
+    VRFY(hrc >= 0, "H5Sclose");
+
+    hrc = H5Pclose(access_plist);
+    VRFY(hrc >= 0, "H5Pclose");
+
+    hrc = H5Fclose(file);
+    VRFY(hrc >= 0, "H5Fclose");
+
+    HDfree(points);
+    HDfree(check);
+
+    MPI_Barrier(MPI_COMM_WORLD);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: test_filter_read
+ *
+ * Purpose: Tests parallel reading of datasets written serially using
+ * several (combinations of) filters.
+ *
+ * Programmer: Christian Chilan
+ * Tuesday, May 15, 2007
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+
+void
+test_filter_read(void)
+{
+    hid_t dc;                                               /* HDF5 IDs */
+    const hsize_t chunk_size[2] = {CHUNK_DIM1, CHUNK_DIM2}; /* Chunk dimensions */
+#if 0
+    hsize_t null_size; /* Size of dataset without filters */
+#endif
+    unsigned chunk_opts;                    /* Chunk options */
+    unsigned disable_partial_chunk_filters; /* Whether filters are disabled on partial chunks */
+    herr_t hrc;
+    const char *filename;
+#ifdef H5_HAVE_FILTER_FLETCHER32
+    hsize_t fletcher32_size; /* Size of dataset with Fletcher32 checksum */
+#endif
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+    hsize_t deflate_size; /* Size of dataset with deflate filter */
+#endif /* H5_HAVE_FILTER_DEFLATE */
+
+#ifdef H5_HAVE_FILTER_SZIP
+    hsize_t szip_size; /* Size of dataset with szip filter */
+    unsigned szip_options_mask = H5_SZIP_NN_OPTION_MASK;
+    unsigned szip_pixels_per_block = 4;
+#endif /* H5_HAVE_FILTER_SZIP */
+
+#if 0
+    hsize_t shuffle_size; /* Size of dataset with shuffle filter */
+#endif
+
+#if (defined H5_HAVE_FILTER_DEFLATE || defined H5_HAVE_FILTER_SZIP)
+    hsize_t combo_size; /* Size of dataset with multiple filters */
+#endif /* H5_HAVE_FILTER_DEFLATE || H5_HAVE_FILTER_SZIP */
+
+    filename = PARATESTFILE /* GetTestParameters() */;
+
+    if (VERBOSE_MED)
+        HDprintf("Parallel reading of dataset written with filters %s\n", filename);
+
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_FILTERS)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf(
+                "    API functions for basic file, dataset or filter aren't supported with this connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    /*----------------------------------------------------------
+     * STEP 0: Test without filters.
+     *----------------------------------------------------------
+     */
+    dc = H5Pcreate(H5P_DATASET_CREATE);
+    VRFY(dc >= 0, "H5Pcreate");
+
+    hrc = H5Pset_chunk(dc, 2, chunk_size);
+    VRFY(hrc >= 0, "H5Pset_chunk");
+
+    filter_read_internal(filename, dc, /* &null_size */ NULL);
+
+    /* Clean up objects used for this test */
+    hrc = H5Pclose(dc);
+    VRFY(hrc >= 0, "H5Pclose");
+
+    /* Run steps 1-3 both with and without filters disabled on partial chunks */
+    for (disable_partial_chunk_filters = 0; disable_partial_chunk_filters <= 1;
+         disable_partial_chunk_filters++) {
+        /* Set chunk options appropriately: query the default chunk options
+         * from a throwaway DCPL, then OR in the partial-chunk flag when
+         * requested; chunk_opts is applied to each per-step DCPL below */
+        dc = H5Pcreate(H5P_DATASET_CREATE);
+        VRFY(dc >= 0, "H5Pcreate");
+
+        /* FIX: VRFY message said "H5Pset_filter" for an H5Pset_chunk call */
+        hrc = H5Pset_chunk(dc, 2, chunk_size);
+        VRFY(hrc >= 0, "H5Pset_chunk");
+
+        hrc = H5Pget_chunk_opts(dc, &chunk_opts);
+        VRFY(hrc >= 0, "H5Pget_chunk_opts");
+
+        if (disable_partial_chunk_filters)
+            chunk_opts |= H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS;
+
+        hrc = H5Pclose(dc);
+        VRFY(hrc >= 0, "H5Pclose");
+
+        /*----------------------------------------------------------
+         * STEP 1: Test Fletcher32 Checksum by itself.
+         *----------------------------------------------------------
+         */
+#ifdef H5_HAVE_FILTER_FLETCHER32
+
+        /* FIX: VRFY message said "H5Pset_filter" for an H5Pcreate call */
+        dc = H5Pcreate(H5P_DATASET_CREATE);
+        VRFY(dc >= 0, "H5Pcreate");
+
+        hrc = H5Pset_chunk(dc, 2, chunk_size);
+        VRFY(hrc >= 0, "H5Pset_chunk");
+
+        hrc = H5Pset_chunk_opts(dc, chunk_opts);
+        VRFY(hrc >= 0, "H5Pset_chunk_opts");
+
+        hrc = H5Pset_filter(dc, H5Z_FILTER_FLETCHER32, 0, 0, NULL);
+        VRFY(hrc >= 0, "H5Pset_filter");
+
+        filter_read_internal(filename, dc, &fletcher32_size);
+#if 0
+        /* FIX: null_size is only declared under "#if 0" above, so this
+         * comparison cannot compile; disable it the same way the other
+         * storage-size checks in this file are disabled */
+        VRFY(fletcher32_size > null_size, "Size after checksumming is incorrect.");
+#endif
+
+        /* Clean up objects used for this test */
+        hrc = H5Pclose(dc);
+        VRFY(hrc >= 0, "H5Pclose");
+
+#endif /* H5_HAVE_FILTER_FLETCHER32 */
+
+        /*----------------------------------------------------------
+         * STEP 2: Test deflation by itself.
+         *----------------------------------------------------------
+         */
+#ifdef H5_HAVE_FILTER_DEFLATE
+
+        dc = H5Pcreate(H5P_DATASET_CREATE);
+        VRFY(dc >= 0, "H5Pcreate");
+
+        hrc = H5Pset_chunk(dc, 2, chunk_size);
+        VRFY(hrc >= 0, "H5Pset_chunk");
+
+        hrc = H5Pset_chunk_opts(dc, chunk_opts);
+        VRFY(hrc >= 0, "H5Pset_chunk_opts");
+
+        hrc = H5Pset_deflate(dc, 6);
+        VRFY(hrc >= 0, "H5Pset_deflate");
+
+        filter_read_internal(filename, dc, &deflate_size);
+
+        /* Clean up objects used for this test */
+        hrc = H5Pclose(dc);
+        VRFY(hrc >= 0, "H5Pclose");
+
+#endif /* H5_HAVE_FILTER_DEFLATE */
+
+        /*----------------------------------------------------------
+         * STEP 3: Test szip compression by itself.
+         *----------------------------------------------------------
+         */
+#ifdef H5_HAVE_FILTER_SZIP
+        if (h5_szip_can_encode() == 1) {
+            dc = H5Pcreate(H5P_DATASET_CREATE);
+            VRFY(dc >= 0, "H5Pcreate");
+
+            hrc = H5Pset_chunk(dc, 2, chunk_size);
+            VRFY(hrc >= 0, "H5Pset_chunk");
+
+            hrc = H5Pset_chunk_opts(dc, chunk_opts);
+            VRFY(hrc >= 0, "H5Pset_chunk_opts");
+
+            hrc = H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block);
+            VRFY(hrc >= 0, "H5Pset_szip");
+
+            filter_read_internal(filename, dc, &szip_size);
+
+            /* Clean up objects used for this test */
+            hrc = H5Pclose(dc);
+            VRFY(hrc >= 0, "H5Pclose");
+        }
+#endif /* H5_HAVE_FILTER_SZIP */
+    } /* end for */
+
+    /*----------------------------------------------------------
+     * STEP 4: Test shuffling by itself.
+     *----------------------------------------------------------
+     */
+
+    dc = H5Pcreate(H5P_DATASET_CREATE);
+    VRFY(dc >= 0, "H5Pcreate");
+
+    hrc = H5Pset_chunk(dc, 2, chunk_size);
+    VRFY(hrc >= 0, "H5Pset_chunk");
+
+    hrc = H5Pset_shuffle(dc);
+    VRFY(hrc >= 0, "H5Pset_shuffle");
+
+    filter_read_internal(filename, dc, /* &shuffle_size */ NULL);
+#if 0
+    VRFY(shuffle_size == null_size, "Shuffled size not the same as uncompressed size.");
+#endif
+
+    /* Clean up objects used for this test */
+    hrc = H5Pclose(dc);
+    VRFY(hrc >= 0, "H5Pclose");
+
+    /*----------------------------------------------------------
+     * STEP 5: Test shuffle + deflate + checksum in any order.
+     *----------------------------------------------------------
+     */
+#ifdef H5_HAVE_FILTER_DEFLATE
+    /* Testing shuffle+deflate+checksum filters (checksum first) */
+    dc = H5Pcreate(H5P_DATASET_CREATE);
+    VRFY(dc >= 0, "H5Pcreate");
+
+    hrc = H5Pset_chunk(dc, 2, chunk_size);
+    VRFY(hrc >= 0, "H5Pset_chunk");
+
+    hrc = H5Pset_fletcher32(dc);
+    VRFY(hrc >= 0, "H5Pset_fletcher32");
+
+    hrc = H5Pset_shuffle(dc);
+    VRFY(hrc >= 0, "H5Pset_shuffle");
+
+    hrc = H5Pset_deflate(dc, 6);
+    VRFY(hrc >= 0, "H5Pset_deflate");
+
+    filter_read_internal(filename, dc, &combo_size);
+
+    /* Clean up objects used for this test */
+    hrc = H5Pclose(dc);
+    VRFY(hrc >= 0, "H5Pclose");
+
+    /* Testing shuffle+deflate+checksum filters (checksum last) */
+    dc = H5Pcreate(H5P_DATASET_CREATE);
+    VRFY(dc >= 0, "H5Pcreate");
+
+    hrc = H5Pset_chunk(dc, 2, chunk_size);
+    VRFY(hrc >= 0, "H5Pset_chunk");
+
+    hrc = H5Pset_shuffle(dc);
+    VRFY(hrc >= 0, "H5Pset_shuffle");
+
+    hrc = H5Pset_deflate(dc, 6);
+    VRFY(hrc >= 0, "H5Pset_deflate");
+
+    hrc = H5Pset_fletcher32(dc);
+    VRFY(hrc >= 0, "H5Pset_fletcher32");
+
+    filter_read_internal(filename, dc, &combo_size);
+
+    /* Clean up objects used for this test */
+    hrc = H5Pclose(dc);
+    VRFY(hrc >= 0, "H5Pclose");
+
+#endif /* H5_HAVE_FILTER_DEFLATE */
+
+    /*----------------------------------------------------------
+     * STEP 6: Test shuffle + szip + checksum in any order.
+     *----------------------------------------------------------
+     */
+#ifdef H5_HAVE_FILTER_SZIP
+
+    /* Testing shuffle+szip(with encoder)+checksum filters(checksum first) */
+    dc = H5Pcreate(H5P_DATASET_CREATE);
+    VRFY(dc >= 0, "H5Pcreate");
+
+    hrc = H5Pset_chunk(dc, 2, chunk_size);
+    VRFY(hrc >= 0, "H5Pset_chunk");
+
+    hrc = H5Pset_fletcher32(dc);
+    VRFY(hrc >= 0, "H5Pset_fletcher32");
+
+    hrc = H5Pset_shuffle(dc);
+    VRFY(hrc >= 0, "H5Pset_shuffle");
+
+    /* Make sure encoding is enabled */
+    if (h5_szip_can_encode() == 1) {
+        hrc = H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block);
+        VRFY(hrc >= 0, "H5Pset_szip");
+
+        filter_read_internal(filename, dc, &combo_size);
+    }
+
+    /* Clean up objects used for this test */
+    hrc = H5Pclose(dc);
+    VRFY(hrc >= 0, "H5Pclose");
+
+    /* Testing shuffle+szip(with encoder)+checksum filters(checksum last) */
+    /* Make sure encoding is enabled */
+    if (h5_szip_can_encode() == 1) {
+        dc = H5Pcreate(H5P_DATASET_CREATE);
+        VRFY(dc >= 0, "H5Pcreate");
+
+        hrc = H5Pset_chunk(dc, 2, chunk_size);
+        VRFY(hrc >= 0, "H5Pset_chunk");
+
+        hrc = H5Pset_shuffle(dc);
+        VRFY(hrc >= 0, "H5Pset_shuffle");
+
+        hrc = H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block);
+        VRFY(hrc >= 0, "H5Pset_szip");
+
+        hrc = H5Pset_fletcher32(dc);
+        VRFY(hrc >= 0, "H5Pset_fletcher32");
+
+        filter_read_internal(filename, dc, &combo_size);
+
+        /* Clean up objects used for this test */
+        hrc = H5Pclose(dc);
+        VRFY(hrc >= 0, "H5Pclose");
+    }
+
+#endif /* H5_HAVE_FILTER_SZIP */
+}
diff --git a/testpar/API/t_mdset.c b/testpar/API/t_mdset.c
new file mode 100644
index 0000000..e11818f
--- /dev/null
+++ b/testpar/API/t_mdset.c
@@ -0,0 +1,2814 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "hdf5.h"
+#include "testphdf5.h"
+
+#if 0
+#include "H5Dprivate.h"
+#include "H5private.h"
+#endif
+
+#define DIM 2
+#define SIZE 32
+#define NDATASET 4
+#define GROUP_DEPTH 32
+enum obj_type { is_group, is_dset };
+
+static int get_size(void);
+static void write_dataset(hid_t, hid_t, hid_t);
+static int read_dataset(hid_t, hid_t, hid_t);
+static void create_group_recursive(hid_t, hid_t, hid_t, int);
+static void recursive_read_group(hid_t, hid_t, hid_t, int);
+static void group_dataset_read(hid_t fid, int mpi_rank, int m);
+static void write_attribute(hid_t, int, int);
+static int read_attribute(hid_t, int, int);
+static int check_value(DATATYPE *, DATATYPE *, int);
+static void get_slab(hsize_t[], hsize_t[], hsize_t[], hsize_t[], int);
+
+/*
+ * The size value computed by this function is used extensively in
+ * configuring tests for the current number of processes.
+ *
+ * This function was created as part of an effort to allow the
+ * test functions in this file to run on an arbitrary number of
+ * processors.
+ * JRM - 8/11/04
+ */
+
+static int
+get_size(void)
+{
+ int mpi_rank;
+ int mpi_size;
+ int size = SIZE; /* default grid edge length */
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank); /* needed for VRFY */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ /* If there are more ranks than SIZE, grow the edge length to the rank
+ * count, rounded up to the next even number so the grid divides evenly
+ * (callers use chunk dimensions of size/2). */
+ if (mpi_size > size) {
+ if ((mpi_size % 2) == 0) {
+ size = mpi_size;
+ }
+ else {
+ size = mpi_size + 1;
+ }
+ }
+
+ /* Postconditions relied on by every test in this file */
+ VRFY((mpi_size <= size), "mpi_size <= size");
+ VRFY(((size % 2) == 0), "size isn't even");
+
+ return (size);
+
+} /* get_size() */
+
+/*
+ * Example of using PHDF5 to create a zero sized dataset.
+ *
+ */
+void
+zero_dim_dset(void)
+{
+ int mpi_size, mpi_rank;
+ const char *filename;
+ hid_t fid, plist, dcpl, dsid, sid;
+ hsize_t dim, chunk_dim;
+ herr_t ret;
+ int data[1]; /* scratch buffer; never actually transferred (0 elements) */
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file or dataset aren't supported with this connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+
+ plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ VRFY((plist >= 0), "create_faccess_plist succeeded");
+
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, plist);
+ VRFY((fid >= 0), "H5Fcreate succeeded");
+ ret = H5Pclose(plist);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((dcpl >= 0), "failed H5Pcreate");
+
+ /* Set 1 chunk size */
+ chunk_dim = 1;
+ ret = H5Pset_chunk(dcpl, 1, &chunk_dim);
+ VRFY((ret >= 0), "failed H5Pset_chunk");
+
+ /* Create 1D dataspace with 0 dim size */
+ dim = 0;
+ sid = H5Screate_simple(1, &dim, NULL);
+ VRFY((sid >= 0), "failed H5Screate_simple");
+
+ /* Create chunked dataset */
+ dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ VRFY((dsid >= 0), "failed H5Dcreate2");
+
+ /* write 0 elements from dataset (sid is both memory and file space) */
+ ret = H5Dwrite(dsid, H5T_NATIVE_INT, sid, sid, H5P_DEFAULT, data);
+ VRFY((ret >= 0), "failed H5Dwrite");
+
+ /* Read 0 elements from dataset */
+ ret = H5Dread(dsid, H5T_NATIVE_INT, sid, sid, H5P_DEFAULT, data);
+ VRFY((ret >= 0), "failed H5Dread");
+
+ /* NOTE(review): close return values are unchecked here — presumably
+ * best-effort cleanup; confirm against the file's other tests. */
+ H5Pclose(dcpl);
+ H5Dclose(dsid);
+ H5Sclose(sid);
+ H5Fclose(fid);
+}
+
+/*
+ * Example of using PHDF5 to create ndatasets datasets. Each process writes
+ * a slab of array to the file. The groups/datasets created here are also
+ * read back by later tests.
+ */
+void
+multiple_dset_write(void)
+{
+ int i, j, n, mpi_size, mpi_rank, size;
+ hid_t iof, plist, dataset, memspace, filespace;
+ hid_t dcpl; /* Dataset creation property list */
+ hsize_t chunk_origin[DIM];
+ hsize_t chunk_dims[DIM], file_dims[DIM];
+ hsize_t count[DIM] = {1, 1};
+ double *outme = NULL;
+ double fill = 1.0; /* Fill value */
+ char dname[100];
+ herr_t ret;
+#if 0
+ const H5Ptest_param_t *pt;
+#endif
+ char *filename;
+ int ndatasets;
+
+#if 0
+ pt = GetTestParameters();
+#endif
+ /* filename = pt->name; */ filename = PARATESTFILE;
+ /* ndatasets = pt->count; */ ndatasets = NDATASETS;
+
+ size = get_size();
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file or dataset aren't supported with this connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ outme = HDmalloc((size_t)size * (size_t)size * sizeof(double));
+ VRFY((outme != NULL), "HDmalloc succeeded for outme");
+
+ plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ VRFY((plist >= 0), "create_faccess_plist succeeded");
+ iof = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, plist);
+ VRFY((iof >= 0), "H5Fcreate succeeded");
+ ret = H5Pclose(plist);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /* decide the hyperslab according to process number. */
+ get_slab(chunk_origin, chunk_dims, count, file_dims, size);
+
+ memspace = H5Screate_simple(DIM, chunk_dims, NULL);
+ filespace = H5Screate_simple(DIM, file_dims, NULL);
+ ret = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, chunk_origin, chunk_dims, count, chunk_dims);
+ VRFY((ret >= 0), "mdata hyperslab selection");
+
+ /* Create a dataset creation property list */
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((dcpl >= 0), "dataset creation property list succeeded");
+
+ ret = H5Pset_fill_value(dcpl, H5T_NATIVE_DOUBLE, &fill);
+ VRFY((ret >= 0), "set fill-value succeeded");
+
+ for (n = 0; n < ndatasets; n++) {
+ HDsnprintf(dname, sizeof(dname), "dataset %d", n);
+ dataset = H5Dcreate2(iof, dname, H5T_NATIVE_DOUBLE, filespace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ VRFY((dataset > 0), dname);
+
+ /* calculate data to write: value encodes dataset index and rank */
+ for (i = 0; i < size; i++)
+ for (j = 0; j < size; j++)
+ outme[(i * size) + j] = n * 1000 + mpi_rank;
+
+ /* Fix: the H5Dwrite/H5Dclose return values were previously
+ * discarded, so a failed write or close went undetected; verify
+ * them like every other call in this test. */
+ ret = H5Dwrite(dataset, H5T_NATIVE_DOUBLE, memspace, filespace, H5P_DEFAULT, outme);
+ VRFY((ret >= 0), "H5Dwrite succeeded");
+
+ ret = H5Dclose(dataset);
+ VRFY((ret >= 0), "H5Dclose succeeded");
+#ifdef BARRIER_CHECKS
+ if (!((n + 1) % 10)) {
+ HDprintf("created %d datasets\n", n + 1);
+ MPI_Barrier(MPI_COMM_WORLD);
+ }
+#endif /* BARRIER_CHECKS */
+ }
+
+ H5Sclose(filespace);
+ H5Sclose(memspace);
+ H5Pclose(dcpl);
+ H5Fclose(iof);
+
+ HDfree(outme);
+}
+
+/* Example of using PHDF5 to create, write, and read a compact dataset.
+ * Every rank writes the same data collectively, then the file is reopened
+ * read-only and the data verified on every rank.
+ */
+void
+compact_dataset(void)
+{
+ int i, j, mpi_size, mpi_rank, size, err_num = 0;
+ hid_t iof, plist, dcpl, dxpl, dataset, filespace;
+ hsize_t file_dims[DIM];
+ double *outme;
+ double *inme;
+ char dname[] = "dataset";
+ herr_t ret;
+ const char *filename;
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+ hbool_t prop_value;
+#endif
+
+ size = get_size();
+
+ for (i = 0; i < DIM; i++)
+ file_dims[i] = (hsize_t)size;
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file or dataset aren't supported with this connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ outme = HDmalloc((size_t)((size_t)size * (size_t)size * sizeof(double)));
+ VRFY((outme != NULL), "HDmalloc succeeded for outme");
+
+ inme = HDmalloc((size_t)size * (size_t)size * sizeof(double));
+ /* Fix: previously re-checked outme instead of inme (copy-paste bug),
+ * so a failed inme allocation would go undetected. */
+ VRFY((inme != NULL), "HDmalloc succeeded for inme");
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+ VRFY((mpi_size <= size), "mpi_size <= size");
+
+ plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ iof = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, plist);
+
+ /* Define data space */
+ filespace = H5Screate_simple(DIM, file_dims, NULL);
+
+ /* Create a compact dataset */
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((dcpl >= 0), "dataset creation property list succeeded");
+ ret = H5Pset_layout(dcpl, H5D_COMPACT);
+ /* Fix: previously verified dcpl instead of the H5Pset_layout return
+ * value, so a failed layout change was never caught. */
+ VRFY((ret >= 0), "set property list for compact dataset");
+ ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY);
+ VRFY((ret >= 0), "set space allocation time for compact dataset");
+
+ dataset = H5Dcreate2(iof, dname, H5T_NATIVE_DOUBLE, filespace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ VRFY((dataset >= 0), "H5Dcreate2 succeeded");
+
+ /* set up the collective transfer properties list */
+ dxpl = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((dxpl >= 0), "");
+ ret = H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pcreate xfer succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ ret = H5Pset_dxpl_mpio_collective_opt(dxpl, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "set independent IO collectively succeeded");
+ }
+
+ /* Recalculate data to write. Each process writes the same data. */
+ for (i = 0; i < size; i++)
+ for (j = 0; j < size; j++)
+ outme[(i * size) + j] = (i + j) * 1000;
+
+ ret = H5Dwrite(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, dxpl, outme);
+ VRFY((ret >= 0), "H5Dwrite succeeded");
+
+ H5Pclose(dcpl);
+ H5Pclose(plist);
+ H5Dclose(dataset);
+ H5Sclose(filespace);
+ H5Fclose(iof);
+
+ /* Open the file and dataset, read and compare the data. */
+ plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ iof = H5Fopen(filename, H5F_ACC_RDONLY, plist);
+ VRFY((iof >= 0), "H5Fopen succeeded");
+
+ /* set up the collective transfer properties list */
+ dxpl = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((dxpl >= 0), "");
+ ret = H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pcreate xfer succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ ret = H5Pset_dxpl_mpio_collective_opt(dxpl, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "set independent IO collectively succeeded");
+ }
+
+ dataset = H5Dopen2(iof, dname, H5P_DEFAULT);
+ VRFY((dataset >= 0), "H5Dopen2 succeeded");
+
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+ prop_value = H5D_XFER_COLL_RANK0_BCAST_DEF;
+ ret = H5Pinsert2(dxpl, H5D_XFER_COLL_RANK0_BCAST_NAME, H5D_XFER_COLL_RANK0_BCAST_SIZE, &prop_value, NULL,
+ NULL, NULL, NULL, NULL, NULL);
+ VRFY((ret >= 0), "H5Pinsert2() succeeded");
+#endif /* H5_HAVE_INSTRUMENTED_LIBRARY */
+
+ ret = H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, dxpl, inme);
+ VRFY((ret >= 0), "H5Dread succeeded");
+
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+ prop_value = FALSE;
+ ret = H5Pget(dxpl, H5D_XFER_COLL_RANK0_BCAST_NAME, &prop_value);
+ VRFY((ret >= 0), "H5Pget succeeded");
+ /* NOTE(review): this VRFY requires dxfer_coll_type to be collective;
+ * it will trip when the test is configured for independent I/O —
+ * confirm that is the intended behavior. */
+ VRFY((prop_value == FALSE && dxfer_coll_type == DXFER_COLLECTIVE_IO),
+ "rank 0 Bcast optimization was performed for a compact dataset");
+#endif /* H5_HAVE_INSTRUMENTED_LIBRARY */
+
+ /* Verify data value */
+ for (i = 0; i < size; i++)
+ for (j = 0; j < size; j++)
+ if (!H5_DBL_ABS_EQUAL(inme[(i * size) + j], outme[(i * size) + j]))
+ if (err_num++ < MAX_ERR_REPORT || VERBOSE_MED)
+ HDprintf("Dataset Verify failed at [%d][%d]: expect %f, got %f\n", i, j,
+ outme[(i * size) + j], inme[(i * size) + j]);
+
+ H5Pclose(plist);
+ H5Pclose(dxpl);
+ H5Dclose(dataset);
+ H5Fclose(iof);
+ HDfree(inme);
+ HDfree(outme);
+}
+
+/*
+ * Example of using PHDF5 to create, write, and read dataset and attribute
+ * of Null dataspace.
+ */
+void
+null_dataset(void)
+{
+ int mpi_size, mpi_rank;
+ hid_t iof, plist, dxpl, dataset, attr, sid;
+ unsigned uval = 2; /* Buffer for writing to dataset */
+ int val = 1; /* Buffer for writing to attribute */
+ hssize_t nelem;
+ char dname[] = "dataset";
+ char attr_name[] = "attribute";
+ herr_t ret;
+ const char *filename;
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset, or attribute aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+
+ plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ iof = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, plist);
+
+ /* Define data space (H5S_NULL: no elements at all) */
+ sid = H5Screate(H5S_NULL);
+
+ /* Check that the null dataspace actually has 0 elements */
+ nelem = H5Sget_simple_extent_npoints(sid);
+ VRFY((nelem == 0), "H5Sget_simple_extent_npoints");
+
+ /* Create a dataset on the null dataspace */
+ dataset = H5Dcreate2(iof, dname, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((dataset >= 0), "H5Dcreate2 succeeded");
+
+ /* set up the collective transfer properties list */
+ dxpl = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((dxpl >= 0), "");
+ ret = H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pcreate xfer succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ ret = H5Pset_dxpl_mpio_collective_opt(dxpl, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "set independent IO collectively succeeded");
+ }
+
+ /* Write "nothing" to the dataset(with type conversion) */
+ ret = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, &uval);
+ VRFY((ret >= 0), "H5Dwrite succeeded");
+
+ /* Create an attribute for the group */
+ attr = H5Acreate2(dataset, attr_name, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((attr >= 0), "H5Acreate2");
+
+ /* Write "nothing" to the attribute(with type conversion) */
+ ret = H5Awrite(attr, H5T_NATIVE_INT, &val);
+ VRFY((ret >= 0), "H5Awrite");
+
+ H5Aclose(attr);
+ H5Dclose(dataset);
+ H5Pclose(plist);
+ H5Sclose(sid);
+ H5Fclose(iof);
+
+ /* Open the file and dataset, read and compare the data. */
+ plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ iof = H5Fopen(filename, H5F_ACC_RDONLY, plist);
+ VRFY((iof >= 0), "H5Fopen succeeded");
+
+ /* set up the collective transfer properties list */
+ dxpl = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((dxpl >= 0), "");
+ ret = H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pcreate xfer succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ ret = H5Pset_dxpl_mpio_collective_opt(dxpl, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "set independent IO collectively succeeded");
+ }
+
+ dataset = H5Dopen2(iof, dname, H5P_DEFAULT);
+ VRFY((dataset >= 0), "H5Dopen2 succeeded");
+
+ /* Try reading from the dataset(make certain our buffer is unmodified) */
+ ret = H5Dread(dataset, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, dxpl, &uval);
+ VRFY((ret >= 0), "H5Dread");
+ VRFY((uval == 2), "H5Dread");
+
+ /* Open the attribute for the dataset */
+ attr = H5Aopen(dataset, attr_name, H5P_DEFAULT);
+ VRFY((attr >= 0), "H5Aopen");
+
+ /* Try reading from the attribute(make certain our buffer is unmodified) */
+ ret = H5Aread(attr, H5T_NATIVE_INT, &val);
+ VRFY((ret >= 0), "H5Aread");
+ VRFY((val == 1), "H5Aread");
+
+ H5Pclose(plist);
+ H5Pclose(dxpl);
+ H5Aclose(attr);
+ H5Dclose(dataset);
+ H5Fclose(iof);
+}
+
+/* Example of using PHDF5 to create "large" datasets. (>2GB, >4GB, >8GB)
+ * Actual data is _not_ written to these datasets. Dataspaces are exact
+ * sizes(2GB, 4GB, etc.), but the metadata for the file pushes the file over
+ * the boundary of interest.
+ */
+void
+big_dataset(void)
+{
+ int mpi_size, mpi_rank; /* MPI info */
+ hid_t iof, /* File ID */
+ fapl, /* File access property list ID */
+ dataset, /* Dataset ID */
+ filespace; /* Dataset's dataspace ID */
+ hsize_t file_dims[4]; /* Dimensions of dataspace */
+ char dname[] = "dataset"; /* Name of dataset */
+#if 0
+ MPI_Offset file_size; /* Size of file on disk */
+#endif
+ herr_t ret; /* Generic return value */
+ const char *filename;
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file or dataset aren't supported with this connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /* Verify MPI_Offset can handle larger than 2GB sizes */
+ VRFY((sizeof(MPI_Offset) > 4), "sizeof(MPI_Offset)>4");
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+
+ fapl = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ VRFY((fapl >= 0), "create_faccess_plist succeeded");
+
+ /*
+ * Create >2GB HDF5 file. The same dataset name is reused for each
+ * size because H5F_ACC_TRUNC recreates the file from scratch.
+ */
+ iof = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ VRFY((iof >= 0), "H5Fcreate succeeded");
+
+ /* Define dataspace for 2GB dataspace: 2 * 1024^3 one-byte elements */
+ file_dims[0] = 2;
+ file_dims[1] = 1024;
+ file_dims[2] = 1024;
+ file_dims[3] = 1024;
+ filespace = H5Screate_simple(4, file_dims, NULL);
+ VRFY((filespace >= 0), "H5Screate_simple succeeded");
+
+ dataset = H5Dcreate2(iof, dname, H5T_NATIVE_UCHAR, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((dataset >= 0), "H5Dcreate2 succeeded");
+
+ /* Close all file objects */
+ ret = H5Dclose(dataset);
+ VRFY((ret >= 0), "H5Dclose succeeded");
+ ret = H5Sclose(filespace);
+ VRFY((ret >= 0), "H5Sclose succeeded");
+ ret = H5Fclose(iof);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+
+#if 0
+ /* Check that file of the correct size was created */
+ file_size = h5_get_file_size(filename, fapl);
+ VRFY((file_size == 2147485696ULL), "File is correct size(~2GB)");
+#endif
+
+ /*
+ * Create >4GB HDF5 file
+ */
+ iof = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ VRFY((iof >= 0), "H5Fcreate succeeded");
+
+ /* Define dataspace for 4GB dataspace */
+ file_dims[0] = 4;
+ file_dims[1] = 1024;
+ file_dims[2] = 1024;
+ file_dims[3] = 1024;
+ filespace = H5Screate_simple(4, file_dims, NULL);
+ VRFY((filespace >= 0), "H5Screate_simple succeeded");
+
+ dataset = H5Dcreate2(iof, dname, H5T_NATIVE_UCHAR, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((dataset >= 0), "H5Dcreate2 succeeded");
+
+ /* Close all file objects */
+ ret = H5Dclose(dataset);
+ VRFY((ret >= 0), "H5Dclose succeeded");
+ ret = H5Sclose(filespace);
+ VRFY((ret >= 0), "H5Sclose succeeded");
+ ret = H5Fclose(iof);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+#if 0
+ /* Check that file of the correct size was created */
+ file_size = h5_get_file_size(filename, fapl);
+ VRFY((file_size == 4294969344ULL), "File is correct size(~4GB)");
+#endif
+
+ /*
+ * Create >8GB HDF5 file
+ */
+ iof = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ VRFY((iof >= 0), "H5Fcreate succeeded");
+
+ /* Define dataspace for 8GB dataspace */
+ file_dims[0] = 8;
+ file_dims[1] = 1024;
+ file_dims[2] = 1024;
+ file_dims[3] = 1024;
+ filespace = H5Screate_simple(4, file_dims, NULL);
+ VRFY((filespace >= 0), "H5Screate_simple succeeded");
+
+ dataset = H5Dcreate2(iof, dname, H5T_NATIVE_UCHAR, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((dataset >= 0), "H5Dcreate2 succeeded");
+
+ /* Close all file objects */
+ ret = H5Dclose(dataset);
+ VRFY((ret >= 0), "H5Dclose succeeded");
+ ret = H5Sclose(filespace);
+ VRFY((ret >= 0), "H5Sclose succeeded");
+ ret = H5Fclose(iof);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+#if 0
+ /* Check that file of the correct size was created */
+ file_size = h5_get_file_size(filename, fapl);
+ VRFY((file_size == 8589936640ULL), "File is correct size(~8GB)");
+#endif
+
+ /* Close fapl */
+ ret = H5Pclose(fapl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+}
+
+/* Example of using PHDF5 to read a partial written dataset. The dataset does
+ * not have actual data written to the entire raw data area and relies on the
+ * default fill value of zeros to work correctly. The read is performed twice
+ * (independent, then collective) both before and after the partial write.
+ */
+void
+dataset_fillvalue(void)
+{
+ int mpi_size, mpi_rank; /* MPI info */
+ int err_num; /* Number of errors */
+ hid_t iof, /* File ID */
+ fapl, /* File access property list ID */
+ dxpl, /* Data transfer property list ID */
+ dataset, /* Dataset ID */
+ memspace, /* Memory dataspace ID */
+ filespace; /* Dataset's dataspace ID */
+ char dname[] = "dataset"; /* Name of dataset */
+ hsize_t dset_dims[4] = {0, 6, 7, 8};
+ hsize_t req_start[4] = {0, 0, 0, 0};
+ hsize_t req_count[4] = {1, 6, 7, 8};
+ hsize_t dset_size; /* Dataset size */
+ int *rdata, *wdata; /* Buffers for data to read and write */
+ int *twdata, *trdata; /* Temporary pointer into buffer */
+ int acc, i, ii, j, k, l; /* Local index variables */
+ herr_t ret; /* Generic return value */
+ const char *filename;
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+ hbool_t prop_value;
+#endif
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file or dataset aren't supported with this connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ filename = PARATESTFILE /* GetTestParameters() */;
+
+ /* Set the dataset dimension to be one row more than number of processes */
+ /* and calculate the actual dataset size. */
+ dset_dims[0] = (hsize_t)(mpi_size + 1);
+ dset_size = dset_dims[0] * dset_dims[1] * dset_dims[2] * dset_dims[3];
+
+ /* Allocate space for the buffers */
+ rdata = HDmalloc((size_t)(dset_size * sizeof(int)));
+ /* Fix: message previously said "HDcalloc" although HDmalloc is used */
+ VRFY((rdata != NULL), "HDmalloc succeeded for read buffer");
+ wdata = HDmalloc((size_t)(dset_size * sizeof(int)));
+ VRFY((wdata != NULL), "HDmalloc succeeded for write buffer");
+
+ fapl = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ VRFY((fapl >= 0), "create_faccess_plist succeeded");
+
+ /*
+ * Create HDF5 file
+ */
+ iof = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ VRFY((iof >= 0), "H5Fcreate succeeded");
+
+ filespace = H5Screate_simple(4, dset_dims, NULL);
+ VRFY((filespace >= 0), "File H5Screate_simple succeeded");
+
+ dataset = H5Dcreate2(iof, dname, H5T_NATIVE_INT, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((dataset >= 0), "H5Dcreate2 succeeded");
+
+ memspace = H5Screate_simple(4, dset_dims, NULL);
+ VRFY((memspace >= 0), "Memory H5Screate_simple succeeded");
+
+ /*
+ * Read dataset before any data is written.
+ */
+
+ /* Create DXPL for I/O */
+ dxpl = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((dxpl >= 0), "H5Pcreate succeeded");
+
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+ prop_value = H5D_XFER_COLL_RANK0_BCAST_DEF;
+ ret = H5Pinsert2(dxpl, H5D_XFER_COLL_RANK0_BCAST_NAME, H5D_XFER_COLL_RANK0_BCAST_SIZE, &prop_value, NULL,
+ NULL, NULL, NULL, NULL, NULL);
+ VRFY((ret >= 0), "testing property list inserted succeeded");
+#endif /* H5_HAVE_INSTRUMENTED_LIBRARY */
+
+ /* Pass 0: independent I/O; pass 1: collective I/O */
+ for (ii = 0; ii < 2; ii++) {
+
+ if (ii == 0)
+ ret = H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_INDEPENDENT);
+ else
+ ret = H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+
+ /* Fill every byte of the read buffer with 2, so each int is nonzero
+ * and distinct from the fill value before the read. */
+ HDmemset(rdata, 2, (size_t)(dset_size * sizeof(int)));
+
+ /* Read the entire dataset back */
+ ret = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, rdata);
+ VRFY((ret >= 0), "H5Dread succeeded");
+
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+ prop_value = FALSE;
+ ret = H5Pget(dxpl, H5D_XFER_COLL_RANK0_BCAST_NAME, &prop_value);
+ VRFY((ret >= 0), "testing property list get succeeded");
+ if (ii == 0)
+ VRFY((prop_value == FALSE), "correctly handled rank 0 Bcast");
+ else
+ VRFY((prop_value == TRUE), "correctly handled rank 0 Bcast");
+#endif /* H5_HAVE_INSTRUMENTED_LIBRARY */
+
+ /* Verify all data read are the fill value 0 */
+ trdata = rdata;
+ err_num = 0;
+ for (i = 0; i < (int)dset_dims[0]; i++)
+ for (j = 0; j < (int)dset_dims[1]; j++)
+ for (k = 0; k < (int)dset_dims[2]; k++)
+ for (l = 0; l < (int)dset_dims[3]; l++, trdata++)
+ if (*trdata != 0)
+ if (err_num++ < MAX_ERR_REPORT || VERBOSE_MED)
+ HDprintf(
+ "Rank %d: Dataset Verify failed at [%d][%d][%d][%d]: expect 0, got %d\n",
+ mpi_rank, i, j, k, l, *trdata);
+ if (err_num > MAX_ERR_REPORT && !VERBOSE_MED)
+ HDprintf("Rank %d: [more errors ...]\n", mpi_rank);
+ if (err_num) {
+ HDprintf("Rank %d: %d errors found in check_value\n", mpi_rank, err_num);
+ nerrors++;
+ }
+ }
+
+ /* Barrier to ensure all processes have completed the above test. */
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ /*
+ * Each process writes 1 row of data. Thus last row is not written.
+ */
+ /* Create hyperslabs in memory and file dataspaces */
+ req_start[0] = (hsize_t)mpi_rank;
+ ret = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, req_start, NULL, req_count, NULL);
+ /* Fix: message previously said "memory dataspace" for the file
+ * dataspace selection (copy-paste from the line below) */
+ VRFY((ret >= 0), "H5Sselect_hyperslab succeeded on file dataspace");
+ ret = H5Sselect_hyperslab(memspace, H5S_SELECT_SET, req_start, NULL, req_count, NULL);
+ VRFY((ret >= 0), "H5Sselect_hyperslab succeeded on memory dataspace");
+
+ ret = H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ ret = H5Pset_dxpl_mpio_collective_opt(dxpl, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "set independent IO collectively succeeded");
+ }
+
+ /* Fill write buffer with some values */
+ twdata = wdata;
+ for (i = 0, acc = 0; i < (int)dset_dims[0]; i++)
+ for (j = 0; j < (int)dset_dims[1]; j++)
+ for (k = 0; k < (int)dset_dims[2]; k++)
+ for (l = 0; l < (int)dset_dims[3]; l++)
+ *twdata++ = acc++;
+
+ /* Collectively write a hyperslab of data to the dataset */
+ ret = H5Dwrite(dataset, H5T_NATIVE_INT, memspace, filespace, dxpl, wdata);
+ VRFY((ret >= 0), "H5Dwrite succeeded");
+
+ /* Barrier here, to allow processes to sync */
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ /*
+ * Read dataset after partial write.
+ */
+
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+ prop_value = H5D_XFER_COLL_RANK0_BCAST_DEF;
+ ret = H5Pset(dxpl, H5D_XFER_COLL_RANK0_BCAST_NAME, &prop_value);
+ VRFY((ret >= 0), " H5Pset succeeded");
+#endif /* H5_HAVE_INSTRUMENTED_LIBRARY */
+
+ /* Pass 0: independent I/O; pass 1: collective I/O */
+ for (ii = 0; ii < 2; ii++) {
+
+ if (ii == 0)
+ ret = H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_INDEPENDENT);
+ else
+ ret = H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+
+ /* Fill every byte of the read buffer with 2 again before the read */
+ HDmemset(rdata, 2, (size_t)(dset_size * sizeof(int)));
+
+ /* Read the entire dataset back */
+ ret = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, rdata);
+ VRFY((ret >= 0), "H5Dread succeeded");
+
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+ prop_value = FALSE;
+ ret = H5Pget(dxpl, H5D_XFER_COLL_RANK0_BCAST_NAME, &prop_value);
+ VRFY((ret >= 0), "testing property list get succeeded");
+ if (ii == 0)
+ VRFY((prop_value == FALSE), "correctly handled rank 0 Bcast");
+ else
+ VRFY((prop_value == TRUE), "correctly handled rank 0 Bcast");
+#endif /* H5_HAVE_INSTRUMENTED_LIBRARY */
+
+ /* Verify correct data read: written rows match wdata, the last
+ * (unwritten) row must still be the fill value 0 */
+ twdata = wdata;
+ trdata = rdata;
+ err_num = 0;
+ for (i = 0; i < (int)dset_dims[0]; i++)
+ for (j = 0; j < (int)dset_dims[1]; j++)
+ for (k = 0; k < (int)dset_dims[2]; k++)
+ for (l = 0; l < (int)dset_dims[3]; l++, twdata++, trdata++)
+ if (i < mpi_size) {
+ if (*twdata != *trdata)
+ if (err_num++ < MAX_ERR_REPORT || VERBOSE_MED)
+ HDprintf("Dataset Verify failed at [%d][%d][%d][%d]: expect %d, got %d\n",
+ i, j, k, l, *twdata, *trdata);
+ } /* end if */
+ else {
+ if (*trdata != 0)
+ if (err_num++ < MAX_ERR_REPORT || VERBOSE_MED)
+ HDprintf("Dataset Verify failed at [%d][%d][%d][%d]: expect 0, got %d\n",
+ i, j, k, l, *trdata);
+ } /* end else */
+ if (err_num > MAX_ERR_REPORT && !VERBOSE_MED)
+ HDprintf("[more errors ...]\n");
+ if (err_num) {
+ HDprintf("%d errors found in check_value\n", err_num);
+ nerrors++;
+ }
+ }
+
+ /* Close all file objects */
+ ret = H5Dclose(dataset);
+ VRFY((ret >= 0), "H5Dclose succeeded");
+ ret = H5Sclose(filespace);
+ VRFY((ret >= 0), "H5Sclose succeeded");
+ ret = H5Fclose(iof);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+
+ /* Close memory dataspace */
+ ret = H5Sclose(memspace);
+ VRFY((ret >= 0), "H5Sclose succeeded");
+
+ /* Close dxpl */
+ ret = H5Pclose(dxpl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /* Close fapl */
+ ret = H5Pclose(fapl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /* free the buffers */
+ HDfree(rdata);
+ HDfree(wdata);
+}
+
+/* combined cngrpw and ingrpr tests because ingrpr reads file created by cngrpw. */
+void
+collective_group_write_independent_group_read(void)
+{
+ /* The write must run first: independent_group_read() reads back the
+ * groups/datasets that collective_group_write() creates. */
+ collective_group_write();
+ independent_group_read();
+}
+
+/* Write multiple groups with a chunked dataset in each group collectively.
+ * These groups and datasets are for testing independent read later.
+ */
+void
+collective_group_write(void)
+{
+ int mpi_rank, mpi_size, size;
+ int i, j, m;
+ char gname[64], dname[32];
+ hid_t fid, gid, did, plist, dcpl, memspace, filespace;
+ DATATYPE *outme = NULL;
+ hsize_t chunk_origin[DIM];
+ hsize_t chunk_dims[DIM], file_dims[DIM], count[DIM];
+ hsize_t chunk_size[2]; /* Chunk dimensions - computed shortly */
+ herr_t ret1, ret2;
+#if 0
+ const H5Ptest_param_t *pt;
+#endif
+ char *filename;
+ int ngroups;
+
+#if 0
+ pt = GetTestParameters();
+#endif
+ /* filename = pt->name; */ filename = PARATESTFILE;
+ /* ngroups = pt->count; */ ngroups = NGROUPS;
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ size = get_size();
+
+ /* Chunk edges are half the grid edge (get_size() guarantees size is even) */
+ chunk_size[0] = (hsize_t)(size / 2);
+ chunk_size[1] = (hsize_t)(size / 2);
+
+ outme = HDmalloc((size_t)size * (size_t)size * sizeof(DATATYPE));
+ VRFY((outme != NULL), "HDmalloc succeeded for outme");
+
+ plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, plist);
+ VRFY((fid >= 0), "H5Fcreate");
+ H5Pclose(plist);
+
+ /* decide the hyperslab according to process number. */
+ get_slab(chunk_origin, chunk_dims, count, file_dims, size);
+
+ /* select hyperslab in memory and file spaces. These two operations are
+ * identical since the datasets are the same. */
+ memspace = H5Screate_simple(DIM, file_dims, NULL);
+ ret1 = H5Sselect_hyperslab(memspace, H5S_SELECT_SET, chunk_origin, chunk_dims, count, chunk_dims);
+ filespace = H5Screate_simple(DIM, file_dims, NULL);
+ ret2 = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, chunk_origin, chunk_dims, count, chunk_dims);
+ VRFY((memspace >= 0), "memspace");
+ VRFY((filespace >= 0), "filespace");
+ VRFY((ret1 == 0), "mgroup memspace selection");
+ VRFY((ret2 == 0), "mgroup filespace selection");
+
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ ret1 = H5Pset_chunk(dcpl, 2, chunk_size);
+ VRFY((dcpl >= 0), "dataset creation property");
+ VRFY((ret1 == 0), "set chunk for dataset creation property");
+
+ /* creates ngroups groups under the root group, writes chunked
+ * datasets in parallel. */
+ for (m = 0; m < ngroups; m++) {
+ HDsnprintf(gname, sizeof(gname), "group%d", m);
+ gid = H5Gcreate2(fid, gname, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((gid > 0), gname);
+
+ HDsnprintf(dname, sizeof(dname), "dataset%d", m);
+ did = H5Dcreate2(gid, dname, H5T_NATIVE_INT, filespace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ VRFY((did > 0), dname);
+
+ /* Values encode position and rank; checked later by the readers */
+ for (i = 0; i < size; i++)
+ for (j = 0; j < size; j++)
+ outme[(i * size) + j] = (i + j) * 1000 + mpi_rank;
+
+ ret1 = H5Dwrite(did, H5T_NATIVE_INT, memspace, filespace, H5P_DEFAULT, outme);
+ VRFY((ret1 == 0), "H5Dwrite");
+
+ ret1 = H5Dclose(did);
+ VRFY((ret1 == 0), "H5Dclose");
+
+ ret1 = H5Gclose(gid);
+ VRFY((ret1 == 0), "H5Gclose");
+
+#ifdef BARRIER_CHECKS
+ if (!((m + 1) % 10)) {
+ HDprintf("created %d groups\n", m + 1);
+ MPI_Barrier(MPI_COMM_WORLD);
+ }
+#endif /* BARRIER_CHECKS */
+ }
+
+ H5Pclose(dcpl);
+ H5Sclose(filespace);
+ H5Sclose(memspace);
+
+ ret1 = H5Fclose(fid);
+ VRFY((ret1 == 0), "H5Fclose");
+
+ HDfree(outme);
+}
+
+/* Let two sets of processes open and read different groups and chunked
+ * datasets independently.
+ */
+void
+independent_group_read(void)
+{
+ int mpi_rank, m;
+ hid_t plist, fid;
+#if 0
+ const H5Ptest_param_t *pt;
+#endif
+ char *filename;
+ int ngroups;
+ herr_t ret;
+
+#if 0
+ pt = GetTestParameters();
+#endif
+ /* filename = pt->name; */ filename = PARATESTFILE;
+ /* ngroups = pt->count; */ ngroups = NGROUPS;
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ H5Pset_all_coll_metadata_ops(plist, FALSE);
+
+ fid = H5Fopen(filename, H5F_ACC_RDONLY, plist);
+ VRFY((fid > 0), "H5Fopen");
+ H5Pclose(plist);
+
+ /* open groups and read datasets. Odd number processes read even number
+ * groups from the end; even number processes read odd number groups
+ * from the beginning. */
+ if (mpi_rank % 2 == 0) {
+ for (m = ngroups - 1; m == 0; m -= 2)
+ group_dataset_read(fid, mpi_rank, m);
+ }
+ else {
+ for (m = 0; m < ngroups; m += 2)
+ group_dataset_read(fid, mpi_rank, m);
+ }
+
+ ret = H5Fclose(fid);
+ VRFY((ret == 0), "H5Fclose");
+}
+
+/* Open and read datasets and compare data
+ */
+static void
+group_dataset_read(hid_t fid, int mpi_rank, int m)
+{
+ int ret, i, j, size;
+ char gname[64], dname[32];
+ hid_t gid, did;
+ DATATYPE *outdata = NULL;
+ DATATYPE *indata = NULL;
+
+ size = get_size();
+
+ indata = (DATATYPE *)HDmalloc((size_t)size * (size_t)size * sizeof(DATATYPE));
+ VRFY((indata != NULL), "HDmalloc succeeded for indata");
+
+ outdata = (DATATYPE *)HDmalloc((size_t)size * (size_t)size * sizeof(DATATYPE));
+ VRFY((outdata != NULL), "HDmalloc succeeded for outdata");
+
+ /* open every group under root group. */
+ HDsnprintf(gname, sizeof(gname), "group%d", m);
+ gid = H5Gopen2(fid, gname, H5P_DEFAULT);
+ VRFY((gid > 0), gname);
+
+ /* check the data. */
+ HDsnprintf(dname, sizeof(dname), "dataset%d", m);
+ did = H5Dopen2(gid, dname, H5P_DEFAULT);
+ VRFY((did > 0), dname);
+
+ H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, indata);
+
+ /* this is the original value */
+ for (i = 0; i < size; i++)
+ for (j = 0; j < size; j++)
+ outdata[(i * size) + j] = (i + j) * 1000 + mpi_rank;
+
+ /* compare the original value(outdata) to the value in file(indata).*/
+ ret = check_value(indata, outdata, size);
+ VRFY((ret == 0), "check the data");
+
+ ret = H5Dclose(did);
+ VRFY((ret == 0), "H5Dclose");
+ ret = H5Gclose(gid);
+ VRFY((ret == 0), "H5Gclose");
+
+ HDfree(indata);
+ HDfree(outdata);
+}
+
+/*
+ * Example of using PHDF5 to create multiple groups. Under the root group,
+ * it creates ngroups groups. Under the first group just created, it creates
+ * recursive subgroups of depth GROUP_DEPTH. In each created group, it
 * generates NDATASETS datasets. Each process writes a hyperslab of an array
+ * into the file. The structure is like
+ *
+ * root group
+ * |
+ * ---------------------------- ... ... ------------------------
+ * | | | ... ... | |
+ * group0*+' group1*+' group2*+' ... ... group ngroups*+'
+ * |
+ * 1st_child_group*'
+ * |
+ * 2nd_child_group*'
+ * |
+ * :
+ * :
+ * |
+ * GROUP_DEPTHth_child_group*'
+ *
+ * * means the group has dataset(s).
+ * + means the group has attribute(s).
+ * ' means the datasets in the groups have attribute(s).
+ *
+ */
+void
+multiple_group_write(void)
+{
+ int mpi_rank, mpi_size, size;
+ int m;
+ char gname[64];
+ hid_t fid, gid, plist, memspace, filespace;
+ hsize_t chunk_origin[DIM];
+ hsize_t chunk_dims[DIM], file_dims[DIM], count[DIM];
+ herr_t ret;
+#if 0
+ const H5Ptest_param_t *pt;
+#endif
+ char *filename;
+ int ngroups;
+
+#if 0
+ pt = GetTestParameters();
+#endif
+ /* filename = pt->name; */ filename = PARATESTFILE;
+ /* ngroups = pt->count; */ ngroups = NGROUPS;
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, group, dataset, or attribute aren't supported with "
+ "this connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ size = get_size();
+
+ plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, plist);
+ H5Pclose(plist);
+
+ /* decide the hyperslab according to process number. */
+ get_slab(chunk_origin, chunk_dims, count, file_dims, size);
+
+ /* select hyperslab in memory and file spaces. These two operations are
+ * identical since the datasets are the same. */
+ memspace = H5Screate_simple(DIM, file_dims, NULL);
+ VRFY((memspace >= 0), "memspace");
+ ret = H5Sselect_hyperslab(memspace, H5S_SELECT_SET, chunk_origin, chunk_dims, count, chunk_dims);
+ VRFY((ret >= 0), "mgroup memspace selection");
+
+ filespace = H5Screate_simple(DIM, file_dims, NULL);
+ VRFY((filespace >= 0), "filespace");
+ ret = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, chunk_origin, chunk_dims, count, chunk_dims);
+ VRFY((ret >= 0), "mgroup filespace selection");
+
+ /* creates ngroups groups under the root group, writes datasets in
+ * parallel. */
+ for (m = 0; m < ngroups; m++) {
+ HDsnprintf(gname, sizeof(gname), "group%d", m);
+ gid = H5Gcreate2(fid, gname, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((gid > 0), gname);
+
+ /* create attribute for these groups. */
+ write_attribute(gid, is_group, m);
+
+ if (m != 0)
+ write_dataset(memspace, filespace, gid);
+
+ H5Gclose(gid);
+
+#ifdef BARRIER_CHECKS
+ if (!((m + 1) % 10)) {
+ HDprintf("created %d groups\n", m + 1);
+ MPI_Barrier(MPI_COMM_WORLD);
+ }
+#endif /* BARRIER_CHECKS */
+ }
+
+ /* recursively creates subgroups under the first group. */
+ gid = H5Gopen2(fid, "group0", H5P_DEFAULT);
+ create_group_recursive(memspace, filespace, gid, 0);
+ ret = H5Gclose(gid);
+ VRFY((ret >= 0), "H5Gclose");
+
+ ret = H5Sclose(filespace);
+ VRFY((ret >= 0), "H5Sclose");
+ ret = H5Sclose(memspace);
+ VRFY((ret >= 0), "H5Sclose");
+ ret = H5Fclose(fid);
+ VRFY((ret >= 0), "H5Fclose");
+}
+
+/*
+ * In a group, creates NDATASETS datasets. Each process writes a hyperslab
+ * of a data array to the file.
+ */
+static void
+write_dataset(hid_t memspace, hid_t filespace, hid_t gid)
+{
+ int i, j, n, size;
+ int mpi_rank, mpi_size;
+ char dname[32];
+ DATATYPE *outme = NULL;
+ hid_t did;
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ size = get_size();
+
+ outme = HDmalloc((size_t)size * (size_t)size * sizeof(double));
+ VRFY((outme != NULL), "HDmalloc succeeded for outme");
+
+ for (n = 0; n < NDATASET; n++) {
+ HDsnprintf(dname, sizeof(dname), "dataset%d", n);
+ did = H5Dcreate2(gid, dname, H5T_NATIVE_INT, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((did > 0), dname);
+
+ for (i = 0; i < size; i++)
+ for (j = 0; j < size; j++)
+ outme[(i * size) + j] = n * 1000 + mpi_rank;
+
+ H5Dwrite(did, H5T_NATIVE_INT, memspace, filespace, H5P_DEFAULT, outme);
+
+ /* create attribute for these datasets.*/
+ write_attribute(did, is_dset, n);
+
+ H5Dclose(did);
+ }
+ HDfree(outme);
+}
+
+/*
+ * Creates subgroups of depth GROUP_DEPTH recursively. Also writes datasets
+ * in parallel in each group.
+ */
/*
 * Creates subgroups of depth GROUP_DEPTH recursively. Also writes datasets
 * in parallel in each group.
 *
 * memspace/filespace - pre-selected memory and file dataspaces, reused for
 *                      every dataset written along the chain
 * gid                - the group to write datasets into at this level
 * counter            - current recursion depth, 0-based
 */
static void
create_group_recursive(hid_t memspace, hid_t filespace, hid_t gid, int counter)
{
    hid_t child_gid;
    int   mpi_rank;
    char  gname[64];

    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);

#ifdef BARRIER_CHECKS
    /* progress report plus a world-wide sync every 10 recursion levels */
    if (!((counter + 1) % 10)) {
        HDprintf("created %dth child groups\n", counter + 1);
        MPI_Barrier(MPI_COMM_WORLD);
    }
#endif /* BARRIER_CHECKS */

    HDsnprintf(gname, sizeof(gname), "%dth_child_group", counter + 1);
    child_gid = H5Gcreate2(gid, gname, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    VRFY((child_gid > 0), gname);

    /* write datasets in parallel.  Note: the datasets go into the CURRENT
     * group (gid), not the child just created; the child receives its own
     * datasets on the next level of recursion.  This mirrors the read side
     * (recursive_read_group), which verifies gid's datasets before
     * descending into the child. */
    write_dataset(memspace, filespace, gid);

    if (counter < GROUP_DEPTH)
        create_group_recursive(memspace, filespace, child_gid, counter + 1);

    H5Gclose(child_gid);
}
+
+/*
+ * This function is to verify the data from multiple group testing. It opens
+ * every dataset in every group and check their correctness.
+ */
/*
 * This function is to verify the data from multiple group testing. It opens
 * every dataset in every group and checks their correctness.  It is the
 * read/verify counterpart of multiple_group_write() and must be run against
 * a file that function produced.
 */
void
multiple_group_read(void)
{
    int     mpi_rank, mpi_size, error_num, size;
    int     m;
    char    gname[64];
    hid_t   plist, fid, gid, memspace, filespace;
    hsize_t chunk_origin[DIM];
    hsize_t chunk_dims[DIM], file_dims[DIM], count[DIM];
#if 0
    const H5Ptest_param_t *pt;
#endif
    char *filename;
    int   ngroups;

#if 0
    pt = GetTestParameters();
#endif
    /* filename = pt->name; */ filename = PARATESTFILE;
    /* ngroups = pt->count; */ ngroups = NGROUPS;

    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC)) {
        if (MAINPROCESS) {
            puts("SKIPPED");
            printf("    API functions for basic file, group, dataset, or attribute aren't supported with "
                   "this connector\n");
            fflush(stdout);
        }

        return;
    }

    size = get_size();

    plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
    fid   = H5Fopen(filename, H5F_ACC_RDONLY, plist);
    H5Pclose(plist);

    /* decide hyperslab for each process */
    get_slab(chunk_origin, chunk_dims, count, file_dims, size);

    /* select hyperslab for memory and file space */
    memspace = H5Screate_simple(DIM, file_dims, NULL);
    H5Sselect_hyperslab(memspace, H5S_SELECT_SET, chunk_origin, chunk_dims, count, chunk_dims);
    filespace = H5Screate_simple(DIM, file_dims, NULL);
    H5Sselect_hyperslab(filespace, H5S_SELECT_SET, chunk_origin, chunk_dims, count, chunk_dims);

    /* open every group under root group. */
    for (m = 0; m < ngroups; m++) {
        HDsnprintf(gname, sizeof(gname), "group%d", m);
        gid = H5Gopen2(fid, gname, H5P_DEFAULT);
        VRFY((gid > 0), gname);

        /* check the data.  group0 holds no datasets at this level (see
         * multiple_group_write); its datasets live in the recursive chain
         * verified below. */
        if (m != 0)
            if ((error_num = read_dataset(memspace, filespace, gid)) > 0)
                nerrors += error_num;

        /* check attribute.*/
        error_num = 0;
        if ((error_num = read_attribute(gid, is_group, m)) > 0)
            nerrors += error_num;

        H5Gclose(gid);

#ifdef BARRIER_CHECKS
        if (!((m + 1) % 10))
            MPI_Barrier(MPI_COMM_WORLD);
#endif /* BARRIER_CHECKS */
    }

    /* open all the groups in vertical direction. */
    gid = H5Gopen2(fid, "group0", H5P_DEFAULT);
    VRFY((gid > 0), "group0");
    recursive_read_group(memspace, filespace, gid, 0);
    H5Gclose(gid);

    /* NOTE(review): the H5Sclose/H5Fclose results are not checked here,
     * unlike in multiple_group_write(). */
    H5Sclose(filespace);
    H5Sclose(memspace);
    H5Fclose(fid);
}
+
+/*
 * This function opens all the datasets in a certain group and checks the data using
+ * dataset_vrfy function.
+ */
+static int
+read_dataset(hid_t memspace, hid_t filespace, hid_t gid)
+{
+ int i, j, n, mpi_rank, mpi_size, size, attr_errors = 0, vrfy_errors = 0;
+ char dname[32];
+ DATATYPE *outdata = NULL, *indata = NULL;
+ hid_t did;
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ size = get_size();
+
+ indata = (DATATYPE *)HDmalloc((size_t)size * (size_t)size * sizeof(DATATYPE));
+ VRFY((indata != NULL), "HDmalloc succeeded for indata");
+
+ outdata = (DATATYPE *)HDmalloc((size_t)size * (size_t)size * sizeof(DATATYPE));
+ VRFY((outdata != NULL), "HDmalloc succeeded for outdata");
+
+ for (n = 0; n < NDATASET; n++) {
+ HDsnprintf(dname, sizeof(dname), "dataset%d", n);
+ did = H5Dopen2(gid, dname, H5P_DEFAULT);
+ VRFY((did > 0), dname);
+
+ H5Dread(did, H5T_NATIVE_INT, memspace, filespace, H5P_DEFAULT, indata);
+
+ /* this is the original value */
+ for (i = 0; i < size; i++)
+ for (j = 0; j < size; j++) {
+ *outdata = n * 1000 + mpi_rank;
+ outdata++;
+ }
+ outdata -= size * size;
+
+ /* compare the original value(outdata) to the value in file(indata).*/
+ vrfy_errors = check_value(indata, outdata, size);
+
+ /* check attribute.*/
+ if ((attr_errors = read_attribute(did, is_dset, n)) > 0)
+ vrfy_errors += attr_errors;
+
+ H5Dclose(did);
+ }
+
+ HDfree(indata);
+ HDfree(outdata);
+
+ return vrfy_errors;
+}
+
+/*
+ * This recursive function opens all the groups in vertical direction and
+ * checks the data.
+ */
+static void
+recursive_read_group(hid_t memspace, hid_t filespace, hid_t gid, int counter)
+{
+ hid_t child_gid;
+ int mpi_rank, err_num = 0;
+ char gname[64];
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+#ifdef BARRIER_CHECKS
+ if ((counter + 1) % 10)
+ MPI_Barrier(MPI_COMM_WORLD);
+#endif /* BARRIER_CHECKS */
+
+ if ((err_num = read_dataset(memspace, filespace, gid)))
+ nerrors += err_num;
+
+ if (counter < GROUP_DEPTH) {
+ HDsnprintf(gname, sizeof(gname), "%dth_child_group", counter + 1);
+ child_gid = H5Gopen2(gid, gname, H5P_DEFAULT);
+ VRFY((child_gid > 0), gname);
+ recursive_read_group(memspace, filespace, child_gid, counter + 1);
+ H5Gclose(child_gid);
+ }
+}
+
+/* Create and write attribute for a group or a dataset. For groups, attribute
+ * is a scalar datum; for dataset, it is a one-dimensional array.
+ */
+static void
+write_attribute(hid_t obj_id, int this_type, int num)
+{
+ hid_t sid, aid;
+ hsize_t dspace_dims[1] = {8};
+ int i, mpi_rank, attr_data[8], dspace_rank = 1;
+ char attr_name[32];
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ if (this_type == is_group) {
+ HDsnprintf(attr_name, sizeof(attr_name), "Group Attribute %d", num);
+ sid = H5Screate(H5S_SCALAR);
+ aid = H5Acreate2(obj_id, attr_name, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ H5Awrite(aid, H5T_NATIVE_INT, &num);
+ H5Aclose(aid);
+ H5Sclose(sid);
+ } /* end if */
+ else if (this_type == is_dset) {
+ HDsnprintf(attr_name, sizeof(attr_name), "Dataset Attribute %d", num);
+ for (i = 0; i < 8; i++)
+ attr_data[i] = i;
+ sid = H5Screate_simple(dspace_rank, dspace_dims, NULL);
+ aid = H5Acreate2(obj_id, attr_name, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT);
+ H5Awrite(aid, H5T_NATIVE_INT, attr_data);
+ H5Aclose(aid);
+ H5Sclose(sid);
+ } /* end else-if */
+}
+
+/* Read and verify attribute for group or dataset. */
+static int
+read_attribute(hid_t obj_id, int this_type, int num)
+{
+ hid_t aid;
+ hsize_t group_block[2] = {1, 1}, dset_block[2] = {1, 8};
+ int i, mpi_rank, in_num, in_data[8], out_data[8], vrfy_errors = 0;
+ char attr_name[32];
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ if (this_type == is_group) {
+ HDsnprintf(attr_name, sizeof(attr_name), "Group Attribute %d", num);
+ aid = H5Aopen(obj_id, attr_name, H5P_DEFAULT);
+ H5Aread(aid, H5T_NATIVE_INT, &in_num);
+ vrfy_errors = dataset_vrfy(NULL, NULL, NULL, group_block, &in_num, &num);
+ H5Aclose(aid);
+ }
+ else if (this_type == is_dset) {
+ HDsnprintf(attr_name, sizeof(attr_name), "Dataset Attribute %d", num);
+ for (i = 0; i < 8; i++)
+ out_data[i] = i;
+ aid = H5Aopen(obj_id, attr_name, H5P_DEFAULT);
+ H5Aread(aid, H5T_NATIVE_INT, in_data);
+ vrfy_errors = dataset_vrfy(NULL, NULL, NULL, dset_block, in_data, out_data);
+ H5Aclose(aid);
+ }
+
+ return vrfy_errors;
+}
+
+/* This functions compares the original data with the read-in data for its
+ * hyperslab part only by process ID.
+ */
+static int
+check_value(DATATYPE *indata, DATATYPE *outdata, int size)
+{
+ int mpi_rank, mpi_size, err_num = 0;
+ hsize_t i, j;
+ hsize_t chunk_origin[DIM];
+ hsize_t chunk_dims[DIM], count[DIM];
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ get_slab(chunk_origin, chunk_dims, count, NULL, size);
+
+ indata += chunk_origin[0] * (hsize_t)size;
+ outdata += chunk_origin[0] * (hsize_t)size;
+ for (i = chunk_origin[0]; i < (chunk_origin[0] + chunk_dims[0]); i++)
+ for (j = chunk_origin[1]; j < (chunk_origin[1] + chunk_dims[1]); j++) {
+ if (*indata != *outdata)
+ if (err_num++ < MAX_ERR_REPORT || VERBOSE_MED)
+ HDprintf("Dataset Verify failed at [%lu][%lu](row %lu, col%lu): expect %d, got %d\n",
+ (unsigned long)i, (unsigned long)j, (unsigned long)i, (unsigned long)j, *outdata,
+ *indata);
+ }
+ if (err_num > MAX_ERR_REPORT && !VERBOSE_MED)
+ HDprintf("[more errors ...]\n");
+ if (err_num)
+ HDprintf("%d errors found in check_value\n", err_num);
+ return err_num;
+}
+
+/* Decide the portion of data chunk in dataset by process ID.
+ */
+
+static void
+get_slab(hsize_t chunk_origin[], hsize_t chunk_dims[], hsize_t count[], hsize_t file_dims[], int size)
+{
+ int mpi_rank, mpi_size;
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ if (chunk_origin != NULL) {
+ chunk_origin[0] = (hsize_t)mpi_rank * (hsize_t)(size / mpi_size);
+ chunk_origin[1] = 0;
+ }
+ if (chunk_dims != NULL) {
+ chunk_dims[0] = (hsize_t)(size / mpi_size);
+ chunk_dims[1] = (hsize_t)size;
+ }
+ if (file_dims != NULL)
+ file_dims[0] = file_dims[1] = (hsize_t)size;
+ if (count != NULL)
+ count[0] = count[1] = 1;
+}
+
+/*
+ * This function is based on bug demonstration code provided by Thomas
+ * Guignon(thomas.guignon@ifp.fr), and is intended to verify the
+ * correctness of my fix for that bug.
+ *
+ * In essence, the bug appeared when at least one process attempted to
+ * write a point selection -- for which collective I/O is not supported,
+ * and at least one other attempted to write some other type of selection
+ * for which collective I/O is supported.
+ *
+ * Since the processes did not compare notes before performing the I/O,
+ * some would attempt collective I/O while others performed independent
+ * I/O. A hang resulted.
+ *
+ * This function reproduces this situation. At present the test hangs
+ * on failure.
+ * JRM - 9/13/04
+ */
+
+#define N 4
+
+void
+io_mode_confusion(void)
+{
+ /*
+ * HDF5 APIs definitions
+ */
+
+ const int rank = 1;
+ const char *dataset_name = "IntArray";
+
+ hid_t file_id, dset_id; /* file and dataset identifiers */
+ hid_t filespace, memspace; /* file and memory dataspace */
+ /* identifiers */
+ hsize_t dimsf[1]; /* dataset dimensions */
+ int data[N] = {1}; /* pointer to data buffer to write */
+ hsize_t coord[N] = {0L, 1L, 2L, 3L};
+ hid_t plist_id; /* property list identifier */
+ herr_t status;
+
+ /*
+ * MPI variables
+ */
+
+ int mpi_size, mpi_rank;
+
+ /*
+ * test bed related variables
+ */
+
+ const char *fcn_name = "io_mode_confusion";
+ const hbool_t verbose = FALSE;
+#if 0
+ const H5Ptest_param_t *pt;
+#endif
+ char *filename;
+
+#if 0
+ pt = GetTestParameters();
+#endif
+ /* filename = pt->name; */ filename = PARATESTFILE;
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ printf(" API functions for basic file, dataset, or dataset more aren't supported with this "
+ "connector\n");
+ fflush(stdout);
+ }
+
+ return;
+ }
+
+ /*
+ * Set up file access property list with parallel I/O access
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Setting up property list.\n", mpi_rank, fcn_name);
+
+ plist_id = H5Pcreate(H5P_FILE_ACCESS);
+ VRFY((plist_id != -1), "H5Pcreate() failed");
+
+ status = H5Pset_fapl_mpio(plist_id, MPI_COMM_WORLD, MPI_INFO_NULL);
+ VRFY((status >= 0), "H5Pset_fapl_mpio() failed");
+
+ /*
+ * Create a new file collectively and release property list identifier.
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Creating new file.\n", mpi_rank, fcn_name);
+
+ file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, plist_id);
+ VRFY((file_id >= 0), "H5Fcreate() failed");
+
+ status = H5Pclose(plist_id);
+ VRFY((status >= 0), "H5Pclose() failed");
+
+ /*
+ * Create the dataspace for the dataset.
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Creating the dataspace for the dataset.\n", mpi_rank, fcn_name);
+
+ dimsf[0] = N;
+ filespace = H5Screate_simple(rank, dimsf, NULL);
+ VRFY((filespace >= 0), "H5Screate_simple() failed.");
+
+ /*
+ * Create the dataset with default properties and close filespace.
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Creating the dataset, and closing filespace.\n", mpi_rank, fcn_name);
+
+ dset_id =
+ H5Dcreate2(file_id, dataset_name, H5T_NATIVE_INT, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((dset_id >= 0), "H5Dcreate2() failed");
+
+ status = H5Sclose(filespace);
+ VRFY((status >= 0), "H5Sclose() failed");
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Calling H5Screate_simple().\n", mpi_rank, fcn_name);
+
+ memspace = H5Screate_simple(rank, dimsf, NULL);
+ VRFY((memspace >= 0), "H5Screate_simple() failed.");
+
+ if (mpi_rank == 0) {
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Calling H5Sselect_all(memspace).\n", mpi_rank, fcn_name);
+
+ status = H5Sselect_all(memspace);
+ VRFY((status >= 0), "H5Sselect_all() failed");
+ }
+ else {
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Calling H5Sselect_none(memspace).\n", mpi_rank, fcn_name);
+
+ status = H5Sselect_none(memspace);
+ VRFY((status >= 0), "H5Sselect_none() failed");
+ }
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Calling MPI_Barrier().\n", mpi_rank, fcn_name);
+
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Calling H5Dget_space().\n", mpi_rank, fcn_name);
+
+ filespace = H5Dget_space(dset_id);
+ VRFY((filespace >= 0), "H5Dget_space() failed");
+
+ /* select all */
+ if (mpi_rank == 0) {
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Calling H5Sselect_elements() -- set up hang?\n", mpi_rank, fcn_name);
+
+ status = H5Sselect_elements(filespace, H5S_SELECT_SET, N, (const hsize_t *)&coord);
+ VRFY((status >= 0), "H5Sselect_elements() failed");
+ }
+ else { /* select nothing */
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Calling H5Sselect_none().\n", mpi_rank, fcn_name);
+
+ status = H5Sselect_none(filespace);
+ VRFY((status >= 0), "H5Sselect_none() failed");
+ }
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Calling MPI_Barrier().\n", mpi_rank, fcn_name);
+
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Calling H5Pcreate().\n", mpi_rank, fcn_name);
+
+ plist_id = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((plist_id != -1), "H5Pcreate() failed");
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Calling H5Pset_dxpl_mpio().\n", mpi_rank, fcn_name);
+
+ status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);
+ VRFY((status >= 0), "H5Pset_dxpl_mpio() failed");
+ if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+ status = H5Pset_dxpl_mpio_collective_opt(plist_id, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((status >= 0), "set independent IO collectively succeeded");
+ }
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Calling H5Dwrite() -- hang here?.\n", mpi_rank, fcn_name);
+
+ status = H5Dwrite(dset_id, H5T_NATIVE_INT, memspace, filespace, plist_id, data);
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Returned from H5Dwrite(), status=%d.\n", mpi_rank, fcn_name, status);
+ VRFY((status >= 0), "H5Dwrite() failed");
+
+ /*
+ * Close/release resources.
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Cleaning up from test.\n", mpi_rank, fcn_name);
+
+ status = H5Dclose(dset_id);
+ VRFY((status >= 0), "H5Dclose() failed");
+
+ status = H5Sclose(filespace);
+ VRFY((status >= 0), "H5Dclose() failed");
+
+ status = H5Sclose(memspace);
+ VRFY((status >= 0), "H5Sclose() failed");
+
+ status = H5Pclose(plist_id);
+ VRFY((status >= 0), "H5Pclose() failed");
+
+ status = H5Fclose(file_id);
+ VRFY((status >= 0), "H5Fclose() failed");
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Done.\n", mpi_rank, fcn_name);
+
+ return;
+
+} /* io_mode_confusion() */
+
+#undef N
+
+/*
+ * At present, the object header code maintains an image of its on disk
 * representation, which is updated as necessary instead of being generated
 * on request.
+ *
 * Prior to the fix that this test is designed to verify, the image of the
+ * on disk representation was only updated on flush -- not when the object
+ * header was marked clean.
+ *
+ * This worked perfectly well as long as all writes of a given object
+ * header were written from a single process. However, with the implementation
+ * of round robin metadata data writes in parallel HDF5, this is no longer
+ * the case -- it is possible for a given object header to be flushed from
+ * several different processes, with the object header simply being marked
+ * clean in all other processes on each flush. This resulted in NULL or
 * out-of-date object header information being written to disk.
+ *
+ * To repair this, I modified the object header code to update its
 * on disk image both on flush and when marked clean.
+ *
+ * This test is directed at verifying that the fix performs as expected.
+ *
+ * The test functions by creating a HDF5 file with several small datasets,
 * and then flushing the file. This should result in at least one of
+ * the associated object headers being flushed by a process other than
+ * process 0.
+ *
+ * Then for each data set, add an attribute and flush the file again.
+ *
+ * Close the file and re-open it.
+ *
 * Open each of the data sets in turn. If all opens are successful,
+ * the test passes. Otherwise the test fails.
+ *
+ * Note that this test will probably become irrelevant shortly, when we
+ * land the journaling modifications on the trunk -- at which point all
+ * cache clients will have to construct on disk images on demand.
+ *
+ * JRM -- 10/13/10
+ */
+
/* Shared constants for the rr_obj_hdr_flush_confusion tests */
#define NUM_DATA_SETS   4   /* datasets created per file */
#define LOCAL_DATA_SIZE 4   /* doubles written per process per dataset */
#define LARGE_ATTR_SIZE 256 /* element count of the "large" attributes */
/* Since all even and odd processes are split into writer and reader comm
 * respectively, process 0 and 1 in COMM_WORLD become the root process of
 * the writer and reader comm respectively.
 */
#define Writer_Root 0
#define Reader_Root 1
/* Writer root broadcasts the step number readers should verify next. */
#define Reader_wait(mpi_err, xsteps) mpi_err = MPI_Bcast(&xsteps, 1, MPI_INT, Writer_Root, MPI_COMM_WORLD)
/* Reader root broadcasts back how many steps have been verified. */
#define Reader_result(mpi_err, xsteps_done) \
    mpi_err = MPI_Bcast(&xsteps_done, 1, MPI_INT, Reader_Root, MPI_COMM_WORLD)
/* Convenience: wait for a step, then collect the verification result. */
#define Reader_check(mpi_err, xsteps, xsteps_done) \
    { \
        Reader_wait(mpi_err, xsteps); \
        Reader_result(mpi_err, xsteps_done); \
    }

/* object names used by both rr_obj_hdr_flush_confusion and
 * rr_obj_hdr_flush_confusion_reader.
 */
const char *dataset_name[NUM_DATA_SETS] = {"dataset_0", "dataset_1", "dataset_2", "dataset_3"};
const char *att_name[NUM_DATA_SETS] = {"attribute_0", "attribute_1", "attribute_2", "attribute_3"};
const char *lg_att_name[NUM_DATA_SETS] = {"large_attribute_0", "large_attribute_1", "large_attribute_2",
                                          "large_attribute_3"};
+
/*
 * Driver for the round-robin object header flush test described above.
 * Splits MPI_COMM_WORLD into a writer communicator (even ranks) and a
 * reader communicator (odd ranks), then dispatches each process to its
 * role-specific routine.  Writers and readers synchronize via the
 * Reader_wait/Reader_result broadcasts over MPI_COMM_WORLD.
 */
void
rr_obj_hdr_flush_confusion(void)
{
    /* MPI variables */
    /* private communicator size and rank */
    int      mpi_size;
    int      mpi_rank;
    int      mrc;       /* mpi error code */
    int      is_reader; /* 1 for reader process; 0 for writer process. */
    MPI_Comm comm;

    /* test bed related variables */
    const char   *fcn_name = "rr_obj_hdr_flush_confusion";
    const hbool_t verbose  = FALSE;

    /* Create two new private communicators from MPI_COMM_WORLD.
     * Even and odd ranked processes go to comm_writers and comm_readers
     * respectively.
     */
    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);

    /* Make sure the connector supports the API functions being tested */
    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_FLUSH_REFRESH) || !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_BASIC) ||
        !(vol_cap_flags_g & H5VL_CAP_FLAG_ATTR_MORE)) {
        if (MAINPROCESS) {
            puts("SKIPPED");
            printf("    API functions for basic file, dataset, attribute, dataset more, attribute more, or "
                   "file flush aren't supported with this connector\n");
            fflush(stdout);
        }

        return;
    }

    /* the split below needs at least one writer and at least one reader
     * beyond rank 0/1; the test requires 3+ ranks */
    HDassert(mpi_size > 2);

    is_reader = mpi_rank % 2;
    mrc       = MPI_Comm_split(MPI_COMM_WORLD, is_reader, mpi_rank, &comm);
    VRFY((mrc == MPI_SUCCESS), "MPI_Comm_split");

    /* The reader processes branches off to do reading
     * while the writer processes continues to do writing
     * Whenever writers finish one writing step, including a H5Fflush,
     * they inform the readers, via MPI_COMM_WORLD, to verify.
     * They will wait for the result from the readers before doing the next
     * step. When all steps are done, they inform readers to end.
     */
    if (is_reader)
        rr_obj_hdr_flush_confusion_reader(comm);
    else
        rr_obj_hdr_flush_confusion_writer(comm);

    MPI_Comm_free(&comm);
    if (verbose)
        HDfprintf(stdout, "%0d:%s: Done.\n", mpi_rank, fcn_name);

    return;

} /* rr_obj_hdr_flush_confusion() */
+
+void
+rr_obj_hdr_flush_confusion_writer(MPI_Comm comm)
+{
+ int i;
+ int j;
+ hid_t file_id = -1;
+ hid_t fapl_id = -1;
+ hid_t dxpl_id = -1;
+ hid_t att_id[NUM_DATA_SETS];
+ hid_t att_space[NUM_DATA_SETS];
+ hid_t lg_att_id[NUM_DATA_SETS];
+ hid_t lg_att_space[NUM_DATA_SETS];
+ hid_t disk_space[NUM_DATA_SETS];
+ hid_t mem_space[NUM_DATA_SETS];
+ hid_t dataset[NUM_DATA_SETS];
+ hsize_t att_size[1];
+ hsize_t lg_att_size[1];
+ hsize_t disk_count[1];
+ hsize_t disk_size[1];
+ hsize_t disk_start[1];
+ hsize_t mem_count[1];
+ hsize_t mem_size[1];
+ hsize_t mem_start[1];
+ herr_t err;
+ double data[LOCAL_DATA_SIZE];
+ double att[LOCAL_DATA_SIZE];
+ double lg_att[LARGE_ATTR_SIZE];
+
+ /* MPI variables */
+ /* world communication size and rank */
+ int mpi_world_size;
+ int mpi_world_rank;
+ /* private communicator size and rank */
+ int mpi_size;
+ int mpi_rank;
+ int mrc; /* mpi error code */
+ /* steps to verify and have been verified */
+ int steps = 0;
+ int steps_done = 0;
+
+ /* test bed related variables */
+ const char *fcn_name = "rr_obj_hdr_flush_confusion_writer";
+ const hbool_t verbose = FALSE;
+#if 0
+ const H5Ptest_param_t *pt;
+#endif
+ char *filename;
+
+ /*
+ * setup test bed related variables:
+ */
+
+#if 0
+ pt = (const H5Ptest_param_t *)GetTestParameters();
+#endif
+ /* filename = pt->name; */ filename = PARATESTFILE;
+
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_world_rank);
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_world_size);
+ MPI_Comm_rank(comm, &mpi_rank);
+ MPI_Comm_size(comm, &mpi_size);
+
+ /*
+ * Set up file access property list with parallel I/O access
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Setting up property list.\n", mpi_rank, fcn_name);
+
+ fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ VRFY((fapl_id != -1), "H5Pcreate(H5P_FILE_ACCESS) failed");
+
+ err = H5Pset_fapl_mpio(fapl_id, comm, MPI_INFO_NULL);
+ VRFY((err >= 0), "H5Pset_fapl_mpio() failed");
+
+ /*
+ * Create a new file collectively and release property list identifier.
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Creating new file \"%s\".\n", mpi_rank, fcn_name, filename);
+
+ file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ VRFY((file_id >= 0), "H5Fcreate() failed");
+
+ err = H5Pclose(fapl_id);
+ VRFY((err >= 0), "H5Pclose(fapl_id) failed");
+
+ /*
+ * Step 1: create the data sets and write data.
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Creating the datasets.\n", mpi_rank, fcn_name);
+
+ disk_size[0] = (hsize_t)(LOCAL_DATA_SIZE * mpi_size);
+ mem_size[0] = (hsize_t)(LOCAL_DATA_SIZE);
+
+ for (i = 0; i < NUM_DATA_SETS; i++) {
+
+ disk_space[i] = H5Screate_simple(1, disk_size, NULL);
+ VRFY((disk_space[i] >= 0), "H5Screate_simple(1) failed.\n");
+
+ dataset[i] = H5Dcreate2(file_id, dataset_name[i], H5T_NATIVE_DOUBLE, disk_space[i], H5P_DEFAULT,
+ H5P_DEFAULT, H5P_DEFAULT);
+
+ VRFY((dataset[i] >= 0), "H5Dcreate(1) failed.\n");
+ }
+
+ /*
+ * setup data transfer property list
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Setting up dxpl.\n", mpi_rank, fcn_name);
+
+ dxpl_id = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((dxpl_id != -1), "H5Pcreate(H5P_DATASET_XFER) failed.\n");
+
+ err = H5Pset_dxpl_mpio(dxpl_id, H5FD_MPIO_COLLECTIVE);
+ VRFY((err >= 0), "H5Pset_dxpl_mpio(dxpl_id, H5FD_MPIO_COLLECTIVE) failed.\n");
+
+ /*
+ * write data to the data sets
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Writing datasets.\n", mpi_rank, fcn_name);
+
+ disk_count[0] = (hsize_t)(LOCAL_DATA_SIZE);
+ disk_start[0] = (hsize_t)(LOCAL_DATA_SIZE * mpi_rank);
+ mem_count[0] = (hsize_t)(LOCAL_DATA_SIZE);
+ mem_start[0] = (hsize_t)(0);
+
+ for (j = 0; j < LOCAL_DATA_SIZE; j++) {
+ data[j] = (double)(mpi_rank + 1);
+ }
+
+ for (i = 0; i < NUM_DATA_SETS; i++) {
+ err = H5Sselect_hyperslab(disk_space[i], H5S_SELECT_SET, disk_start, NULL, disk_count, NULL);
+ VRFY((err >= 0), "H5Sselect_hyperslab(1) failed.\n");
+ mem_space[i] = H5Screate_simple(1, mem_size, NULL);
+ VRFY((mem_space[i] >= 0), "H5Screate_simple(2) failed.\n");
+ err = H5Sselect_hyperslab(mem_space[i], H5S_SELECT_SET, mem_start, NULL, mem_count, NULL);
+ VRFY((err >= 0), "H5Sselect_hyperslab(2) failed.\n");
+ err = H5Dwrite(dataset[i], H5T_NATIVE_DOUBLE, mem_space[i], disk_space[i], dxpl_id, data);
+ VRFY((err >= 0), "H5Dwrite(1) failed.\n");
+ for (j = 0; j < LOCAL_DATA_SIZE; j++)
+ data[j] *= 10.0;
+ }
+
+ /*
+ * close the data spaces
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: closing dataspaces.\n", mpi_rank, fcn_name);
+
+ for (i = 0; i < NUM_DATA_SETS; i++) {
+ err = H5Sclose(disk_space[i]);
+ VRFY((err >= 0), "H5Sclose(disk_space[i]) failed.\n");
+ err = H5Sclose(mem_space[i]);
+ VRFY((err >= 0), "H5Sclose(mem_space[i]) failed.\n");
+ }
+
+ /* End of Step 1: create the data sets and write data. */
+
+ /*
+ * flush the metadata cache
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: flushing metadata cache.\n", mpi_rank, fcn_name);
+ err = H5Fflush(file_id, H5F_SCOPE_GLOBAL);
+ VRFY((err >= 0), "H5Fflush(1) failed.\n");
+
+ /* Tell the reader to check the file up to steps. */
+ steps++;
+ Reader_check(mrc, steps, steps_done);
+ VRFY((MPI_SUCCESS == mrc), "Reader_check failed");
+
+ /*
+ * Step 2: write attributes to each dataset
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: writing attributes.\n", mpi_rank, fcn_name);
+
+ att_size[0] = (hsize_t)(LOCAL_DATA_SIZE);
+ for (j = 0; j < LOCAL_DATA_SIZE; j++) {
+ att[j] = (double)(j + 1);
+ }
+
+ for (i = 0; i < NUM_DATA_SETS; i++) {
+ att_space[i] = H5Screate_simple(1, att_size, NULL);
+ VRFY((att_space[i] >= 0), "H5Screate_simple(3) failed.\n");
+ att_id[i] =
+ H5Acreate2(dataset[i], att_name[i], H5T_NATIVE_DOUBLE, att_space[i], H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((att_id[i] >= 0), "H5Acreate(1) failed.\n");
+ err = H5Awrite(att_id[i], H5T_NATIVE_DOUBLE, att);
+ VRFY((err >= 0), "H5Awrite(1) failed.\n");
+ for (j = 0; j < LOCAL_DATA_SIZE; j++) {
+ att[j] /= 10.0;
+ }
+ }
+
+ /*
+ * close attribute IDs and spaces
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: closing attr ids and spaces .\n", mpi_rank, fcn_name);
+
+ for (i = 0; i < NUM_DATA_SETS; i++) {
+ err = H5Sclose(att_space[i]);
+ VRFY((err >= 0), "H5Sclose(att_space[i]) failed.\n");
+ err = H5Aclose(att_id[i]);
+ VRFY((err >= 0), "H5Aclose(att_id[i]) failed.\n");
+ }
+
+ /* End of Step 2: write attributes to each dataset */
+
+ /*
+ * flush the metadata cache again
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: flushing metadata cache.\n", mpi_rank, fcn_name);
+ err = H5Fflush(file_id, H5F_SCOPE_GLOBAL);
+ VRFY((err >= 0), "H5Fflush(2) failed.\n");
+
+ /* Tell the reader to check the file up to steps. */
+ steps++;
+ Reader_check(mrc, steps, steps_done);
+ VRFY((MPI_SUCCESS == mrc), "Reader_check failed");
+
+ /*
+ * Step 3: write large attributes to each dataset
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: writing large attributes.\n", mpi_rank, fcn_name);
+
+ lg_att_size[0] = (hsize_t)(LARGE_ATTR_SIZE);
+
+ for (j = 0; j < LARGE_ATTR_SIZE; j++) {
+ lg_att[j] = (double)(j + 1);
+ }
+
+ for (i = 0; i < NUM_DATA_SETS; i++) {
+ lg_att_space[i] = H5Screate_simple(1, lg_att_size, NULL);
+ VRFY((lg_att_space[i] >= 0), "H5Screate_simple(4) failed.\n");
+ lg_att_id[i] = H5Acreate2(dataset[i], lg_att_name[i], H5T_NATIVE_DOUBLE, lg_att_space[i], H5P_DEFAULT,
+ H5P_DEFAULT);
+ VRFY((lg_att_id[i] >= 0), "H5Acreate(2) failed.\n");
+ err = H5Awrite(lg_att_id[i], H5T_NATIVE_DOUBLE, lg_att);
+ VRFY((err >= 0), "H5Awrite(2) failed.\n");
+ for (j = 0; j < LARGE_ATTR_SIZE; j++) {
+ lg_att[j] /= 10.0;
+ }
+ }
+
+ /* Step 3: write large attributes to each dataset */
+
+ /*
+ * flush the metadata cache yet again to clean the object headers.
+ *
+ * This is an attempt to create a situation where we have dirty
+ * object header continuation chunks, but clean object headers
+ * to verify a speculative bug fix -- it doesn't seem to work,
+ * but I will leave the code in anyway, as the object header
+ * code is going to change a lot in the near future.
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: flushing metadata cache.\n", mpi_rank, fcn_name);
+ err = H5Fflush(file_id, H5F_SCOPE_GLOBAL);
+ VRFY((err >= 0), "H5Fflush(3) failed.\n");
+
+ /* Tell the reader to check the file up to steps. */
+ steps++;
+ Reader_check(mrc, steps, steps_done);
+ VRFY((MPI_SUCCESS == mrc), "Reader_check failed");
+
+ /*
+ * Step 4: write different large attributes to each dataset
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: writing different large attributes.\n", mpi_rank, fcn_name);
+
+ for (j = 0; j < LARGE_ATTR_SIZE; j++) {
+ lg_att[j] = (double)(j + 2);
+ }
+
+ for (i = 0; i < NUM_DATA_SETS; i++) {
+ err = H5Awrite(lg_att_id[i], H5T_NATIVE_DOUBLE, lg_att);
+ VRFY((err >= 0), "H5Awrite(2) failed.\n");
+ for (j = 0; j < LARGE_ATTR_SIZE; j++) {
+ lg_att[j] /= 10.0;
+ }
+ }
+
+ /* End of Step 4: write different large attributes to each dataset */
+
+ /*
+ * flush the metadata cache again
+ */
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: flushing metadata cache.\n", mpi_rank, fcn_name);
+ err = H5Fflush(file_id, H5F_SCOPE_GLOBAL);
+ VRFY((err >= 0), "H5Fflush(3) failed.\n");
+
+ /* Tell the reader to check the file up to steps. */
+ steps++;
+ Reader_check(mrc, steps, steps_done);
+ VRFY((MPI_SUCCESS == mrc), "Reader_check failed");
+
+ /* Step 5: Close all objects and the file */
+
+ /*
+ * close large attribute IDs and spaces
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: closing large attr ids and spaces .\n", mpi_rank, fcn_name);
+
+ for (i = 0; i < NUM_DATA_SETS; i++) {
+
+ err = H5Sclose(lg_att_space[i]);
+ VRFY((err >= 0), "H5Sclose(lg_att_space[i]) failed.\n");
+ err = H5Aclose(lg_att_id[i]);
+ VRFY((err >= 0), "H5Aclose(lg_att_id[i]) failed.\n");
+ }
+
+ /*
+ * close the data sets
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: closing datasets .\n", mpi_rank, fcn_name);
+
+ for (i = 0; i < NUM_DATA_SETS; i++) {
+ err = H5Dclose(dataset[i]);
+ VRFY((err >= 0), "H5Dclose(dataset[i])1 failed.\n");
+ }
+
+ /*
+ * close the data transfer property list.
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: closing dxpl .\n", mpi_rank, fcn_name);
+
+ err = H5Pclose(dxpl_id);
+ VRFY((err >= 0), "H5Pclose(dxpl_id) failed.\n");
+
+ /*
+ * Close file.
+ */
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: closing file.\n", mpi_rank, fcn_name);
+
+ err = H5Fclose(file_id);
+ VRFY((err >= 0), "H5Fclose(1) failed");
+
+ /* End of Step 5: Close all objects and the file */
+ /* Tell the reader to check the file up to steps. */
+ steps++;
+ Reader_check(mrc, steps, steps_done);
+ VRFY((MPI_SUCCESS == mrc), "Reader_check failed");
+
+ /* All done. Inform reader to end. */
+ steps = 0;
+ Reader_check(mrc, steps, steps_done);
+ VRFY((MPI_SUCCESS == mrc), "Reader_check failed");
+
+ if (verbose)
+ HDfprintf(stdout, "%0d:%s: Done.\n", mpi_rank, fcn_name);
+
+ return;
+
+} /* rr_obj_hdr_flush_confusion_writer() */
+
+/*-------------------------------------------------------------------------
+ * Function:    rr_obj_hdr_flush_confusion_reader
+ *
+ * Purpose:     Reader half of the object-header flush confusion test.
+ *              Repeatedly re-opens the test file read-only (parallel,
+ *              collective I/O) and verifies its contents up to the step
+ *              number received from the writer via Reader_wait().  A
+ *              received step of 0 terminates the loop.  Data mismatches
+ *              increment the global nerrors counter; API failures abort
+ *              through VRFY.
+ *
+ * Fixes relative to the previous revision:
+ *              - the VRFY after H5Aget_type() in step 3/4 now checks the
+ *                returned type id instead of the stale 'err' value;
+ *              - the attribute datatype id opened in step 2 is now closed
+ *                (it previously leaked once per dataset per iteration).
+ *-------------------------------------------------------------------------
+ */
+void
+rr_obj_hdr_flush_confusion_reader(MPI_Comm comm)
+{
+    int     i;
+    int     j;
+    hid_t   file_id = -1;
+    hid_t   fapl_id = -1;
+    hid_t   dxpl_id = -1;
+    hid_t   lg_att_id[NUM_DATA_SETS];
+    hid_t   lg_att_type[NUM_DATA_SETS];
+    hid_t   disk_space[NUM_DATA_SETS];
+    hid_t   mem_space[NUM_DATA_SETS];
+    hid_t   dataset[NUM_DATA_SETS];
+    hsize_t disk_count[1];
+    hsize_t disk_start[1];
+    hsize_t mem_count[1];
+    hsize_t mem_size[1];
+    hsize_t mem_start[1];
+    herr_t  err;
+    htri_t  tri_err;
+    double  data[LOCAL_DATA_SIZE];
+    double  data_read[LOCAL_DATA_SIZE];
+    double  att[LOCAL_DATA_SIZE];
+    double  att_read[LOCAL_DATA_SIZE];
+    double  lg_att[LARGE_ATTR_SIZE];
+    double  lg_att_read[LARGE_ATTR_SIZE];
+
+    /* MPI variables */
+    /* world communication size and rank */
+    int mpi_world_size;
+    int mpi_world_rank;
+    /* private communicator size and rank */
+    int mpi_size;
+    int mpi_rank;
+    int mrc;             /* mpi error code */
+    int steps      = -1; /* How far (steps) to verify the file */
+    int steps_done = -1; /* How far (steps) have been verified */
+
+    /* test bed related variables */
+    const char   *fcn_name = "rr_obj_hdr_flush_confusion_reader";
+    const hbool_t verbose  = FALSE;
+#if 0
+    const H5Ptest_param_t *pt;
+#endif
+    char *filename;
+
+    /*
+     * setup test bed related variables:
+     */
+
+#if 0
+    pt = (const H5Ptest_param_t *)GetTestParameters();
+#endif
+    /* filename = pt->name; */ filename = PARATESTFILE;
+
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_world_rank);
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_world_size);
+    MPI_Comm_rank(comm, &mpi_rank);
+    MPI_Comm_size(comm, &mpi_size);
+
+    /* Repeatedly re-open the file and verify its contents until it is */
+    /* told to end (when steps=0). */
+    while (steps_done != 0) {
+        Reader_wait(mrc, steps);
+        VRFY((mrc >= 0), "Reader_wait failed");
+        steps_done = 0;
+
+        if (steps > 0) {
+            /*
+             * Set up file access property list with parallel I/O access
+             */
+
+            if (verbose)
+                HDfprintf(stdout, "%0d:%s: Setting up property list.\n", mpi_rank, fcn_name);
+
+            fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+            VRFY((fapl_id != -1), "H5Pcreate(H5P_FILE_ACCESS) failed");
+            err = H5Pset_fapl_mpio(fapl_id, comm, MPI_INFO_NULL);
+            VRFY((err >= 0), "H5Pset_fapl_mpio() failed");
+
+            /*
+             * Create a new file collectively and release property list identifier.
+             */
+
+            if (verbose)
+                HDfprintf(stdout, "%0d:%s: Re-open file \"%s\".\n", mpi_rank, fcn_name, filename);
+
+            file_id = H5Fopen(filename, H5F_ACC_RDONLY, fapl_id);
+            VRFY((file_id >= 0), "H5Fopen() failed");
+            err = H5Pclose(fapl_id);
+            VRFY((err >= 0), "H5Pclose(fapl_id) failed");
+
+#if 1
+            if (steps >= 1) {
+                /*=====================================================*
+                 * Step 1: open the data sets and read data.
+                 *=====================================================*/
+
+                if (verbose)
+                    HDfprintf(stdout, "%0d:%s: opening the datasets.\n", mpi_rank, fcn_name);
+
+                /* Mark all dataset ids invalid so the cleanup loop at the
+                 * bottom only closes the ones actually opened. */
+                for (i = 0; i < NUM_DATA_SETS; i++) {
+                    dataset[i] = -1;
+                }
+
+                for (i = 0; i < NUM_DATA_SETS; i++) {
+                    dataset[i] = H5Dopen2(file_id, dataset_name[i], H5P_DEFAULT);
+                    VRFY((dataset[i] >= 0), "H5Dopen(1) failed.\n");
+                    disk_space[i] = H5Dget_space(dataset[i]);
+                    VRFY((disk_space[i] >= 0), "H5Dget_space failed.\n");
+                }
+
+                /*
+                 * setup data transfer property list
+                 */
+
+                if (verbose)
+                    HDfprintf(stdout, "%0d:%s: Setting up dxpl.\n", mpi_rank, fcn_name);
+
+                dxpl_id = H5Pcreate(H5P_DATASET_XFER);
+                VRFY((dxpl_id != -1), "H5Pcreate(H5P_DATASET_XFER) failed.\n");
+                err = H5Pset_dxpl_mpio(dxpl_id, H5FD_MPIO_COLLECTIVE);
+                VRFY((err >= 0), "H5Pset_dxpl_mpio(dxpl_id, H5FD_MPIO_COLLECTIVE) failed.\n");
+
+                /*
+                 * read data from the data sets
+                 */
+
+                if (verbose)
+                    HDfprintf(stdout, "%0d:%s: Reading datasets.\n", mpi_rank, fcn_name);
+
+                /* Each rank reads its own contiguous slab, mirroring the
+                 * writer's selection. */
+                disk_count[0] = (hsize_t)(LOCAL_DATA_SIZE);
+                disk_start[0] = (hsize_t)(LOCAL_DATA_SIZE * mpi_rank);
+
+                mem_size[0] = (hsize_t)(LOCAL_DATA_SIZE);
+
+                mem_count[0] = (hsize_t)(LOCAL_DATA_SIZE);
+                mem_start[0] = (hsize_t)(0);
+
+                /* set up expected data for verification */
+                for (j = 0; j < LOCAL_DATA_SIZE; j++) {
+                    data[j] = (double)(mpi_rank + 1);
+                }
+
+                for (i = 0; i < NUM_DATA_SETS; i++) {
+                    err = H5Sselect_hyperslab(disk_space[i], H5S_SELECT_SET, disk_start, NULL, disk_count,
+                                              NULL);
+                    VRFY((err >= 0), "H5Sselect_hyperslab(1) failed.\n");
+                    mem_space[i] = H5Screate_simple(1, mem_size, NULL);
+                    VRFY((mem_space[i] >= 0), "H5Screate_simple(2) failed.\n");
+                    err = H5Sselect_hyperslab(mem_space[i], H5S_SELECT_SET, mem_start, NULL, mem_count, NULL);
+                    VRFY((err >= 0), "H5Sselect_hyperslab(2) failed.\n");
+                    err = H5Dread(dataset[i], H5T_NATIVE_DOUBLE, mem_space[i], disk_space[i], dxpl_id,
+                                  data_read);
+                    VRFY((err >= 0), "H5Dread(1) failed.\n");
+
+                    /* compare read data with expected data */
+                    for (j = 0; j < LOCAL_DATA_SIZE; j++)
+                        if (!H5_DBL_ABS_EQUAL(data_read[j], data[j])) {
+                            HDfprintf(stdout,
+                                      "%0d:%s: Reading datasets value failed in "
+                                      "Dataset %d, at position %d: expect %f, got %f.\n",
+                                      mpi_rank, fcn_name, i, j, data[j], data_read[j]);
+                            nerrors++;
+                        }
+                    /* the writer scaled each successive dataset by 10 */
+                    for (j = 0; j < LOCAL_DATA_SIZE; j++)
+                        data[j] *= 10.0;
+                }
+
+                /*
+                 * close the data spaces
+                 */
+
+                if (verbose)
+                    HDfprintf(stdout, "%0d:%s: closing dataspaces.\n", mpi_rank, fcn_name);
+
+                for (i = 0; i < NUM_DATA_SETS; i++) {
+                    err = H5Sclose(disk_space[i]);
+                    VRFY((err >= 0), "H5Sclose(disk_space[i]) failed.\n");
+                    err = H5Sclose(mem_space[i]);
+                    VRFY((err >= 0), "H5Sclose(mem_space[i]) failed.\n");
+                }
+                steps_done++;
+            }
+            /* End of Step 1: open the data sets and read data. */
+#endif
+
+#if 1
+            /*=====================================================*
+             * Step 2: reading attributes from each dataset
+             *=====================================================*/
+
+            if (steps >= 2) {
+                if (verbose)
+                    HDfprintf(stdout, "%0d:%s: reading attributes.\n", mpi_rank, fcn_name);
+
+                for (j = 0; j < LOCAL_DATA_SIZE; j++) {
+                    att[j] = (double)(j + 1);
+                }
+
+                for (i = 0; i < NUM_DATA_SETS; i++) {
+                    hid_t att_id, att_type;
+
+                    att_id = H5Aopen(dataset[i], att_name[i], H5P_DEFAULT);
+                    VRFY((att_id >= 0), "H5Aopen failed.\n");
+                    att_type = H5Aget_type(att_id);
+                    VRFY((att_type >= 0), "H5Aget_type failed.\n");
+                    tri_err = H5Tequal(att_type, H5T_NATIVE_DOUBLE);
+                    VRFY((tri_err >= 0), "H5Tequal failed.\n");
+                    if (tri_err == 0) {
+                        HDfprintf(stdout, "%0d:%s: Mismatched Attribute type of Dataset %d.\n", mpi_rank,
+                                  fcn_name, i);
+                        nerrors++;
+                    }
+                    else {
+                        /* should verify attribute size before H5Aread */
+                        err = H5Aread(att_id, H5T_NATIVE_DOUBLE, att_read);
+                        VRFY((err >= 0), "H5Aread failed.\n");
+                        /* compare read attribute data with expected data */
+                        for (j = 0; j < LOCAL_DATA_SIZE; j++)
+                            if (!H5_DBL_ABS_EQUAL(att_read[j], att[j])) {
+                                HDfprintf(stdout,
+                                          "%0d:%s: Mismatched attribute data read in Dataset %d, at position "
+                                          "%d: expect %f, got %f.\n",
+                                          mpi_rank, fcn_name, i, j, att[j], att_read[j]);
+                                nerrors++;
+                            }
+                        for (j = 0; j < LOCAL_DATA_SIZE; j++) {
+                            att[j] /= 10.0;
+                        }
+                    }
+                    /* close the datatype id returned by H5Aget_type()
+                     * (previously leaked) */
+                    err = H5Tclose(att_type);
+                    VRFY((err >= 0), "H5Tclose failed.\n");
+                    err = H5Aclose(att_id);
+                    VRFY((err >= 0), "H5Aclose failed.\n");
+                }
+                steps_done++;
+            }
+            /* End of Step 2: reading attributes from each dataset */
+#endif
+
+#if 1
+            /*=====================================================*
+             * Step 3 or 4: read large attributes from each dataset.
+             * Step 4 has different attribute value from step 3.
+             *=====================================================*/
+
+            if (steps >= 3) {
+                if (verbose)
+                    HDfprintf(stdout, "%0d:%s: reading large attributes.\n", mpi_rank, fcn_name);
+
+                for (j = 0; j < LARGE_ATTR_SIZE; j++) {
+                    lg_att[j] = (steps == 3) ? (double)(j + 1) : (double)(j + 2);
+                }
+
+                for (i = 0; i < NUM_DATA_SETS; i++) {
+                    lg_att_id[i] = H5Aopen(dataset[i], lg_att_name[i], H5P_DEFAULT);
+                    VRFY((lg_att_id[i] >= 0), "H5Aopen(2) failed.\n");
+                    lg_att_type[i] = H5Aget_type(lg_att_id[i]);
+                    /* check the returned type id, not the stale 'err' */
+                    VRFY((lg_att_type[i] >= 0), "H5Aget_type failed.\n");
+                    tri_err = H5Tequal(lg_att_type[i], H5T_NATIVE_DOUBLE);
+                    VRFY((tri_err >= 0), "H5Tequal failed.\n");
+                    if (tri_err == 0) {
+                        HDfprintf(stdout, "%0d:%s: Mismatched Large attribute type of Dataset %d.\n",
+                                  mpi_rank, fcn_name, i);
+                        nerrors++;
+                    }
+                    else {
+                        /* should verify large attribute size before H5Aread */
+                        err = H5Aread(lg_att_id[i], H5T_NATIVE_DOUBLE, lg_att_read);
+                        VRFY((err >= 0), "H5Aread failed.\n");
+                        /* compare read attribute data with expected data */
+                        for (j = 0; j < LARGE_ATTR_SIZE; j++)
+                            if (!H5_DBL_ABS_EQUAL(lg_att_read[j], lg_att[j])) {
+                                HDfprintf(stdout,
+                                          "%0d:%s: Mismatched large attribute data read in Dataset %d, at "
+                                          "position %d: expect %f, got %f.\n",
+                                          mpi_rank, fcn_name, i, j, lg_att[j], lg_att_read[j]);
+                                nerrors++;
+                            }
+                        for (j = 0; j < LARGE_ATTR_SIZE; j++) {
+
+                            lg_att[j] /= 10.0;
+                        }
+                    }
+                    err = H5Tclose(lg_att_type[i]);
+                    VRFY((err >= 0), "H5Tclose failed.\n");
+                    err = H5Aclose(lg_att_id[i]);
+                    VRFY((err >= 0), "H5Aclose failed.\n");
+                }
+                /* Both step 3 and 4 use this same read checking code. */
+                steps_done = (steps == 3) ? 3 : 4;
+            }
+
+            /* End of Step 3 or 4: read large attributes from each dataset */
+#endif
+
+            /*=====================================================*
+             * Step 5: read all objects from the file
+             *=====================================================*/
+            if (steps >= 5) {
+                /* nothing extra to verify. The file is closed normally. */
+                /* Just increment steps_done */
+                steps_done++;
+            }
+
+            /*
+             * Close the data sets
+             */
+
+            if (verbose)
+                HDfprintf(stdout, "%0d:%s: closing datasets again.\n", mpi_rank, fcn_name);
+
+            for (i = 0; i < NUM_DATA_SETS; i++) {
+                if (dataset[i] >= 0) {
+                    err = H5Dclose(dataset[i]);
+                    VRFY((err >= 0), "H5Dclose(dataset[i])1 failed.\n");
+                }
+            }
+
+            /*
+             * close the data transfer property list.
+             */
+
+            if (verbose)
+                HDfprintf(stdout, "%0d:%s: closing dxpl .\n", mpi_rank, fcn_name);
+
+            err = H5Pclose(dxpl_id);
+            VRFY((err >= 0), "H5Pclose(dxpl_id) failed.\n");
+
+            /*
+             * Close the file
+             */
+            if (verbose)
+                HDfprintf(stdout, "%0d:%s: closing file again.\n", mpi_rank, fcn_name);
+            err = H5Fclose(file_id);
+            VRFY((err >= 0), "H5Fclose(1) failed");
+
+        } /* else if (steps_done==0) */
+        Reader_result(mrc, steps_done);
+    } /* end while(1) */
+
+    if (verbose)
+        HDfprintf(stdout, "%0d:%s: Done.\n", mpi_rank, fcn_name);
+
+    return;
+} /* rr_obj_hdr_flush_confusion_reader() */
+
+#undef NUM_DATA_SETS
+#undef LOCAL_DATA_SIZE
+#undef LARGE_ATTR_SIZE
+#undef Reader_check
+#undef Reader_wait
+#undef Reader_result
+#undef Writer_Root
+#undef Reader_Root
+
+/*
+ * Test creating a chunked dataset in parallel in a file with an alignment set
+ * and an alignment threshold large enough to avoid aligning the chunks but
+ * small enough that the raw data aggregator will be aligned if it is treated as
+ * an object that must be aligned by the library
+ */
+#define CHUNK_SIZE 72
+#define NCHUNKS 32
+#define AGGR_SIZE 2048
+#define EXTRA_ALIGN 100
+
+/* Regression test: a chunked dataset created in parallel with an alignment
+ * threshold chosen so chunks are NOT aligned, but small enough that the raw
+ * data aggregator would be aligned if the library (incorrectly) treated it
+ * as an object requiring alignment.
+ *
+ * Fix relative to the previous revision: the VRFYs following the close
+ * calls now check 'ret' (the close result) instead of re-checking the
+ * already-validated ids, so close failures are no longer silently ignored.
+ */
+void
+chunk_align_bug_1(void)
+{
+    int         mpi_rank;
+    hid_t       file_id, dset_id, fapl_id, dcpl_id, space_id;
+    hsize_t     dims = CHUNK_SIZE * NCHUNKS, cdims = CHUNK_SIZE;
+#if 0
+    h5_stat_size_t file_size;
+    hsize_t align;
+#endif
+    herr_t      ret;
+    const char *filename;
+
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file or dataset aren't supported with this connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    filename = (const char *)PARATESTFILE /* GetTestParameters() */;
+
+    /* Create file without alignment */
+    fapl_id = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type);
+    VRFY((fapl_id >= 0), "create_faccess_plist succeeded");
+    file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+    VRFY((file_id >= 0), "H5Fcreate succeeded");
+
+    /* Close file */
+    ret = H5Fclose(file_id);
+    VRFY((ret >= 0), "H5Fclose succeeded");
+#if 0
+    /* Get file size */
+    file_size = h5_get_file_size(filename, fapl_id);
+    VRFY((file_size >= 0), "h5_get_file_size succeeded");
+
+    /* Calculate alignment value, set to allow a chunk to squeak in between the
+     * original EOF and the aligned location of the aggregator.  Add some space
+     * for the dataset metadata */
+    align = (hsize_t)file_size + CHUNK_SIZE + EXTRA_ALIGN;
+#endif
+
+    /* Set aggregator size and alignment, disable metadata aggregator */
+    HDassert(AGGR_SIZE > CHUNK_SIZE);
+    ret = H5Pset_small_data_block_size(fapl_id, AGGR_SIZE);
+    VRFY((ret >= 0), "H5Pset_small_data_block_size succeeded");
+    ret = H5Pset_meta_block_size(fapl_id, 0);
+    VRFY((ret >= 0), "H5Pset_meta_block_size succeeded");
+#if 0
+    ret = H5Pset_alignment(fapl_id, CHUNK_SIZE + 1, align);
+    VRFY((ret >= 0), "H5Pset_small_data_block_size succeeded");
+#endif
+
+    /* Reopen file with new settings */
+    file_id = H5Fopen(filename, H5F_ACC_RDWR, fapl_id);
+    VRFY((file_id >= 0), "H5Fopen succeeded");
+
+    /* Create dataset */
+    space_id = H5Screate_simple(1, &dims, NULL);
+    VRFY((space_id >= 0), "H5Screate_simple succeeded");
+    dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+    VRFY((dcpl_id >= 0), "H5Pcreate succeeded");
+    ret = H5Pset_chunk(dcpl_id, 1, &cdims);
+    VRFY((ret >= 0), "H5Pset_chunk succeeded");
+    dset_id = H5Dcreate2(file_id, "dset", H5T_NATIVE_CHAR, space_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT);
+    VRFY((dset_id >= 0), "H5Dcreate2 succeeded");
+
+    /* Close ids, verifying each close result (previously these VRFYs
+     * re-checked the ids and ignored 'ret') */
+    ret = H5Dclose(dset_id);
+    VRFY((ret >= 0), "H5Dclose succeeded");
+    ret = H5Sclose(space_id);
+    VRFY((ret >= 0), "H5Sclose succeeded");
+    ret = H5Pclose(dcpl_id);
+    VRFY((ret >= 0), "H5Pclose succeeded");
+    ret = H5Pclose(fapl_id);
+    VRFY((ret >= 0), "H5Pclose succeeded");
+
+    /* Close file */
+    ret = H5Fclose(file_id);
+    VRFY((ret >= 0), "H5Fclose succeeded");
+
+    return;
+} /* end chunk_align_bug_1() */
+
+/*=============================================================================
+ * End of t_mdset.c
+ *===========================================================================*/
diff --git a/testpar/API/t_ph5basic.c b/testpar/API/t_ph5basic.c
new file mode 100644
index 0000000..1639aff
--- /dev/null
+++ b/testpar/API/t_ph5basic.c
@@ -0,0 +1,192 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * Test parallel HDF5 basic components
+ */
+
+#include "hdf5.h"
+#include "testphdf5.h"
+
+/*-------------------------------------------------------------------------
+ * Function: test_fapl_mpio_dup
+ *
+ * Purpose: Test if fapl_mpio property list keeps a duplicate of the
+ * communicator and INFO objects given when set; and returns
+ * duplicates of its components when H5Pget_fapl_mpio is called.
+ *
+ * Return: Success: None
+ * Failure: Abort
+ *
+ * Programmer: Albert Cheng
+ * January 9, 2003
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+test_fapl_mpio_dup(void)
+{
+    int      mpi_size, mpi_rank;
+    MPI_Comm comm, comm_tmp;
+    int      mpi_size_old, mpi_rank_old;
+    int      mpi_size_tmp, mpi_rank_tmp;
+    MPI_Info info     = MPI_INFO_NULL;
+    MPI_Info info_tmp = MPI_INFO_NULL;
+    int      mrc;    /* MPI return value */
+    hid_t    acc_pl; /* File access properties */
+    herr_t   ret;    /* HDF5 return value */
+    int      nkeys, nkeys_tmp;
+
+    if (VERBOSE_MED)
+        HDprintf("Verify fapl_mpio duplicates communicator and INFO objects\n");
+
+    /* set up MPI parameters */
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+    if (VERBOSE_MED)
+        HDprintf("rank/size of MPI_COMM_WORLD are %d/%d\n", mpi_rank, mpi_size);
+
+    /* Create a new communicator that has the same processes as MPI_COMM_WORLD.
+     * Use MPI_Comm_split because it is simpler than MPI_Comm_create
+     */
+    mrc = MPI_Comm_split(MPI_COMM_WORLD, 0, 0, &comm);
+    VRFY((mrc == MPI_SUCCESS), "MPI_Comm_split");
+    MPI_Comm_size(comm, &mpi_size_old);
+    MPI_Comm_rank(comm, &mpi_rank_old);
+    if (VERBOSE_MED)
+        HDprintf("rank/size of comm are %d/%d\n", mpi_rank_old, mpi_size_old);
+
+    /* create a new INFO object with some trivial information. */
+    mrc = MPI_Info_create(&info);
+    VRFY((mrc == MPI_SUCCESS), "MPI_Info_create");
+    mrc = MPI_Info_set(info, "hdf_info_name", "XYZ");
+    VRFY((mrc == MPI_SUCCESS), "MPI_Info_set");
+    /* Record the key count now; later H5Pget_fapl_mpio results are compared
+     * against it.  The MPI_INFO_NULL guard mirrors the guards around every
+     * later read of nkeys_tmp. */
+    if (MPI_INFO_NULL != info) {
+        mrc = MPI_Info_get_nkeys(info, &nkeys);
+        VRFY((mrc == MPI_SUCCESS), "MPI_Info_get_nkeys");
+    }
+#if 0
+    if (VERBOSE_MED)
+	h5_dump_info_object(info);
+#endif
+
+    acc_pl = H5Pcreate(H5P_FILE_ACCESS);
+    VRFY((acc_pl >= 0), "H5P_FILE_ACCESS");
+
+    /* The property list should store DUPLICATES of comm and info, which is
+     * exactly what the rest of this test verifies. */
+    ret = H5Pset_fapl_mpio(acc_pl, comm, info);
+    VRFY((ret >= 0), "");
+
+    /* Case 1:
+     * Free the created communicator and INFO object.
+     * Check if the access property list is still valid and can return
+     * valid communicator and INFO object.
+     */
+    mrc = MPI_Comm_free(&comm);
+    VRFY((mrc == MPI_SUCCESS), "MPI_Comm_free");
+    if (MPI_INFO_NULL != info) {
+        mrc = MPI_Info_free(&info);
+        VRFY((mrc == MPI_SUCCESS), "MPI_Info_free");
+    }
+
+    /* If the fapl only kept references, comm_tmp/info_tmp would now be
+     * dangling; working size/rank queries prove duplication happened. */
+    ret = H5Pget_fapl_mpio(acc_pl, &comm_tmp, &info_tmp);
+    VRFY((ret >= 0), "H5Pget_fapl_mpio");
+    MPI_Comm_size(comm_tmp, &mpi_size_tmp);
+    MPI_Comm_rank(comm_tmp, &mpi_rank_tmp);
+    if (VERBOSE_MED)
+        HDprintf("After H5Pget_fapl_mpio: rank/size of comm are %d/%d\n", mpi_rank_tmp, mpi_size_tmp);
+    VRFY((mpi_size_tmp == mpi_size), "MPI_Comm_size");
+    VRFY((mpi_rank_tmp == mpi_rank), "MPI_Comm_rank");
+    if (MPI_INFO_NULL != info_tmp) {
+        mrc = MPI_Info_get_nkeys(info_tmp, &nkeys_tmp);
+        VRFY((mrc == MPI_SUCCESS), "MPI_Info_get_nkeys");
+        VRFY((nkeys_tmp == nkeys), "new and old nkeys equal");
+    }
+#if 0
+    if (VERBOSE_MED)
+	h5_dump_info_object(info_tmp);
+#endif
+
+    /* Case 2:
+     * Free the retrieved communicator and INFO object.
+     * Check if the access property list is still valid and can return
+     * valid communicator and INFO object.
+     * Also verify the NULL argument option.
+     */
+    mrc = MPI_Comm_free(&comm_tmp);
+    VRFY((mrc == MPI_SUCCESS), "MPI_Comm_free");
+    if (MPI_INFO_NULL != info_tmp) {
+        mrc = MPI_Info_free(&info_tmp);
+        VRFY((mrc == MPI_SUCCESS), "MPI_Info_free");
+    }
+
+    /* check NULL argument options. */
+    ret = H5Pget_fapl_mpio(acc_pl, &comm_tmp, NULL);
+    VRFY((ret >= 0), "H5Pget_fapl_mpio Comm only");
+    mrc = MPI_Comm_free(&comm_tmp);
+    VRFY((mrc == MPI_SUCCESS), "MPI_Comm_free");
+
+    ret = H5Pget_fapl_mpio(acc_pl, NULL, &info_tmp);
+    VRFY((ret >= 0), "H5Pget_fapl_mpio Info only");
+    if (MPI_INFO_NULL != info_tmp) {
+        mrc = MPI_Info_free(&info_tmp);
+        VRFY((mrc == MPI_SUCCESS), "MPI_Info_free");
+    }
+
+    ret = H5Pget_fapl_mpio(acc_pl, NULL, NULL);
+    VRFY((ret >= 0), "H5Pget_fapl_mpio neither");
+
+    /* now get both and check validity too. */
+    /* Do not free the returned objects which are used in the next case. */
+    ret = H5Pget_fapl_mpio(acc_pl, &comm_tmp, &info_tmp);
+    VRFY((ret >= 0), "H5Pget_fapl_mpio");
+    MPI_Comm_size(comm_tmp, &mpi_size_tmp);
+    MPI_Comm_rank(comm_tmp, &mpi_rank_tmp);
+    if (VERBOSE_MED)
+        HDprintf("After second H5Pget_fapl_mpio: rank/size of comm are %d/%d\n", mpi_rank_tmp, mpi_size_tmp);
+    VRFY((mpi_size_tmp == mpi_size), "MPI_Comm_size");
+    VRFY((mpi_rank_tmp == mpi_rank), "MPI_Comm_rank");
+    if (MPI_INFO_NULL != info_tmp) {
+        mrc = MPI_Info_get_nkeys(info_tmp, &nkeys_tmp);
+        VRFY((mrc == MPI_SUCCESS), "MPI_Info_get_nkeys");
+        VRFY((nkeys_tmp == nkeys), "new and old nkeys equal");
+    }
+#if 0
+    if (VERBOSE_MED)
+	h5_dump_info_object(info_tmp);
+#endif
+
+    /* Case 3:
+     * Close the property list and verify the retrieved communicator and INFO
+     * object are still valid.
+     */
+    /* NOTE(review): H5Pclose return value is unchecked here, unlike every
+     * other HDF5 call in this test -- consider VRFY'ing it. */
+    H5Pclose(acc_pl);
+    MPI_Comm_size(comm_tmp, &mpi_size_tmp);
+    MPI_Comm_rank(comm_tmp, &mpi_rank_tmp);
+    if (VERBOSE_MED)
+        HDprintf("After Property list closed: rank/size of comm are %d/%d\n", mpi_rank_tmp, mpi_size_tmp);
+    if (MPI_INFO_NULL != info_tmp) {
+        mrc = MPI_Info_get_nkeys(info_tmp, &nkeys_tmp);
+        VRFY((mrc == MPI_SUCCESS), "MPI_Info_get_nkeys");
+    }
+#if 0
+    if (VERBOSE_MED)
+	h5_dump_info_object(info_tmp);
+#endif
+
+    /* clean up */
+    mrc = MPI_Comm_free(&comm_tmp);
+    VRFY((mrc == MPI_SUCCESS), "MPI_Comm_free");
+    if (MPI_INFO_NULL != info_tmp) {
+        mrc = MPI_Info_free(&info_tmp);
+        VRFY((mrc == MPI_SUCCESS), "MPI_Info_free");
+    }
+} /* end test_fapl_mpio_dup() */
diff --git a/testpar/API/t_prop.c b/testpar/API/t_prop.c
new file mode 100644
index 0000000..3659501
--- /dev/null
+++ b/testpar/API/t_prop.c
@@ -0,0 +1,646 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * Parallel tests for encoding/decoding plists sent between processes
+ */
+
+#include "hdf5.h"
+#include "testphdf5.h"
+
+#if 0
+#include "H5ACprivate.h"
+#include "H5Pprivate.h"
+#endif
+
+static int
+test_encode_decode(hid_t orig_pl, int mpi_rank, int recv_proc)
+{
+ MPI_Request req[2];
+ MPI_Status status;
+ hid_t pl; /* Decoded property list */
+ size_t buf_size = 0;
+ void *sbuf = NULL;
+ herr_t ret; /* Generic return value */
+
+ if (mpi_rank == 0) {
+ int send_size = 0;
+
+ /* first call to encode returns only the size of the buffer needed */
+ ret = H5Pencode2(orig_pl, NULL, &buf_size, H5P_DEFAULT);
+ VRFY((ret >= 0), "H5Pencode2 succeeded");
+
+ sbuf = (uint8_t *)HDmalloc(buf_size);
+
+ ret = H5Pencode2(orig_pl, sbuf, &buf_size, H5P_DEFAULT);
+ VRFY((ret >= 0), "H5Pencode2 succeeded");
+
+ /* this is a temp fix to send this size_t */
+ send_size = (int)buf_size;
+
+ MPI_Isend(&send_size, 1, MPI_INT, recv_proc, 123, MPI_COMM_WORLD, &req[0]);
+ MPI_Isend(sbuf, send_size, MPI_BYTE, recv_proc, 124, MPI_COMM_WORLD, &req[1]);
+ } /* end if */
+
+ if (mpi_rank == recv_proc) {
+ int recv_size;
+ void *rbuf;
+
+ MPI_Recv(&recv_size, 1, MPI_INT, 0, 123, MPI_COMM_WORLD, &status);
+ VRFY((recv_size >= 0), "MPI_Recv succeeded");
+ buf_size = (size_t)recv_size;
+ rbuf = (uint8_t *)HDmalloc(buf_size);
+ MPI_Recv(rbuf, recv_size, MPI_BYTE, 0, 124, MPI_COMM_WORLD, &status);
+
+ pl = H5Pdecode(rbuf);
+ VRFY((pl >= 0), "H5Pdecode succeeded");
+
+ VRFY(H5Pequal(orig_pl, pl), "Property List Equal Succeeded");
+
+ ret = H5Pclose(pl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ if (NULL != rbuf)
+ HDfree(rbuf);
+ } /* end if */
+
+ if (0 == mpi_rank) {
+ /* gcc 11 complains about passing MPI_STATUSES_IGNORE as an MPI_Status
+ * array. See the discussion here:
+ *
+ * https://github.com/pmodels/mpich/issues/5687
+ */
+ /* H5_GCC_DIAG_OFF("stringop-overflow") */
+ MPI_Waitall(2, req, MPI_STATUSES_IGNORE);
+ /* H5_GCC_DIAG_ON("stringop-overflow") */
+ }
+
+ if (NULL != sbuf)
+ HDfree(sbuf);
+
+ MPI_Barrier(MPI_COMM_WORLD);
+ return 0;
+}
+
+void
+test_plist_ed(void)
+{
+ hid_t dcpl; /* dataset create prop. list */
+ hid_t dapl; /* dataset access prop. list */
+ hid_t dxpl; /* dataset transfer prop. list */
+ hid_t gcpl; /* group create prop. list */
+ hid_t lcpl; /* link create prop. list */
+ hid_t lapl; /* link access prop. list */
+ hid_t ocpypl; /* object copy prop. list */
+ hid_t ocpl; /* object create prop. list */
+ hid_t fapl; /* file access prop. list */
+ hid_t fcpl; /* file create prop. list */
+ hid_t strcpl; /* string create prop. list */
+ hid_t acpl; /* attribute create prop. list */
+
+ int mpi_size, mpi_rank, recv_proc;
+
+ hsize_t chunk_size = 16384; /* chunk size */
+ double fill = 2.7; /* Fill value */
+ size_t nslots = 521 * 2;
+ size_t nbytes = 1048576 * 10;
+ double w0 = 0.5;
+ unsigned max_compact;
+ unsigned min_dense;
+ hsize_t max_size[1]; /*data space maximum size */
+ const char *c_to_f = "x+32";
+ H5AC_cache_config_t my_cache_config = {H5AC__CURR_CACHE_CONFIG_VERSION,
+ TRUE,
+ FALSE,
+ FALSE,
+ "temp",
+ TRUE,
+ FALSE,
+ (2 * 2048 * 1024),
+ 0.3,
+ (64 * 1024 * 1024),
+ (4 * 1024 * 1024),
+ 60000,
+ H5C_incr__threshold,
+ 0.8,
+ 3.0,
+ TRUE,
+ (8 * 1024 * 1024),
+ H5C_flash_incr__add_space,
+ 2.0,
+ 0.25,
+ H5C_decr__age_out_with_threshold,
+ 0.997,
+ 0.8,
+ TRUE,
+ (3 * 1024 * 1024),
+ 3,
+ FALSE,
+ 0.2,
+ (256 * 2048),
+ 1 /* H5AC__DEFAULT_METADATA_WRITE_STRATEGY */};
+
+ herr_t ret; /* Generic return value */
+
+ if (VERBOSE_MED)
+ HDprintf("Encode/Decode DCPLs\n");
+
+ /* set up MPI parameters */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ if (mpi_size == 1)
+ recv_proc = 0;
+ else
+ recv_proc = 1;
+
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((dcpl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_chunk(dcpl, 1, &chunk_size);
+ VRFY((ret >= 0), "H5Pset_chunk succeeded");
+
+ ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_LATE);
+ VRFY((ret >= 0), "H5Pset_alloc_time succeeded");
+
+ ret = H5Pset_fill_value(dcpl, H5T_NATIVE_DOUBLE, &fill);
+ VRFY((ret >= 0), "set fill-value succeeded");
+
+ max_size[0] = 100;
+ ret = H5Pset_external(dcpl, "ext1.data", (off_t)0, (hsize_t)(max_size[0] * sizeof(int) / 4));
+ VRFY((ret >= 0), "set external succeeded");
+ ret = H5Pset_external(dcpl, "ext2.data", (off_t)0, (hsize_t)(max_size[0] * sizeof(int) / 4));
+ VRFY((ret >= 0), "set external succeeded");
+ ret = H5Pset_external(dcpl, "ext3.data", (off_t)0, (hsize_t)(max_size[0] * sizeof(int) / 4));
+ VRFY((ret >= 0), "set external succeeded");
+ ret = H5Pset_external(dcpl, "ext4.data", (off_t)0, (hsize_t)(max_size[0] * sizeof(int) / 4));
+ VRFY((ret >= 0), "set external succeeded");
+
+ ret = test_encode_decode(dcpl, mpi_rank, recv_proc);
+ VRFY((ret >= 0), "test_encode_decode succeeded");
+
+ ret = H5Pclose(dcpl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /******* ENCODE/DECODE DAPLS *****/
+ dapl = H5Pcreate(H5P_DATASET_ACCESS);
+ VRFY((dapl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_chunk_cache(dapl, nslots, nbytes, w0);
+ VRFY((ret >= 0), "H5Pset_chunk_cache succeeded");
+
+ ret = test_encode_decode(dapl, mpi_rank, recv_proc);
+ VRFY((ret >= 0), "test_encode_decode succeeded");
+
+ ret = H5Pclose(dapl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /******* ENCODE/DECODE OCPLS *****/
+ ocpl = H5Pcreate(H5P_OBJECT_CREATE);
+ VRFY((ocpl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_attr_creation_order(ocpl, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED));
+ VRFY((ret >= 0), "H5Pset_attr_creation_order succeeded");
+
+ ret = H5Pset_attr_phase_change(ocpl, 110, 105);
+ VRFY((ret >= 0), "H5Pset_attr_phase_change succeeded");
+
+ ret = H5Pset_filter(ocpl, H5Z_FILTER_FLETCHER32, 0, (size_t)0, NULL);
+ VRFY((ret >= 0), "H5Pset_filter succeeded");
+
+ ret = test_encode_decode(ocpl, mpi_rank, recv_proc);
+ VRFY((ret >= 0), "test_encode_decode succeeded");
+
+ ret = H5Pclose(ocpl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /******* ENCODE/DECODE DXPLS *****/
+ dxpl = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((dxpl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_btree_ratios(dxpl, 0.2, 0.6, 0.2);
+ VRFY((ret >= 0), "H5Pset_btree_ratios succeeded");
+
+ ret = H5Pset_hyper_vector_size(dxpl, 5);
+ VRFY((ret >= 0), "H5Pset_hyper_vector_size succeeded");
+
+ ret = H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+
+ ret = H5Pset_dxpl_mpio_collective_opt(dxpl, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio_collective_opt succeeded");
+
+ ret = H5Pset_dxpl_mpio_chunk_opt(dxpl, H5FD_MPIO_CHUNK_MULTI_IO);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio_chunk_opt succeeded");
+
+ ret = H5Pset_dxpl_mpio_chunk_opt_ratio(dxpl, 30);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio_chunk_opt_ratio succeeded");
+
+ ret = H5Pset_dxpl_mpio_chunk_opt_num(dxpl, 40);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio_chunk_opt_num succeeded");
+
+ ret = H5Pset_edc_check(dxpl, H5Z_DISABLE_EDC);
+ VRFY((ret >= 0), "H5Pset_edc_check succeeded");
+
+ ret = H5Pset_data_transform(dxpl, c_to_f);
+ VRFY((ret >= 0), "H5Pset_data_transform succeeded");
+
+ ret = test_encode_decode(dxpl, mpi_rank, recv_proc);
+ VRFY((ret >= 0), "test_encode_decode succeeded");
+
+ ret = H5Pclose(dxpl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /******* ENCODE/DECODE GCPLS *****/
+ gcpl = H5Pcreate(H5P_GROUP_CREATE);
+ VRFY((gcpl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_local_heap_size_hint(gcpl, 256);
+ VRFY((ret >= 0), "H5Pset_local_heap_size_hint succeeded");
+
+ ret = H5Pset_link_phase_change(gcpl, 2, 2);
+ VRFY((ret >= 0), "H5Pset_link_phase_change succeeded");
+
+ /* Query the group creation properties */
+ ret = H5Pget_link_phase_change(gcpl, &max_compact, &min_dense);
+ VRFY((ret >= 0), "H5Pget_link_phase_change succeeded");
+
+ ret = H5Pset_est_link_info(gcpl, 3, 9);
+ VRFY((ret >= 0), "H5Pset_est_link_info succeeded");
+
+ ret = H5Pset_link_creation_order(gcpl, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED));
+ VRFY((ret >= 0), "H5Pset_link_creation_order succeeded");
+
+ ret = test_encode_decode(gcpl, mpi_rank, recv_proc);
+ VRFY((ret >= 0), "test_encode_decode succeeded");
+
+ ret = H5Pclose(gcpl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /******* ENCODE/DECODE LCPLS *****/
+ lcpl = H5Pcreate(H5P_LINK_CREATE);
+ VRFY((lcpl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_create_intermediate_group(lcpl, TRUE);
+ VRFY((ret >= 0), "H5Pset_create_intermediate_group succeeded");
+
+ ret = test_encode_decode(lcpl, mpi_rank, recv_proc);
+ VRFY((ret >= 0), "test_encode_decode succeeded");
+
+ ret = H5Pclose(lcpl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /******* ENCODE/DECODE LAPLS *****/
+ lapl = H5Pcreate(H5P_LINK_ACCESS);
+ VRFY((lapl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_nlinks(lapl, (size_t)134);
+ VRFY((ret >= 0), "H5Pset_nlinks succeeded");
+
+ ret = H5Pset_elink_acc_flags(lapl, H5F_ACC_RDONLY);
+ VRFY((ret >= 0), "H5Pset_elink_acc_flags succeeded");
+
+ ret = H5Pset_elink_prefix(lapl, "/tmpasodiasod");
+ VRFY((ret >= 0), "H5Pset_elink_prefix succeeded");
+
+ /* Create FAPL for the elink FAPL */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ VRFY((fapl >= 0), "H5Pcreate succeeded");
+ ret = H5Pset_alignment(fapl, 2, 1024);
+ VRFY((ret >= 0), "H5Pset_alignment succeeded");
+
+ ret = H5Pset_elink_fapl(lapl, fapl);
+ VRFY((ret >= 0), "H5Pset_elink_fapl succeeded");
+
+ /* Close the elink's FAPL */
+ ret = H5Pclose(fapl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ ret = test_encode_decode(lapl, mpi_rank, recv_proc);
+ VRFY((ret >= 0), "test_encode_decode succeeded");
+
+ ret = H5Pclose(lapl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /******* ENCODE/DECODE OCPYPLS *****/
+ ocpypl = H5Pcreate(H5P_OBJECT_COPY);
+ VRFY((ocpypl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_copy_object(ocpypl, H5O_COPY_EXPAND_EXT_LINK_FLAG);
+ VRFY((ret >= 0), "H5Pset_copy_object succeeded");
+
+ ret = H5Padd_merge_committed_dtype_path(ocpypl, "foo");
+ VRFY((ret >= 0), "H5Padd_merge_committed_dtype_path succeeded");
+
+ ret = H5Padd_merge_committed_dtype_path(ocpypl, "bar");
+ VRFY((ret >= 0), "H5Padd_merge_committed_dtype_path succeeded");
+
+ ret = test_encode_decode(ocpypl, mpi_rank, recv_proc);
+ VRFY((ret >= 0), "test_encode_decode succeeded");
+
+ ret = H5Pclose(ocpypl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /******* ENCODE/DECODE FAPLS *****/
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ VRFY((fapl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_family_offset(fapl, 1024);
+ VRFY((ret >= 0), "H5Pset_family_offset succeeded");
+
+ ret = H5Pset_meta_block_size(fapl, 2098452);
+ VRFY((ret >= 0), "H5Pset_meta_block_size succeeded");
+
+ ret = H5Pset_sieve_buf_size(fapl, 1048576);
+ VRFY((ret >= 0), "H5Pset_sieve_buf_size succeeded");
+
+ ret = H5Pset_alignment(fapl, 2, 1024);
+ VRFY((ret >= 0), "H5Pset_alignment succeeded");
+
+ ret = H5Pset_cache(fapl, 1024, 128, 10485760, 0.3);
+ VRFY((ret >= 0), "H5Pset_cache succeeded");
+
+ ret = H5Pset_elink_file_cache_size(fapl, 10485760);
+ VRFY((ret >= 0), "H5Pset_elink_file_cache_size succeeded");
+
+ ret = H5Pset_gc_references(fapl, 1);
+ VRFY((ret >= 0), "H5Pset_gc_references succeeded");
+
+ ret = H5Pset_small_data_block_size(fapl, 2048);
+ VRFY((ret >= 0), "H5Pset_small_data_block_size succeeded");
+
+ ret = H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
+ VRFY((ret >= 0), "H5Pset_libver_bounds succeeded");
+
+ ret = H5Pset_fclose_degree(fapl, H5F_CLOSE_WEAK);
+ VRFY((ret >= 0), "H5Pset_fclose_degree succeeded");
+
+ ret = H5Pset_multi_type(fapl, H5FD_MEM_GHEAP);
+ VRFY((ret >= 0), "H5Pset_multi_type succeeded");
+
+ ret = H5Pset_mdc_config(fapl, &my_cache_config);
+ VRFY((ret >= 0), "H5Pset_mdc_config succeeded");
+
+ ret = test_encode_decode(fapl, mpi_rank, recv_proc);
+ VRFY((ret >= 0), "test_encode_decode succeeded");
+
+ ret = H5Pclose(fapl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /******* ENCODE/DECODE FCPLS *****/
+ fcpl = H5Pcreate(H5P_FILE_CREATE);
+ VRFY((fcpl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_userblock(fcpl, 1024);
+ VRFY((ret >= 0), "H5Pset_userblock succeeded");
+
+ ret = H5Pset_istore_k(fcpl, 3);
+ VRFY((ret >= 0), "H5Pset_istore_k succeeded");
+
+ ret = H5Pset_sym_k(fcpl, 4, 5);
+ VRFY((ret >= 0), "H5Pset_sym_k succeeded");
+
+ ret = H5Pset_shared_mesg_nindexes(fcpl, 8);
+ VRFY((ret >= 0), "H5Pset_shared_mesg_nindexes succeeded");
+
+ ret = H5Pset_shared_mesg_index(fcpl, 1, H5O_SHMESG_SDSPACE_FLAG, 32);
+ VRFY((ret >= 0), "H5Pset_shared_mesg_index succeeded");
+
+ ret = H5Pset_shared_mesg_phase_change(fcpl, 60, 20);
+ VRFY((ret >= 0), "H5Pset_shared_mesg_phase_change succeeded");
+
+ ret = H5Pset_sizes(fcpl, 8, 4);
+ VRFY((ret >= 0), "H5Pset_sizes succeeded");
+
+ ret = test_encode_decode(fcpl, mpi_rank, recv_proc);
+ VRFY((ret >= 0), "test_encode_decode succeeded");
+
+ ret = H5Pclose(fcpl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /******* ENCODE/DECODE STRCPLS *****/
+ strcpl = H5Pcreate(H5P_STRING_CREATE);
+ VRFY((strcpl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_char_encoding(strcpl, H5T_CSET_UTF8);
+ VRFY((ret >= 0), "H5Pset_char_encoding succeeded");
+
+ ret = test_encode_decode(strcpl, mpi_rank, recv_proc);
+ VRFY((ret >= 0), "test_encode_decode succeeded");
+
+ ret = H5Pclose(strcpl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /******* ENCODE/DECODE ACPLS *****/
+ acpl = H5Pcreate(H5P_ATTRIBUTE_CREATE);
+ VRFY((acpl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_char_encoding(acpl, H5T_CSET_UTF8);
+ VRFY((ret >= 0), "H5Pset_char_encoding succeeded");
+
+ ret = test_encode_decode(acpl, mpi_rank, recv_proc);
+ VRFY((ret >= 0), "test_encode_decode succeeded");
+
+ ret = H5Pclose(acpl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+}
+
+#if 0
+void
+external_links(void)
+{
+ hid_t lcpl = H5I_INVALID_HID; /* link create prop. list */
+ hid_t lapl = H5I_INVALID_HID; /* link access prop. list */
+ hid_t fapl = H5I_INVALID_HID; /* file access prop. list */
+ hid_t gapl = H5I_INVALID_HID; /* group access prop. list */
+ hid_t fid = H5I_INVALID_HID; /* file id */
+ hid_t group = H5I_INVALID_HID; /* group id */
+ int mpi_size, mpi_rank;
+
+ MPI_Comm comm;
+ int doIO;
+ int i, mrc;
+
+ herr_t ret; /* Generic return value */
+ htri_t tri_status; /* tri return value */
+
+ const char *filename = "HDF5test.h5";
+ const char *filename_ext = "HDF5test_ext.h5";
+ const char *group_path = "/Base/Block/Step";
+ const char *link_name = "link"; /* external link */
+ char link_path[50];
+
+ if (VERBOSE_MED)
+ HDprintf("Check external links\n");
+
+ /* set up MPI parameters */
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ /* Check MPI communicator access properties are passed to
+ linked external files */
+
+ if (mpi_rank == 0) {
+
+ lcpl = H5Pcreate(H5P_LINK_CREATE);
+ VRFY((lcpl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_create_intermediate_group(lcpl, 1);
+ VRFY((ret >= 0), "H5Pset_create_intermediate_group succeeded");
+
+ /* Create file to serve as target for external link.*/
+ fid = H5Fcreate(filename_ext, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((fid >= 0), "H5Fcreate succeeded");
+
+ group = H5Gcreate2(fid, group_path, lcpl, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((group >= 0), "H5Gcreate succeeded");
+
+ ret = H5Gclose(group);
+ VRFY((ret >= 0), "H5Gclose succeeded");
+
+ ret = H5Fclose(fid);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ VRFY((fapl >= 0), "H5Pcreate succeeded");
+
+ /* Create a new file using the file access property list. */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ VRFY((fid >= 0), "H5Fcreate succeeded");
+
+ ret = H5Pclose(fapl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ group = H5Gcreate2(fid, group_path, lcpl, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((group >= 0), "H5Gcreate succeeded");
+
+ /* Create external links to the target files. */
+ ret = H5Lcreate_external(filename_ext, group_path, group, link_name, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((ret >= 0), "H5Lcreate_external succeeded");
+
+ /* Close and release resources. */
+ ret = H5Pclose(lcpl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+ ret = H5Gclose(group);
+ VRFY((ret >= 0), "H5Gclose succeeded");
+ ret = H5Fclose(fid);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+ }
+
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ /*
+ * For the first case, use all the processes. For the second case
+ * use a sub-communicator to verify the correct communicator is
+ * being used for the externally linked files.
+ * There is no way to determine if MPI info is being used for the
+ * externally linked files.
+ */
+
+ for (i = 0; i < 2; i++) {
+
+ comm = MPI_COMM_WORLD;
+
+ if (i == 0)
+ doIO = 1;
+ else {
+ doIO = mpi_rank % 2;
+ mrc = MPI_Comm_split(MPI_COMM_WORLD, doIO, mpi_rank, &comm);
+ VRFY((mrc == MPI_SUCCESS), "");
+ }
+
+ if (doIO) {
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ VRFY((fapl >= 0), "H5Pcreate succeeded");
+ ret = H5Pset_fapl_mpio(fapl, comm, MPI_INFO_NULL);
+ VRFY((ret >= 0), "H5Pset_fapl_mpio succeeded");
+
+ fid = H5Fopen(filename, H5F_ACC_RDWR, fapl);
+ VRFY((fid >= 0), "H5Fopen succeeded");
+
+ /* test opening a group that is to an external link, the external linked
+ file should inherit the source file's access properties */
+ HDsnprintf(link_path, sizeof(link_path), "%s%s%s", group_path, "/", link_name);
+ group = H5Gopen2(fid, link_path, H5P_DEFAULT);
+ VRFY((group >= 0), "H5Gopen succeeded");
+ ret = H5Gclose(group);
+ VRFY((ret >= 0), "H5Gclose succeeded");
+
+ /* test opening a group that is external link by setting group
+ creation property */
+ gapl = H5Pcreate(H5P_GROUP_ACCESS);
+ VRFY((gapl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_elink_fapl(gapl, fapl);
+ VRFY((ret >= 0), "H5Pset_elink_fapl succeeded");
+
+ group = H5Gopen2(fid, link_path, gapl);
+ VRFY((group >= 0), "H5Gopen succeeded");
+
+ ret = H5Gclose(group);
+ VRFY((ret >= 0), "H5Gclose succeeded");
+
+ ret = H5Pclose(gapl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /* test link APIs */
+ lapl = H5Pcreate(H5P_LINK_ACCESS);
+ VRFY((lapl >= 0), "H5Pcreate succeeded");
+
+ ret = H5Pset_elink_fapl(lapl, fapl);
+ VRFY((ret >= 0), "H5Pset_elink_fapl succeeded");
+
+ tri_status = H5Lexists(fid, link_path, H5P_DEFAULT);
+ VRFY((tri_status == TRUE), "H5Lexists succeeded");
+
+ tri_status = H5Lexists(fid, link_path, lapl);
+ VRFY((tri_status == TRUE), "H5Lexists succeeded");
+
+ group = H5Oopen(fid, link_path, H5P_DEFAULT);
+ VRFY((group >= 0), "H5Oopen succeeded");
+
+ ret = H5Oclose(group);
+ VRFY((ret >= 0), "H5Oclose succeeded");
+
+ group = H5Oopen(fid, link_path, lapl);
+ VRFY((group >= 0), "H5Oopen succeeded");
+
+ ret = H5Oclose(group);
+ VRFY((ret >= 0), "H5Oclose succeeded");
+
+ ret = H5Pclose(lapl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ /* close the remaining resources */
+
+ ret = H5Pclose(fapl);
+ VRFY((ret >= 0), "H5Pclose succeeded");
+
+ ret = H5Fclose(fid);
+ VRFY((ret >= 0), "H5Fclose succeeded");
+ }
+
+ if (comm != MPI_COMM_WORLD) {
+ mrc = MPI_Comm_free(&comm);
+ VRFY((mrc == MPI_SUCCESS), "MPI_Comm_free succeeded");
+ }
+ }
+
+ MPI_Barrier(MPI_COMM_WORLD);
+
+ /* delete the test files */
+ if (mpi_rank == 0) {
+ MPI_File_delete(filename, MPI_INFO_NULL);
+ MPI_File_delete(filename_ext, MPI_INFO_NULL);
+ }
+}
+#endif
diff --git a/testpar/API/t_pshutdown.c b/testpar/API/t_pshutdown.c
new file mode 100644
index 0000000..48a8005
--- /dev/null
+++ b/testpar/API/t_pshutdown.c
@@ -0,0 +1,150 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * Programmer: Mohamad Chaarawi
+ * February 2015
+ *
+ * Purpose: This test creates a file and a bunch of objects in the
+ * file and then calls MPI_Finalize without closing anything. The
+ * library should exercise the attribute callback destroy attached to
+ * MPI_COMM_SELF and terminate the HDF5 library closing all open
+ * objects. The t_prestart test will read back the file and make sure
+ * all created objects are there.
+ */
+
+#include "hdf5.h"
+#include "testphdf5.h"
+
+int nerrors = 0; /* errors count */
+
+const char *FILENAME[] = {"shutdown.h5", NULL};
+
+int
+main(int argc, char **argv)
+{
+ hid_t file_id, dset_id, grp_id;
+ hid_t fapl, sid, mem_dataspace;
+ hsize_t dims[RANK], i;
+ herr_t ret;
+#if 0
+ char filename[1024];
+#endif
+ int mpi_size, mpi_rank;
+ MPI_Comm comm = MPI_COMM_WORLD;
+ MPI_Info info = MPI_INFO_NULL;
+ hsize_t start[RANK];
+ hsize_t count[RANK];
+ hsize_t stride[RANK];
+ hsize_t block[RANK];
+ DATATYPE *data_array = NULL; /* data buffer */
+
+ MPI_Init(&argc, &argv);
+ MPI_Comm_size(comm, &mpi_size);
+ MPI_Comm_rank(comm, &mpi_rank);
+
+ if (MAINPROCESS) {
+ printf("Testing %-62s", "proper shutdown of HDF5 library");
+ fflush(stdout);
+ }
+
+ /* Set up file access property list with parallel I/O access */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ VRFY((fapl >= 0), "H5Pcreate succeeded");
+
+ /* Get the capability flag of the VOL connector being used */
+ ret = H5Pget_vol_cap_flags(fapl, &vol_cap_flags_g);
+ VRFY((ret >= 0), "H5Pget_vol_cap_flags succeeded");
+
+ /* Make sure the connector supports the API functions being tested */
+ if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_GROUP_BASIC) ||
+ !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+ if (MAINPROCESS) {
+ puts("SKIPPED");
+ HDprintf(
+ " API functions for basic file, group, or dataset aren't supported with this connector\n");
+ fflush(stdout);
+ }
+
+ MPI_Finalize();
+ return 0;
+ }
+
+ ret = H5Pset_fapl_mpio(fapl, comm, info);
+ VRFY((ret >= 0), "");
+
+#if 0
+ h5_fixname(FILENAME[0], fapl, filename, sizeof filename);
+#endif
+ file_id = H5Fcreate(FILENAME[0], H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ VRFY((file_id >= 0), "H5Fcreate succeeded");
+ grp_id = H5Gcreate2(file_id, "Group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((grp_id >= 0), "H5Gcreate succeeded");
+
+ dims[0] = (hsize_t)ROW_FACTOR * (hsize_t)mpi_size;
+ dims[1] = (hsize_t)COL_FACTOR * (hsize_t)mpi_size;
+ sid = H5Screate_simple(RANK, dims, NULL);
+ VRFY((sid >= 0), "H5Screate_simple succeeded");
+
+ dset_id = H5Dcreate2(grp_id, "Dataset", H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ VRFY((dset_id >= 0), "H5Dcreate succeeded");
+
+ /* allocate memory for data buffer */
+ data_array = (DATATYPE *)HDmalloc(dims[0] * dims[1] * sizeof(DATATYPE));
+ VRFY((data_array != NULL), "data_array HDmalloc succeeded");
+
+ /* Each process takes a slabs of rows. */
+ block[0] = dims[0] / (hsize_t)mpi_size;
+ block[1] = dims[1];
+ stride[0] = block[0];
+ stride[1] = block[1];
+ count[0] = 1;
+ count[1] = 1;
+ start[0] = (hsize_t)mpi_rank * block[0];
+ start[1] = 0;
+
+ /* put some trivial data in the data_array */
+ for (i = 0; i < dims[0] * dims[1]; i++)
+ data_array[i] = mpi_rank + 1;
+
+ ret = H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab succeeded");
+
+ /* create a memory dataspace independently */
+ mem_dataspace = H5Screate_simple(RANK, block, NULL);
+ VRFY((mem_dataspace >= 0), "");
+
+ /* write data independently */
+ ret = H5Dwrite(dset_id, H5T_NATIVE_INT, mem_dataspace, sid, H5P_DEFAULT, data_array);
+ VRFY((ret >= 0), "H5Dwrite succeeded");
+
+ /* release data buffers */
+ if (data_array)
+ HDfree(data_array);
+
+ MPI_Finalize();
+
+ /* nerrors += GetTestNumErrs(); */
+
+ if (MAINPROCESS) {
+ if (0 == nerrors) {
+ puts(" PASSED");
+ fflush(stdout);
+ }
+ else {
+ puts("*FAILED*");
+ fflush(stdout);
+ }
+ }
+
+ return (nerrors != 0);
+}
diff --git a/testpar/API/t_shapesame.c b/testpar/API/t_shapesame.c
new file mode 100644
index 0000000..340e89e
--- /dev/null
+++ b/testpar/API/t_shapesame.c
@@ -0,0 +1,4516 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ This program will test independent and collective reads and writes between
+ selections of different rank that non-the-less are deemed as having the
+ same shape by H5Sselect_shape_same().
+ */
+
+#define H5S_FRIEND /*suppress error about including H5Spkg */
+
+/* Define this macro to indicate that the testing APIs should be available */
+#define H5S_TESTING
+
+#if 0
+#include "H5Spkg.h" /* Dataspaces */
+#endif
+
+#include "hdf5.h"
+#include "testphdf5.h"
+
+/* FILENAME and filenames must have the same number of names.
+ * Use PARATESTFILE in general and use a separated filename only if the file
+ * created in one test is accessed by a different test.
+ * filenames[0] is reserved as the file name for PARATESTFILE.
+ */
+#define NFILENAME 2
+const char *FILENAME[NFILENAME] = {"ShapeSameTest.h5", NULL};
+char filenames[NFILENAME][PATH_MAX];
+hid_t fapl; /* file access property list */
+
+/* On Lustre (and perhaps other parallel file systems?), we have severe
+ * slow downs if two or more processes attempt to access the same file system
+ * block. To minimize this problem, we set alignment in the shape same tests
+ * to the default Lustre block size -- which greatly reduces contention in
+ * the chunked dataset case.
+ */
+
+#define SHAPE_SAME_TEST_ALIGNMENT ((hsize_t)(4 * 1024 * 1024))
+
+#define PAR_SS_DR_MAX_RANK 5 /* must update code if this changes */
+
+struct hs_dr_pio_test_vars_t {
+ int mpi_size;
+ int mpi_rank;
+ MPI_Comm mpi_comm;
+ MPI_Info mpi_info;
+ int test_num;
+ int edge_size;
+ int checker_edge_size;
+ int chunk_edge_size;
+ int small_rank;
+ int large_rank;
+ hid_t dset_type;
+ uint32_t *small_ds_buf_0;
+ uint32_t *small_ds_buf_1;
+ uint32_t *small_ds_buf_2;
+ uint32_t *small_ds_slice_buf;
+ uint32_t *large_ds_buf_0;
+ uint32_t *large_ds_buf_1;
+ uint32_t *large_ds_buf_2;
+ uint32_t *large_ds_slice_buf;
+ int small_ds_offset;
+ int large_ds_offset;
+ hid_t fid; /* HDF5 file ID */
+ hid_t xfer_plist;
+ hid_t full_mem_small_ds_sid;
+ hid_t full_file_small_ds_sid;
+ hid_t mem_small_ds_sid;
+ hid_t file_small_ds_sid_0;
+ hid_t file_small_ds_sid_1;
+ hid_t small_ds_slice_sid;
+ hid_t full_mem_large_ds_sid;
+ hid_t full_file_large_ds_sid;
+ hid_t mem_large_ds_sid;
+ hid_t file_large_ds_sid_0;
+ hid_t file_large_ds_sid_1;
+ hid_t file_large_ds_process_slice_sid;
+ hid_t mem_large_ds_process_slice_sid;
+ hid_t large_ds_slice_sid;
+ hid_t small_dataset; /* Dataset ID */
+ hid_t large_dataset; /* Dataset ID */
+ size_t small_ds_size;
+ size_t small_ds_slice_size;
+ size_t large_ds_size;
+ size_t large_ds_slice_size;
+ hsize_t dims[PAR_SS_DR_MAX_RANK];
+ hsize_t chunk_dims[PAR_SS_DR_MAX_RANK];
+ hsize_t start[PAR_SS_DR_MAX_RANK];
+ hsize_t stride[PAR_SS_DR_MAX_RANK];
+ hsize_t count[PAR_SS_DR_MAX_RANK];
+ hsize_t block[PAR_SS_DR_MAX_RANK];
+ hsize_t *start_ptr;
+ hsize_t *stride_ptr;
+ hsize_t *count_ptr;
+ hsize_t *block_ptr;
+ int skips;
+ int max_skips;
+ int64_t total_tests;
+ int64_t tests_run;
+ int64_t tests_skipped;
+};
+
+/*-------------------------------------------------------------------------
+ * Function: hs_dr_pio_test__setup()
+ *
+ * Purpose: Do setup for tests of I/O to/from hyperslab selections of
+ * different rank in the parallel case.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 8/9/11
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define CONTIG_HS_DR_PIO_TEST__SETUP__DEBUG 0
+
+static void
+hs_dr_pio_test__setup(const int test_num, const int edge_size, const int checker_edge_size,
+ const int chunk_edge_size, const int small_rank, const int large_rank,
+ const hbool_t use_collective_io, const hid_t dset_type, const int express_test,
+ struct hs_dr_pio_test_vars_t *tv_ptr)
+{
+#if CONTIG_HS_DR_PIO_TEST__SETUP__DEBUG
+ const char *fcnName = "hs_dr_pio_test__setup()";
+#endif /* CONTIG_HS_DR_PIO_TEST__SETUP__DEBUG */
+ const char *filename;
+ hbool_t mis_match = FALSE;
+ int i;
+ int mrc;
+ int mpi_rank; /* needed by the VRFY macro */
+ uint32_t expected_value;
+ uint32_t *ptr_0;
+ uint32_t *ptr_1;
+ hid_t acc_tpl; /* File access templates */
+ hid_t small_ds_dcpl_id = H5P_DEFAULT;
+ hid_t large_ds_dcpl_id = H5P_DEFAULT;
+ herr_t ret; /* Generic return value */
+
+ HDassert(edge_size >= 6);
+ HDassert(edge_size >= chunk_edge_size);
+ HDassert((chunk_edge_size == 0) || (chunk_edge_size >= 3));
+ HDassert(1 < small_rank);
+ HDassert(small_rank < large_rank);
+ HDassert(large_rank <= PAR_SS_DR_MAX_RANK);
+
+ tv_ptr->test_num = test_num;
+ tv_ptr->edge_size = edge_size;
+ tv_ptr->checker_edge_size = checker_edge_size;
+ tv_ptr->chunk_edge_size = chunk_edge_size;
+ tv_ptr->small_rank = small_rank;
+ tv_ptr->large_rank = large_rank;
+ tv_ptr->dset_type = dset_type;
+
+ MPI_Comm_size(MPI_COMM_WORLD, &(tv_ptr->mpi_size));
+ MPI_Comm_rank(MPI_COMM_WORLD, &(tv_ptr->mpi_rank));
+ /* the VRFY() macro needs the local variable mpi_rank -- set it up now */
+ mpi_rank = tv_ptr->mpi_rank;
+
+ HDassert(tv_ptr->mpi_size >= 1);
+
+ tv_ptr->mpi_comm = MPI_COMM_WORLD;
+ tv_ptr->mpi_info = MPI_INFO_NULL;
+
+ for (i = 0; i < tv_ptr->small_rank - 1; i++) {
+ tv_ptr->small_ds_size *= (size_t)(tv_ptr->edge_size);
+ tv_ptr->small_ds_slice_size *= (size_t)(tv_ptr->edge_size);
+ }
+ tv_ptr->small_ds_size *= (size_t)(tv_ptr->mpi_size + 1);
+
+ /* used by checker board tests only */
+ tv_ptr->small_ds_offset = PAR_SS_DR_MAX_RANK - tv_ptr->small_rank;
+
+ HDassert(0 < tv_ptr->small_ds_offset);
+ HDassert(tv_ptr->small_ds_offset < PAR_SS_DR_MAX_RANK);
+
+ for (i = 0; i < tv_ptr->large_rank - 1; i++) {
+
+ tv_ptr->large_ds_size *= (size_t)(tv_ptr->edge_size);
+ tv_ptr->large_ds_slice_size *= (size_t)(tv_ptr->edge_size);
+ }
+ tv_ptr->large_ds_size *= (size_t)(tv_ptr->mpi_size + 1);
+
+ /* used by checker board tests only */
+ tv_ptr->large_ds_offset = PAR_SS_DR_MAX_RANK - tv_ptr->large_rank;
+
+ HDassert(0 <= tv_ptr->large_ds_offset);
+ HDassert(tv_ptr->large_ds_offset < PAR_SS_DR_MAX_RANK);
+
+ /* set up the start, stride, count, and block pointers */
+ /* used by contiguous tests only */
+ tv_ptr->start_ptr = &(tv_ptr->start[PAR_SS_DR_MAX_RANK - tv_ptr->large_rank]);
+ tv_ptr->stride_ptr = &(tv_ptr->stride[PAR_SS_DR_MAX_RANK - tv_ptr->large_rank]);
+ tv_ptr->count_ptr = &(tv_ptr->count[PAR_SS_DR_MAX_RANK - tv_ptr->large_rank]);
+ tv_ptr->block_ptr = &(tv_ptr->block[PAR_SS_DR_MAX_RANK - tv_ptr->large_rank]);
+
+ /* Allocate buffers */
+ tv_ptr->small_ds_buf_0 = (uint32_t *)HDmalloc(sizeof(uint32_t) * tv_ptr->small_ds_size);
+ VRFY((tv_ptr->small_ds_buf_0 != NULL), "malloc of small_ds_buf_0 succeeded");
+
+ tv_ptr->small_ds_buf_1 = (uint32_t *)HDmalloc(sizeof(uint32_t) * tv_ptr->small_ds_size);
+ VRFY((tv_ptr->small_ds_buf_1 != NULL), "malloc of small_ds_buf_1 succeeded");
+
+ tv_ptr->small_ds_buf_2 = (uint32_t *)HDmalloc(sizeof(uint32_t) * tv_ptr->small_ds_size);
+ VRFY((tv_ptr->small_ds_buf_2 != NULL), "malloc of small_ds_buf_2 succeeded");
+
+ tv_ptr->small_ds_slice_buf = (uint32_t *)HDmalloc(sizeof(uint32_t) * tv_ptr->small_ds_slice_size);
+ VRFY((tv_ptr->small_ds_slice_buf != NULL), "malloc of small_ds_slice_buf succeeded");
+
+ tv_ptr->large_ds_buf_0 = (uint32_t *)HDmalloc(sizeof(uint32_t) * tv_ptr->large_ds_size);
+ VRFY((tv_ptr->large_ds_buf_0 != NULL), "malloc of large_ds_buf_0 succeeded");
+
+ tv_ptr->large_ds_buf_1 = (uint32_t *)HDmalloc(sizeof(uint32_t) * tv_ptr->large_ds_size);
+ VRFY((tv_ptr->large_ds_buf_1 != NULL), "malloc of large_ds_buf_1 succeeded");
+
+ tv_ptr->large_ds_buf_2 = (uint32_t *)HDmalloc(sizeof(uint32_t) * tv_ptr->large_ds_size);
+ VRFY((tv_ptr->large_ds_buf_2 != NULL), "malloc of large_ds_buf_2 succeeded");
+
+ tv_ptr->large_ds_slice_buf = (uint32_t *)HDmalloc(sizeof(uint32_t) * tv_ptr->large_ds_slice_size);
+ VRFY((tv_ptr->large_ds_slice_buf != NULL), "malloc of large_ds_slice_buf succeeded");
+
+ /* initialize the buffers */
+
+ ptr_0 = tv_ptr->small_ds_buf_0;
+ for (i = 0; i < (int)(tv_ptr->small_ds_size); i++)
+ *ptr_0++ = (uint32_t)i;
+ HDmemset(tv_ptr->small_ds_buf_1, 0, sizeof(uint32_t) * tv_ptr->small_ds_size);
+ HDmemset(tv_ptr->small_ds_buf_2, 0, sizeof(uint32_t) * tv_ptr->small_ds_size);
+
+ HDmemset(tv_ptr->small_ds_slice_buf, 0, sizeof(uint32_t) * tv_ptr->small_ds_slice_size);
+
+ ptr_0 = tv_ptr->large_ds_buf_0;
+ for (i = 0; i < (int)(tv_ptr->large_ds_size); i++)
+ *ptr_0++ = (uint32_t)i;
+ HDmemset(tv_ptr->large_ds_buf_1, 0, sizeof(uint32_t) * tv_ptr->large_ds_size);
+ HDmemset(tv_ptr->large_ds_buf_2, 0, sizeof(uint32_t) * tv_ptr->large_ds_size);
+
+ HDmemset(tv_ptr->large_ds_slice_buf, 0, sizeof(uint32_t) * tv_ptr->large_ds_slice_size);
+
+ filename = filenames[0]; /* (const char *)GetTestParameters(); */
+ HDassert(filename != NULL);
+#if CONTIG_HS_DR_PIO_TEST__SETUP__DEBUG
+ if (MAINPROCESS) {
+
+ HDfprintf(stdout, "%d: test num = %d.\n", tv_ptr->mpi_rank, tv_ptr->test_num);
+ HDfprintf(stdout, "%d: mpi_size = %d.\n", tv_ptr->mpi_rank, tv_ptr->mpi_size);
+ HDfprintf(stdout, "%d: small/large rank = %d/%d, use_collective_io = %d.\n", tv_ptr->mpi_rank,
+ tv_ptr->small_rank, tv_ptr->large_rank, (int)use_collective_io);
+ HDfprintf(stdout, "%d: edge_size = %d, chunk_edge_size = %d.\n", tv_ptr->mpi_rank, tv_ptr->edge_size,
+ tv_ptr->chunk_edge_size);
+ HDfprintf(stdout, "%d: checker_edge_size = %d.\n", tv_ptr->mpi_rank, tv_ptr->checker_edge_size);
+ HDfprintf(stdout, "%d: small_ds_size = %d, large_ds_size = %d.\n", tv_ptr->mpi_rank,
+ (int)(tv_ptr->small_ds_size), (int)(tv_ptr->large_ds_size));
+ HDfprintf(stdout, "%d: filename = %s.\n", tv_ptr->mpi_rank, filename);
+ }
+#endif /* CONTIG_HS_DR_PIO_TEST__SETUP__DEBUG */
+ /* ----------------------------------------
+ * CREATE AN HDF5 FILE WITH PARALLEL ACCESS
+ * ---------------------------------------*/
+ /* setup file access template */
+ acc_tpl = create_faccess_plist(tv_ptr->mpi_comm, tv_ptr->mpi_info, facc_type);
+ VRFY((acc_tpl >= 0), "create_faccess_plist() succeeded");
+
+ /* set the alignment -- need it large so that we aren't always hitting the
+ * the same file system block. Do this only if express_test is greater
+ * than zero.
+ */
+ if (express_test > 0) {
+
+ ret = H5Pset_alignment(acc_tpl, (hsize_t)0, SHAPE_SAME_TEST_ALIGNMENT);
+ VRFY((ret != FAIL), "H5Pset_alignment() succeeded");
+ }
+
+ /* create the file collectively */
+ tv_ptr->fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl);
+ VRFY((tv_ptr->fid >= 0), "H5Fcreate succeeded");
+
+ MESG("File opened.");
+
+ /* Release file-access template */
+ ret = H5Pclose(acc_tpl);
+ VRFY((ret >= 0), "H5Pclose(acc_tpl) succeeded");
+
+ /* setup dims: */
+ tv_ptr->dims[0] = (hsize_t)(tv_ptr->mpi_size + 1);
+ tv_ptr->dims[1] = tv_ptr->dims[2] = tv_ptr->dims[3] = tv_ptr->dims[4] = (hsize_t)(tv_ptr->edge_size);
+
+ /* Create small ds dataspaces */
+ tv_ptr->full_mem_small_ds_sid = H5Screate_simple(tv_ptr->small_rank, tv_ptr->dims, NULL);
+ VRFY((tv_ptr->full_mem_small_ds_sid != 0), "H5Screate_simple() full_mem_small_ds_sid succeeded");
+
+ tv_ptr->full_file_small_ds_sid = H5Screate_simple(tv_ptr->small_rank, tv_ptr->dims, NULL);
+ VRFY((tv_ptr->full_file_small_ds_sid != 0), "H5Screate_simple() full_file_small_ds_sid succeeded");
+
+ tv_ptr->mem_small_ds_sid = H5Screate_simple(tv_ptr->small_rank, tv_ptr->dims, NULL);
+ VRFY((tv_ptr->mem_small_ds_sid != 0), "H5Screate_simple() mem_small_ds_sid succeeded");
+
+ tv_ptr->file_small_ds_sid_0 = H5Screate_simple(tv_ptr->small_rank, tv_ptr->dims, NULL);
+ VRFY((tv_ptr->file_small_ds_sid_0 != 0), "H5Screate_simple() file_small_ds_sid_0 succeeded");
+
+ /* used by checker board tests only */
+ tv_ptr->file_small_ds_sid_1 = H5Screate_simple(tv_ptr->small_rank, tv_ptr->dims, NULL);
+ VRFY((tv_ptr->file_small_ds_sid_1 != 0), "H5Screate_simple() file_small_ds_sid_1 succeeded");
+
+ tv_ptr->small_ds_slice_sid = H5Screate_simple(tv_ptr->small_rank - 1, &(tv_ptr->dims[1]), NULL);
+ VRFY((tv_ptr->small_ds_slice_sid != 0), "H5Screate_simple() small_ds_slice_sid succeeded");
+
+ /* Create large ds dataspaces */
+ tv_ptr->full_mem_large_ds_sid = H5Screate_simple(tv_ptr->large_rank, tv_ptr->dims, NULL);
+ VRFY((tv_ptr->full_mem_large_ds_sid != 0), "H5Screate_simple() full_mem_large_ds_sid succeeded");
+
+ tv_ptr->full_file_large_ds_sid = H5Screate_simple(tv_ptr->large_rank, tv_ptr->dims, NULL);
+ VRFY((tv_ptr->full_file_large_ds_sid != FAIL), "H5Screate_simple() full_file_large_ds_sid succeeded");
+
+ tv_ptr->mem_large_ds_sid = H5Screate_simple(tv_ptr->large_rank, tv_ptr->dims, NULL);
+ VRFY((tv_ptr->mem_large_ds_sid != FAIL), "H5Screate_simple() mem_large_ds_sid succeeded");
+
+ tv_ptr->file_large_ds_sid_0 = H5Screate_simple(tv_ptr->large_rank, tv_ptr->dims, NULL);
+ VRFY((tv_ptr->file_large_ds_sid_0 != FAIL), "H5Screate_simple() file_large_ds_sid_0 succeeded");
+
+ /* used by checker board tests only */
+ tv_ptr->file_large_ds_sid_1 = H5Screate_simple(tv_ptr->large_rank, tv_ptr->dims, NULL);
+ VRFY((tv_ptr->file_large_ds_sid_1 != FAIL), "H5Screate_simple() file_large_ds_sid_1 succeeded");
+
+ tv_ptr->mem_large_ds_process_slice_sid = H5Screate_simple(tv_ptr->large_rank, tv_ptr->dims, NULL);
+ VRFY((tv_ptr->mem_large_ds_process_slice_sid != FAIL),
+ "H5Screate_simple() mem_large_ds_process_slice_sid succeeded");
+
+ tv_ptr->file_large_ds_process_slice_sid = H5Screate_simple(tv_ptr->large_rank, tv_ptr->dims, NULL);
+ VRFY((tv_ptr->file_large_ds_process_slice_sid != FAIL),
+ "H5Screate_simple() file_large_ds_process_slice_sid succeeded");
+
+ tv_ptr->large_ds_slice_sid = H5Screate_simple(tv_ptr->large_rank - 1, &(tv_ptr->dims[1]), NULL);
+ VRFY((tv_ptr->large_ds_slice_sid != 0), "H5Screate_simple() large_ds_slice_sid succeeded");
+
+ /* if chunk edge size is greater than zero, set up the small and
+ * large data set creation property lists to specify chunked
+ * datasets.
+ */
+ if (tv_ptr->chunk_edge_size > 0) {
+
+ /* Under Lustre (and perhaps other parallel file systems?) we get
+ * locking delays when two or more processes attempt to access the
+ * same file system block.
+ *
+ * To minimize this problem, I have changed chunk_dims[0]
+         * from (mpi_size + 1) to just 1 when any sort of express test is
+ * selected. Given the structure of the test, and assuming we
+ * set the alignment large enough, this avoids the contention
+ * issue by seeing to it that each chunk is only accessed by one
+ * process.
+ *
+ * One can argue as to whether this is a good thing to do in our
+ * tests, but for now it is necessary if we want the test to complete
+ * in a reasonable amount of time.
+ *
+ * JRM -- 9/16/10
+ */
+
+ tv_ptr->chunk_dims[0] = 1;
+
+ tv_ptr->chunk_dims[1] = tv_ptr->chunk_dims[2] = tv_ptr->chunk_dims[3] = tv_ptr->chunk_dims[4] =
+ (hsize_t)(tv_ptr->chunk_edge_size);
+
+ small_ds_dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((ret != FAIL), "H5Pcreate() small_ds_dcpl_id succeeded");
+
+ ret = H5Pset_layout(small_ds_dcpl_id, H5D_CHUNKED);
+ VRFY((ret != FAIL), "H5Pset_layout() small_ds_dcpl_id succeeded");
+
+ ret = H5Pset_chunk(small_ds_dcpl_id, tv_ptr->small_rank, tv_ptr->chunk_dims);
+ VRFY((ret != FAIL), "H5Pset_chunk() small_ds_dcpl_id succeeded");
+
+ large_ds_dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((ret != FAIL), "H5Pcreate() large_ds_dcpl_id succeeded");
+
+ ret = H5Pset_layout(large_ds_dcpl_id, H5D_CHUNKED);
+ VRFY((ret != FAIL), "H5Pset_layout() large_ds_dcpl_id succeeded");
+
+ ret = H5Pset_chunk(large_ds_dcpl_id, tv_ptr->large_rank, tv_ptr->chunk_dims);
+ VRFY((ret != FAIL), "H5Pset_chunk() large_ds_dcpl_id succeeded");
+ }
+
+ /* create the small dataset */
+ tv_ptr->small_dataset =
+ H5Dcreate2(tv_ptr->fid, "small_dataset", tv_ptr->dset_type, tv_ptr->file_small_ds_sid_0, H5P_DEFAULT,
+ small_ds_dcpl_id, H5P_DEFAULT);
+ VRFY((ret != FAIL), "H5Dcreate2() small_dataset succeeded");
+
+ /* create the large dataset */
+ tv_ptr->large_dataset =
+ H5Dcreate2(tv_ptr->fid, "large_dataset", tv_ptr->dset_type, tv_ptr->file_large_ds_sid_0, H5P_DEFAULT,
+ large_ds_dcpl_id, H5P_DEFAULT);
+ VRFY((ret != FAIL), "H5Dcreate2() large_dataset succeeded");
+
+ /* setup xfer property list */
+ tv_ptr->xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((tv_ptr->xfer_plist >= 0), "H5Pcreate(H5P_DATASET_XFER) succeeded");
+
+ if (use_collective_io) {
+ ret = H5Pset_dxpl_mpio(tv_ptr->xfer_plist, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+ }
+
+ /* setup selection to write initial data to the small and large data sets */
+ tv_ptr->start[0] = (hsize_t)(tv_ptr->mpi_rank);
+ tv_ptr->stride[0] = (hsize_t)(2 * (tv_ptr->mpi_size + 1));
+ tv_ptr->count[0] = 1;
+ tv_ptr->block[0] = 1;
+
+ for (i = 1; i < tv_ptr->large_rank; i++) {
+
+ tv_ptr->start[i] = 0;
+ tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+ tv_ptr->count[i] = 1;
+ tv_ptr->block[i] = (hsize_t)(tv_ptr->edge_size);
+ }
+
+ /* setup selections for writing initial data to the small data set */
+ ret = H5Sselect_hyperslab(tv_ptr->mem_small_ds_sid, H5S_SELECT_SET, tv_ptr->start, tv_ptr->stride,
+ tv_ptr->count, tv_ptr->block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(mem_small_ds_sid, set) succeeded");
+
+ ret = H5Sselect_hyperslab(tv_ptr->file_small_ds_sid_0, H5S_SELECT_SET, tv_ptr->start, tv_ptr->stride,
+ tv_ptr->count, tv_ptr->block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(file_small_ds_sid_0, set) succeeded");
+
+ if (MAINPROCESS) { /* add an additional slice to the selections */
+
+ tv_ptr->start[0] = (hsize_t)(tv_ptr->mpi_size);
+
+ ret = H5Sselect_hyperslab(tv_ptr->mem_small_ds_sid, H5S_SELECT_OR, tv_ptr->start, tv_ptr->stride,
+ tv_ptr->count, tv_ptr->block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(mem_small_ds_sid, or) succeeded");
+
+ ret = H5Sselect_hyperslab(tv_ptr->file_small_ds_sid_0, H5S_SELECT_OR, tv_ptr->start, tv_ptr->stride,
+ tv_ptr->count, tv_ptr->block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(file_small_ds_sid_0, or) succeeded");
+ }
+
+ /* write the initial value of the small data set to file */
+ ret = H5Dwrite(tv_ptr->small_dataset, tv_ptr->dset_type, tv_ptr->mem_small_ds_sid,
+ tv_ptr->file_small_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->small_ds_buf_0);
+
+ VRFY((ret >= 0), "H5Dwrite() small_dataset initial write succeeded");
+
+ /* sync with the other processes before checking data */
+ mrc = MPI_Barrier(MPI_COMM_WORLD);
+ VRFY((mrc == MPI_SUCCESS), "Sync after small dataset writes");
+
+ /* read the small data set back to verify that it contains the
+ * expected data. Note that each process reads in the entire
+ * data set and verifies it.
+ */
+ ret = H5Dread(tv_ptr->small_dataset, H5T_NATIVE_UINT32, tv_ptr->full_mem_small_ds_sid,
+ tv_ptr->full_file_small_ds_sid, tv_ptr->xfer_plist, tv_ptr->small_ds_buf_1);
+ VRFY((ret >= 0), "H5Dread() small_dataset initial read succeeded");
+
+ /* verify that the correct data was written to the small data set */
+ expected_value = 0;
+ mis_match = FALSE;
+ ptr_1 = tv_ptr->small_ds_buf_1;
+
+ i = 0;
+ for (i = 0; i < (int)(tv_ptr->small_ds_size); i++) {
+
+ if (*ptr_1 != expected_value) {
+
+ mis_match = TRUE;
+ }
+ ptr_1++;
+ expected_value++;
+ }
+ VRFY((mis_match == FALSE), "small ds init data good.");
+
+ /* setup selections for writing initial data to the large data set */
+
+ tv_ptr->start[0] = (hsize_t)(tv_ptr->mpi_rank);
+
+ ret = H5Sselect_hyperslab(tv_ptr->mem_large_ds_sid, H5S_SELECT_SET, tv_ptr->start, tv_ptr->stride,
+ tv_ptr->count, tv_ptr->block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(mem_large_ds_sid, set) succeeded");
+
+ ret = H5Sselect_hyperslab(tv_ptr->file_large_ds_sid_0, H5S_SELECT_SET, tv_ptr->start, tv_ptr->stride,
+ tv_ptr->count, tv_ptr->block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(file_large_ds_sid_0, set) succeeded");
+
+ /* In passing, setup the process slice dataspaces as well */
+
+ ret = H5Sselect_hyperslab(tv_ptr->mem_large_ds_process_slice_sid, H5S_SELECT_SET, tv_ptr->start,
+ tv_ptr->stride, tv_ptr->count, tv_ptr->block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(mem_large_ds_process_slice_sid, set) succeeded");
+
+ ret = H5Sselect_hyperslab(tv_ptr->file_large_ds_process_slice_sid, H5S_SELECT_SET, tv_ptr->start,
+ tv_ptr->stride, tv_ptr->count, tv_ptr->block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(file_large_ds_process_slice_sid, set) succeeded");
+
+ if (MAINPROCESS) { /* add an additional slice to the selections */
+
+ tv_ptr->start[0] = (hsize_t)(tv_ptr->mpi_size);
+
+ ret = H5Sselect_hyperslab(tv_ptr->mem_large_ds_sid, H5S_SELECT_OR, tv_ptr->start, tv_ptr->stride,
+ tv_ptr->count, tv_ptr->block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(mem_large_ds_sid, or) succeeded");
+
+ ret = H5Sselect_hyperslab(tv_ptr->file_large_ds_sid_0, H5S_SELECT_OR, tv_ptr->start, tv_ptr->stride,
+ tv_ptr->count, tv_ptr->block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(file_large_ds_sid_0, or) succeeded");
+ }
+
+ /* write the initial value of the large data set to file */
+ ret = H5Dwrite(tv_ptr->large_dataset, tv_ptr->dset_type, tv_ptr->mem_large_ds_sid,
+ tv_ptr->file_large_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->large_ds_buf_0);
+ if (ret < 0)
+ H5Eprint2(H5E_DEFAULT, stderr);
+ VRFY((ret >= 0), "H5Dwrite() large_dataset initial write succeeded");
+
+ /* sync with the other processes before checking data */
+ mrc = MPI_Barrier(MPI_COMM_WORLD);
+ VRFY((mrc == MPI_SUCCESS), "Sync after large dataset writes");
+
+ /* read the large data set back to verify that it contains the
+ * expected data. Note that each process reads in the entire
+ * data set.
+ */
+ ret = H5Dread(tv_ptr->large_dataset, H5T_NATIVE_UINT32, tv_ptr->full_mem_large_ds_sid,
+ tv_ptr->full_file_large_ds_sid, tv_ptr->xfer_plist, tv_ptr->large_ds_buf_1);
+ VRFY((ret >= 0), "H5Dread() large_dataset initial read succeeded");
+
+ /* verify that the correct data was written to the large data set */
+ expected_value = 0;
+ mis_match = FALSE;
+ ptr_1 = tv_ptr->large_ds_buf_1;
+
+ i = 0;
+ for (i = 0; i < (int)(tv_ptr->large_ds_size); i++) {
+
+ if (*ptr_1 != expected_value) {
+
+ mis_match = TRUE;
+ }
+ ptr_1++;
+ expected_value++;
+ }
+ VRFY((mis_match == FALSE), "large ds init data good.");
+
+ /* sync with the other processes before changing data */
+ mrc = MPI_Barrier(MPI_COMM_WORLD);
+ VRFY((mrc == MPI_SUCCESS), "Sync initial values check");
+
+ return;
+
+} /* hs_dr_pio_test__setup() */
+
+/*-------------------------------------------------------------------------
+ * Function: hs_dr_pio_test__takedown()
+ *
+ * Purpose: Do takedown after tests of I/O to/from hyperslab selections
+ * of different rank in the parallel case.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 9/18/09
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define HS_DR_PIO_TEST__TAKEDOWN__DEBUG 0
+
+static void
+hs_dr_pio_test__takedown(struct hs_dr_pio_test_vars_t *tv_ptr)
+{
+#if HS_DR_PIO_TEST__TAKEDOWN__DEBUG
+    const char *fcnName = "hs_dr_pio_test__takedown()";
+#endif /* HS_DR_PIO_TEST__TAKEDOWN__DEBUG */
+    int mpi_rank; /* needed by the VRFY macro */
+    herr_t ret; /* Generic return value */
+
+    /* initialize the local copy of mpi_rank */
+    mpi_rank = tv_ptr->mpi_rank;
+
+    /* Close property lists -- only close the xfer property list if one
+     * was actually created (i.e. it is not the library default).
+     */
+    if (tv_ptr->xfer_plist != H5P_DEFAULT) {
+        ret = H5Pclose(tv_ptr->xfer_plist);
+        VRFY((ret != FAIL), "H5Pclose(xfer_plist) succeeded");
+    }
+
+    /* Close dataspaces */
+    ret = H5Sclose(tv_ptr->full_mem_small_ds_sid);
+    VRFY((ret != FAIL), "H5Sclose(full_mem_small_ds_sid) succeeded");
+
+    ret = H5Sclose(tv_ptr->full_file_small_ds_sid);
+    VRFY((ret != FAIL), "H5Sclose(full_file_small_ds_sid) succeeded");
+
+    ret = H5Sclose(tv_ptr->mem_small_ds_sid);
+    VRFY((ret != FAIL), "H5Sclose(mem_small_ds_sid) succeeded");
+
+    ret = H5Sclose(tv_ptr->file_small_ds_sid_0);
+    VRFY((ret != FAIL), "H5Sclose(file_small_ds_sid_0) succeeded");
+
+    ret = H5Sclose(tv_ptr->file_small_ds_sid_1);
+    VRFY((ret != FAIL), "H5Sclose(file_small_ds_sid_1) succeeded");
+
+    ret = H5Sclose(tv_ptr->small_ds_slice_sid);
+    VRFY((ret != FAIL), "H5Sclose(small_ds_slice_sid) succeeded");
+
+    ret = H5Sclose(tv_ptr->full_mem_large_ds_sid);
+    VRFY((ret != FAIL), "H5Sclose(full_mem_large_ds_sid) succeeded");
+
+    ret = H5Sclose(tv_ptr->full_file_large_ds_sid);
+    VRFY((ret != FAIL), "H5Sclose(full_file_large_ds_sid) succeeded");
+
+    ret = H5Sclose(tv_ptr->mem_large_ds_sid);
+    VRFY((ret != FAIL), "H5Sclose(mem_large_ds_sid) succeeded");
+
+    ret = H5Sclose(tv_ptr->file_large_ds_sid_0);
+    VRFY((ret != FAIL), "H5Sclose(file_large_ds_sid_0) succeeded");
+
+    ret = H5Sclose(tv_ptr->file_large_ds_sid_1);
+    VRFY((ret != FAIL), "H5Sclose(file_large_ds_sid_1) succeeded");
+
+    ret = H5Sclose(tv_ptr->mem_large_ds_process_slice_sid);
+    VRFY((ret != FAIL), "H5Sclose(mem_large_ds_process_slice_sid) succeeded");
+
+    ret = H5Sclose(tv_ptr->file_large_ds_process_slice_sid);
+    VRFY((ret != FAIL), "H5Sclose(file_large_ds_process_slice_sid) succeeded");
+
+    ret = H5Sclose(tv_ptr->large_ds_slice_sid);
+    VRFY((ret != FAIL), "H5Sclose(large_ds_slice_sid) succeeded");
+
+    /* Close Datasets */
+    ret = H5Dclose(tv_ptr->small_dataset);
+    VRFY((ret != FAIL), "H5Dclose(small_dataset) succeeded");
+
+    ret = H5Dclose(tv_ptr->large_dataset);
+    VRFY((ret != FAIL), "H5Dclose(large_dataset) succeeded");
+
+    /* close the file collectively */
+    MESG("about to close file.");
+    ret = H5Fclose(tv_ptr->fid);
+    VRFY((ret != FAIL), "file close succeeded");
+
+    /* Free memory buffers.  Reset each stored pointer to NULL after the
+     * free so that a repeated takedown or a stale reference through
+     * *tv_ptr cannot double-free or touch freed memory.
+     */
+    if (tv_ptr->small_ds_buf_0 != NULL) {
+        HDfree(tv_ptr->small_ds_buf_0);
+        tv_ptr->small_ds_buf_0 = NULL;
+    }
+    if (tv_ptr->small_ds_buf_1 != NULL) {
+        HDfree(tv_ptr->small_ds_buf_1);
+        tv_ptr->small_ds_buf_1 = NULL;
+    }
+    if (tv_ptr->small_ds_buf_2 != NULL) {
+        HDfree(tv_ptr->small_ds_buf_2);
+        tv_ptr->small_ds_buf_2 = NULL;
+    }
+    if (tv_ptr->small_ds_slice_buf != NULL) {
+        HDfree(tv_ptr->small_ds_slice_buf);
+        tv_ptr->small_ds_slice_buf = NULL;
+    }
+
+    if (tv_ptr->large_ds_buf_0 != NULL) {
+        HDfree(tv_ptr->large_ds_buf_0);
+        tv_ptr->large_ds_buf_0 = NULL;
+    }
+    if (tv_ptr->large_ds_buf_1 != NULL) {
+        HDfree(tv_ptr->large_ds_buf_1);
+        tv_ptr->large_ds_buf_1 = NULL;
+    }
+    if (tv_ptr->large_ds_buf_2 != NULL) {
+        HDfree(tv_ptr->large_ds_buf_2);
+        tv_ptr->large_ds_buf_2 = NULL;
+    }
+    if (tv_ptr->large_ds_slice_buf != NULL) {
+        HDfree(tv_ptr->large_ds_slice_buf);
+        tv_ptr->large_ds_slice_buf = NULL;
+    }
+
+    return;
+
+} /* hs_dr_pio_test__takedown() */
+
+/*-------------------------------------------------------------------------
+ * Function: contig_hs_dr_pio_test__d2m_l2s()
+ *
+ * Purpose: Part one of a series of tests of I/O to/from hyperslab
+ *              selections of different rank in the parallel case.
+ *
+ * Verify that we can read from disk correctly using
+ * selections of different rank that H5Sselect_shape_same()
+ * views as being of the same shape.
+ *
+ * In this function, we test this by reading small_rank - 1
+ * slices from the on disk large cube, and verifying that the
+ * data read is correct. Verify that H5Sselect_shape_same()
+ * returns true on the memory and file selections.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 9/10/11
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define CONTIG_HS_DR_PIO_TEST__D2M_L2S__DEBUG 0
+
+static void
+contig_hs_dr_pio_test__d2m_l2s(struct hs_dr_pio_test_vars_t *tv_ptr)
+{
+#if CONTIG_HS_DR_PIO_TEST__D2M_L2S__DEBUG
+    /* NOTE(review): this debug label still says "__run_test()" -- looks like a
+     * copy/paste leftover from the caller; confirm before relying on it to
+     * identify debug output.
+     */
+    const char *fcnName = "contig_hs_dr_pio_test__run_test()";
+#endif /* CONTIG_HS_DR_PIO_TEST__D2M_L2S__DEBUG */
+    hbool_t mis_match = FALSE;
+    int i, j, k, l;
+    size_t n;
+    int mpi_rank; /* needed by the VRFY macro */
+    uint32_t expected_value;
+    uint32_t *ptr_1;
+    htri_t check; /* Shape comparison return value */
+    herr_t ret; /* Generic return value */
+
+    /* initialize the local copy of mpi_rank */
+    mpi_rank = tv_ptr->mpi_rank;
+
+    /* We have already done a H5Sselect_all() on the dataspace
+     * small_ds_slice_sid in the initialization phase, so no need to
+     * call H5Sselect_all() again.
+     */
+
+    /* set up start, stride, count, and block -- note that we will
+     * change start[] so as to read slices of the large cube.
+     * Dimensions that do not appear in the small slice get block size 1;
+     * the trailing small_rank - 1 dimensions get a full edge_size block.
+     */
+    for (i = 0; i < PAR_SS_DR_MAX_RANK; i++) {
+
+        tv_ptr->start[i] = 0;
+        tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+        tv_ptr->count[i] = 1;
+        if ((PAR_SS_DR_MAX_RANK - i) > (tv_ptr->small_rank - 1)) {
+
+            tv_ptr->block[i] = 1;
+        }
+        else {
+
+            tv_ptr->block[i] = (hsize_t)(tv_ptr->edge_size);
+        }
+    }
+
+    /* zero out the buffer we will be reading into */
+    HDmemset(tv_ptr->small_ds_slice_buf, 0, sizeof(uint32_t) * tv_ptr->small_ds_slice_size);
+
+#if CONTIG_HS_DR_PIO_TEST__D2M_L2S__DEBUG
+    HDfprintf(stdout, "%s reading slices from big cube on disk into small cube slice.\n", fcnName);
+#endif /* CONTIG_HS_DR_PIO_TEST__D2M_L2S__DEBUG */
+
+    /* in serial versions of this test, we loop through all the dimensions
+     * of the large data set. However, in the parallel version, each
+     * process only works with that slice of the large cube indicated
+     * by its rank -- hence we set the most slowly changing index to
+     * mpi_rank, and don't iterate over it.
+     */
+
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 0) {
+
+        i = tv_ptr->mpi_rank;
+    }
+    else {
+
+        i = 0;
+    }
+
+    /* since large_rank is at most PAR_SS_DR_MAX_RANK, no need to
+     * loop over it -- either we are setting i to mpi_rank, or
+     * we are setting it to zero. It will not change during the
+     * test.
+     */
+
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 1) {
+
+        j = tv_ptr->mpi_rank;
+    }
+    else {
+
+        j = 0;
+    }
+
+    /* Each nested do-while below is enabled only when the corresponding
+     * dimension of the large data set actually exists and is not covered
+     * by the small slice; each iteration reads one small_rank - 1
+     * dimensional slice from the on-disk large data set and verifies it.
+     */
+    do {
+        if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 2) {
+
+            k = tv_ptr->mpi_rank;
+        }
+        else {
+
+            k = 0;
+        }
+
+        do {
+            /* since small rank >= 2 and large_rank > small_rank, we
+             * have large_rank >= 3. Since PAR_SS_DR_MAX_RANK == 5
+             * (barring major re-organization), this gives us:
+             *
+             * (PAR_SS_DR_MAX_RANK - large_rank) <= 2
+             *
+             * so no need to repeat the test in the outer loops --
+             * just set l = 0.
+             */
+
+            l = 0;
+            do {
+                if ((tv_ptr->skips)++ < tv_ptr->max_skips) { /* skip the test */
+
+                    (tv_ptr->tests_skipped)++;
+                }
+                else { /* run the test */
+
+                    tv_ptr->skips = 0; /* reset the skips counter */
+
+                    /* we know that small_rank - 1 >= 1 and that
+                     * large_rank > small_rank by the assertions at the head
+                     * of this function. Thus no need for another inner loop.
+                     */
+                    tv_ptr->start[0] = (hsize_t)i;
+                    tv_ptr->start[1] = (hsize_t)j;
+                    tv_ptr->start[2] = (hsize_t)k;
+                    tv_ptr->start[3] = (hsize_t)l;
+                    tv_ptr->start[4] = 0;
+
+                    /* the *_ptr members point past the leading unused
+                     * dimensions, so this selects with exactly large_rank
+                     * dimensions against the file dataspace.
+                     */
+                    ret = H5Sselect_hyperslab(tv_ptr->file_large_ds_sid_0, H5S_SELECT_SET, tv_ptr->start_ptr,
+                                              tv_ptr->stride_ptr, tv_ptr->count_ptr, tv_ptr->block_ptr);
+                    VRFY((ret != FAIL), "H5Sselect_hyperslab(file_large_cube_sid) succeeded");
+
+                    /* verify that H5Sselect_shape_same() reports the two
+                     * selections as having the same shape.
+                     */
+                    check = H5Sselect_shape_same(tv_ptr->small_ds_slice_sid, tv_ptr->file_large_ds_sid_0);
+                    VRFY((check == TRUE), "H5Sselect_shape_same passed");
+
+                    /* Read selection from disk */
+#if CONTIG_HS_DR_PIO_TEST__D2M_L2S__DEBUG
+                    HDfprintf(stdout, "%s:%d: start = %d %d %d %d %d.\n", fcnName, (int)(tv_ptr->mpi_rank),
+                              (int)(tv_ptr->start[0]), (int)(tv_ptr->start[1]), (int)(tv_ptr->start[2]),
+                              (int)(tv_ptr->start[3]), (int)(tv_ptr->start[4]));
+                    HDfprintf(stdout, "%s slice/file extent dims = %d/%d.\n", fcnName,
+                              H5Sget_simple_extent_ndims(tv_ptr->small_ds_slice_sid),
+                              H5Sget_simple_extent_ndims(tv_ptr->file_large_ds_sid_0));
+#endif /* CONTIG_HS_DR_PIO_TEST__D2M_L2S__DEBUG */
+                    ret =
+                        H5Dread(tv_ptr->large_dataset, H5T_NATIVE_UINT32, tv_ptr->small_ds_slice_sid,
+                                tv_ptr->file_large_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->small_ds_slice_buf);
+                    VRFY((ret >= 0), "H5Dread() slice from large ds succeeded.");
+
+                    /* verify that expected data is retrieved */
+
+                    /* the large data set was initialized with consecutive
+                     * uint32 values, so the slice starting at (i,j,k,l,0)
+                     * must contain consecutive values beginning at the
+                     * linearized offset computed below.
+                     */
+                    mis_match = FALSE;
+                    ptr_1 = tv_ptr->small_ds_slice_buf;
+                    expected_value =
+                        (uint32_t)((i * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size *
+                                    tv_ptr->edge_size) +
+                                   (j * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size) +
+                                   (k * tv_ptr->edge_size * tv_ptr->edge_size) + (l * tv_ptr->edge_size));
+
+                    for (n = 0; n < tv_ptr->small_ds_slice_size; n++) {
+
+                        if (*ptr_1 != expected_value) {
+
+                            mis_match = TRUE;
+                        }
+
+                        *ptr_1 = 0; /* zero data for next use */
+
+                        ptr_1++;
+                        expected_value++;
+                    }
+
+                    VRFY((mis_match == FALSE), "small slice read from large ds data good.");
+
+                    (tv_ptr->tests_run)++;
+                }
+
+                l++;
+
+                (tv_ptr->total_tests)++;
+
+            } while ((tv_ptr->large_rank > 2) && ((tv_ptr->small_rank - 1) <= 1) && (l < tv_ptr->edge_size));
+            k++;
+        } while ((tv_ptr->large_rank > 3) && ((tv_ptr->small_rank - 1) <= 2) && (k < tv_ptr->edge_size));
+        j++;
+    } while ((tv_ptr->large_rank > 4) && ((tv_ptr->small_rank - 1) <= 3) && (j < tv_ptr->edge_size));
+
+    return;
+
+} /* contig_hs_dr_pio_test__d2m_l2s() */
+
+/*-------------------------------------------------------------------------
+ * Function: contig_hs_dr_pio_test__d2m_s2l()
+ *
+ * Purpose: Part two of a series of tests of I/O to/from hyperslab
+ *              selections of different rank in the parallel case.
+ *
+ * Verify that we can read from disk correctly using
+ * selections of different rank that H5Sselect_shape_same()
+ * views as being of the same shape.
+ *
+ * In this function, we test this by reading slices of the
+ * on disk small data set into slices through the in memory
+ * large data set, and verify that the correct data (and
+ * only the correct data) is read.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 8/10/11
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define CONTIG_HS_DR_PIO_TEST__D2M_S2L__DEBUG 0
+
+static void
+contig_hs_dr_pio_test__d2m_s2l(struct hs_dr_pio_test_vars_t *tv_ptr)
+{
+#if CONTIG_HS_DR_PIO_TEST__D2M_S2L__DEBUG
+    const char *fcnName = "contig_hs_dr_pio_test__d2m_s2l()";
+#endif /* CONTIG_HS_DR_PIO_TEST__D2M_S2L__DEBUG */
+    hbool_t mis_match = FALSE;
+    int i, j, k, l;
+    size_t n;
+    int mpi_rank; /* needed by the VRFY macro */
+    size_t start_index;
+    size_t stop_index;
+    uint32_t expected_value;
+    uint32_t *ptr_1;
+    htri_t check; /* Shape comparison return value */
+    herr_t ret; /* Generic return value */
+
+    /* initialize the local copy of mpi_rank */
+    mpi_rank = tv_ptr->mpi_rank;
+
+    /* Read slices of the on disk small data set into slices
+     * through the in memory large data set, and verify that the correct
+     * data (and only the correct data) is read.
+     */
+
+    tv_ptr->start[0] = (hsize_t)(tv_ptr->mpi_rank);
+    tv_ptr->stride[0] = (hsize_t)(2 * (tv_ptr->mpi_size + 1));
+    tv_ptr->count[0] = 1;
+    tv_ptr->block[0] = 1;
+
+    for (i = 1; i < tv_ptr->large_rank; i++) {
+
+        tv_ptr->start[i] = 0;
+        tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+        tv_ptr->count[i] = 1;
+        tv_ptr->block[i] = (hsize_t)(tv_ptr->edge_size);
+    }
+
+    /* select this process's slice of the on-disk small data set -- this
+     * selection stays fixed for the whole test.
+     */
+    ret = H5Sselect_hyperslab(tv_ptr->file_small_ds_sid_0, H5S_SELECT_SET, tv_ptr->start, tv_ptr->stride,
+                              tv_ptr->count, tv_ptr->block);
+    VRFY((ret >= 0), "H5Sselect_hyperslab(file_small_ds_sid_0, set) succeeded");
+
+#if CONTIG_HS_DR_PIO_TEST__D2M_S2L__DEBUG
+    HDfprintf(stdout, "%s reading slices of on disk small data set into slices of big data set.\n", fcnName);
+#endif /* CONTIG_HS_DR_PIO_TEST__D2M_S2L__DEBUG */
+
+    /* zero out the in memory large ds */
+    HDmemset(tv_ptr->large_ds_buf_1, 0, sizeof(uint32_t) * tv_ptr->large_ds_size);
+
+    /* set up start, stride, count, and block -- note that we will
+     * change start[] so as to read slices of the large cube.
+     */
+    for (i = 0; i < PAR_SS_DR_MAX_RANK; i++) {
+
+        tv_ptr->start[i] = 0;
+        tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+        tv_ptr->count[i] = 1;
+        if ((PAR_SS_DR_MAX_RANK - i) > (tv_ptr->small_rank - 1)) {
+
+            tv_ptr->block[i] = 1;
+        }
+        else {
+
+            tv_ptr->block[i] = (hsize_t)(tv_ptr->edge_size);
+        }
+    }
+
+    /* in serial versions of this test, we loop through all the dimensions
+     * of the large data set that don't appear in the small data set.
+     *
+     * However, in the parallel version, each process only works with that
+     * slice of the large (and small) data set indicated by its rank -- hence
+     * we set the most slowly changing index to mpi_rank, and don't iterate
+     * over it.
+     */
+
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 0) {
+
+        i = tv_ptr->mpi_rank;
+    }
+    else {
+
+        i = 0;
+    }
+
+    /* since large_rank is at most PAR_SS_DR_MAX_RANK, no need to
+     * loop over it -- either we are setting i to mpi_rank, or
+     * we are setting it to zero. It will not change during the
+     * test.
+     */
+
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 1) {
+
+        j = tv_ptr->mpi_rank;
+    }
+    else {
+
+        j = 0;
+    }
+
+    do {
+        if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 2) {
+
+            k = tv_ptr->mpi_rank;
+        }
+        else {
+
+            k = 0;
+        }
+
+        do {
+            /* since small rank >= 2 and large_rank > small_rank, we
+             * have large_rank >= 3. Since PAR_SS_DR_MAX_RANK == 5
+             * (barring major re-organization), this gives us:
+             *
+             * (PAR_SS_DR_MAX_RANK - large_rank) <= 2
+             *
+             * so no need to repeat the test in the outer loops --
+             * just set l = 0.
+             */
+
+            l = 0;
+            do {
+                if ((tv_ptr->skips)++ < tv_ptr->max_skips) { /* skip the test */
+
+                    (tv_ptr->tests_skipped)++;
+                }
+                else { /* run the test */
+
+                    tv_ptr->skips = 0; /* reset the skips counter */
+
+                    /* we know that small_rank >= 1 and that large_rank > small_rank
+                     * by the assertions at the head of this function. Thus no
+                     * need for another inner loop.
+                     */
+                    tv_ptr->start[0] = (hsize_t)i;
+                    tv_ptr->start[1] = (hsize_t)j;
+                    tv_ptr->start[2] = (hsize_t)k;
+                    tv_ptr->start[3] = (hsize_t)l;
+                    tv_ptr->start[4] = 0;
+
+                    ret = H5Sselect_hyperslab(tv_ptr->mem_large_ds_sid, H5S_SELECT_SET, tv_ptr->start_ptr,
+                                              tv_ptr->stride_ptr, tv_ptr->count_ptr, tv_ptr->block_ptr);
+                    VRFY((ret != FAIL), "H5Sselect_hyperslab(mem_large_ds_sid) succeeded");
+
+                    /* verify that H5Sselect_shape_same() reports the two
+                     * selections as having the same shape.
+                     */
+                    check = H5Sselect_shape_same(tv_ptr->file_small_ds_sid_0, tv_ptr->mem_large_ds_sid);
+                    VRFY((check == TRUE), "H5Sselect_shape_same passed");
+
+                    /* Read selection from disk */
+#if CONTIG_HS_DR_PIO_TEST__D2M_S2L__DEBUG
+                    HDfprintf(stdout, "%s:%d: start = %d %d %d %d %d.\n", fcnName, (int)(tv_ptr->mpi_rank),
+                              (int)(tv_ptr->start[0]), (int)(tv_ptr->start[1]), (int)(tv_ptr->start[2]),
+                              (int)(tv_ptr->start[3]), (int)(tv_ptr->start[4]));
+                    HDfprintf(stdout, "%s:%d: mem/file extent dims = %d/%d.\n", fcnName, tv_ptr->mpi_rank,
+                              H5Sget_simple_extent_ndims(tv_ptr->mem_large_ds_sid),
+                              H5Sget_simple_extent_ndims(tv_ptr->file_small_ds_sid_0));
+#endif /* CONTIG_HS_DR_PIO_TEST__D2M_S2L__DEBUG */
+                    ret = H5Dread(tv_ptr->small_dataset, H5T_NATIVE_UINT32, tv_ptr->mem_large_ds_sid,
+                                  tv_ptr->file_small_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->large_ds_buf_1);
+                    VRFY((ret >= 0), "H5Dread() slice from small ds succeeded.");
+
+                    /* verify that the expected data and only the
+                     * expected data was read.
+                     */
+                    mis_match = FALSE; /* reset per iteration -- previously the flag was sticky
+                                        * across iterations; this matches the reset done in
+                                        * contig_hs_dr_pio_test__d2m_l2s().
+                                        */
+                    ptr_1 = tv_ptr->large_ds_buf_1;
+                    expected_value = (uint32_t)((size_t)(tv_ptr->mpi_rank) * tv_ptr->small_ds_slice_size);
+                    start_index =
+                        (size_t)((i * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size *
+                                  tv_ptr->edge_size) +
+                                 (j * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size) +
+                                 (k * tv_ptr->edge_size * tv_ptr->edge_size) + (l * tv_ptr->edge_size));
+                    stop_index = start_index + tv_ptr->small_ds_slice_size - 1;
+
+                    HDassert(start_index < stop_index);
+                    HDassert(stop_index <= tv_ptr->large_ds_size);
+
+                    /* scan the whole large buffer: entries inside
+                     * [start_index, stop_index] must hold the consecutive
+                     * expected values; everything else must still be zero.
+                     */
+                    for (n = 0; n < tv_ptr->large_ds_size; n++) {
+
+                        if ((n >= start_index) && (n <= stop_index)) {
+
+                            if (*ptr_1 != expected_value) {
+
+                                mis_match = TRUE;
+                            }
+                            expected_value++;
+                        }
+                        else {
+
+                            if (*ptr_1 != 0) {
+
+                                mis_match = TRUE;
+                            }
+                        }
+                        /* zero out the value for the next pass */
+                        *ptr_1 = 0;
+
+                        ptr_1++;
+                    }
+
+                    VRFY((mis_match == FALSE), "small slice read from large ds data good.");
+
+                    (tv_ptr->tests_run)++;
+                }
+
+                l++;
+
+                (tv_ptr->total_tests)++;
+
+            } while ((tv_ptr->large_rank > 2) && ((tv_ptr->small_rank - 1) <= 1) && (l < tv_ptr->edge_size));
+            k++;
+        } while ((tv_ptr->large_rank > 3) && ((tv_ptr->small_rank - 1) <= 2) && (k < tv_ptr->edge_size));
+        j++;
+    } while ((tv_ptr->large_rank > 4) && ((tv_ptr->small_rank - 1) <= 3) && (j < tv_ptr->edge_size));
+
+    return;
+
+} /* contig_hs_dr_pio_test__d2m_s2l() */
+
+/*-------------------------------------------------------------------------
+ * Function: contig_hs_dr_pio_test__m2d_l2s()
+ *
+ * Purpose: Part three of a series of tests of I/O to/from hyperslab
+ * selections of different rank in parallel.
+ *
+ * Verify that we can write from memory to file using
+ * selections of different rank that H5Sselect_shape_same()
+ * views as being of the same shape.
+ *
+ * Do this by writing small_rank - 1 dimensional slices from
+ * the in memory large data set to the on disk small cube
+ * dataset. After each write, read the slice of the small
+ * dataset back from disk, and verify that it contains
+ * the expected data. Verify that H5Sselect_shape_same()
+ * returns true on the memory and file selections.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 8/10/11
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define CONTIG_HS_DR_PIO_TEST__M2D_L2S__DEBUG 0
+
+static void
+contig_hs_dr_pio_test__m2d_l2s(struct hs_dr_pio_test_vars_t *tv_ptr)
+{
+#if CONTIG_HS_DR_PIO_TEST__M2D_L2S__DEBUG
+    const char *fcnName = "contig_hs_dr_pio_test__m2d_l2s()";
+#endif /* CONTIG_HS_DR_PIO_TEST__M2D_L2S__DEBUG */
+    hbool_t   mis_match = FALSE;
+    int       i, j, k, l;
+    size_t    n;
+    int       mpi_rank; /* needed by the VRFY macro */
+    size_t    start_index;
+    size_t    stop_index;
+    uint32_t  expected_value;
+    uint32_t *ptr_1;
+    htri_t    check; /* Shape comparison return value */
+    herr_t    ret;   /* Generic return value */
+
+    /* initialize the local copy of mpi_rank */
+    mpi_rank = tv_ptr->mpi_rank;
+
+    /* now we go in the opposite direction, verifying that we can write
+     * from memory to file using selections of different rank that
+     * H5Sselect_shape_same() views as being of the same shape.
+     *
+     * Start by writing small_rank - 1 dimensional slices from the in memory large
+     * data set to the on disk small cube dataset.  After each write, read the
+     * slice of the small dataset back from disk, and verify that it contains
+     * the expected data.  Verify that H5Sselect_shape_same() returns true on
+     * the memory and file selections.
+     */
+
+    /* select this process's slice of the small data set -- both in memory
+     * and on disk -- as determined by mpi_rank.
+     */
+    tv_ptr->start[0]  = (hsize_t)(tv_ptr->mpi_rank);
+    tv_ptr->stride[0] = (hsize_t)(2 * (tv_ptr->mpi_size + 1));
+    tv_ptr->count[0]  = 1;
+    tv_ptr->block[0]  = 1;
+
+    for (i = 1; i < tv_ptr->large_rank; i++) {
+
+        tv_ptr->start[i]  = 0;
+        tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+        tv_ptr->count[i]  = 1;
+        tv_ptr->block[i]  = (hsize_t)(tv_ptr->edge_size);
+    }
+
+    ret = H5Sselect_hyperslab(tv_ptr->file_small_ds_sid_0, H5S_SELECT_SET, tv_ptr->start, tv_ptr->stride,
+                              tv_ptr->count, tv_ptr->block);
+    VRFY((ret >= 0), "H5Sselect_hyperslab(file_small_ds_sid_0, set) succeeded");
+
+    ret = H5Sselect_hyperslab(tv_ptr->mem_small_ds_sid, H5S_SELECT_SET, tv_ptr->start, tv_ptr->stride,
+                              tv_ptr->count, tv_ptr->block);
+    VRFY((ret >= 0), "H5Sselect_hyperslab(mem_small_ds_sid, set) succeeded");
+
+    /* set up start, stride, count, and block -- note that we will
+     * change start[] so as to read slices of the large cube.
+     */
+    for (i = 0; i < PAR_SS_DR_MAX_RANK; i++) {
+
+        tv_ptr->start[i]  = 0;
+        tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+        tv_ptr->count[i]  = 1;
+        if ((PAR_SS_DR_MAX_RANK - i) > (tv_ptr->small_rank - 1)) {
+
+            tv_ptr->block[i] = 1;
+        }
+        else {
+
+            tv_ptr->block[i] = (hsize_t)(tv_ptr->edge_size);
+        }
+    }
+
+    /* zero out the in memory small ds */
+    HDmemset(tv_ptr->small_ds_buf_1, 0, sizeof(uint32_t) * tv_ptr->small_ds_size);
+
+#if CONTIG_HS_DR_PIO_TEST__M2D_L2S__DEBUG
+    HDfprintf(stdout, "%s writing slices from big ds to slices of small ds on disk.\n", fcnName);
+#endif /* CONTIG_HS_DR_PIO_TEST__M2D_L2S__DEBUG */
+
+    /* in serial versions of this test, we loop through all the dimensions
+     * of the large data set that don't appear in the small data set.
+     *
+     * However, in the parallel version, each process only works with that
+     * slice of the large (and small) data set indicated by its rank -- hence
+     * we set the most slowly changing index to mpi_rank, and don't iterate
+     * over it.
+     */
+
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 0) {
+
+        i = tv_ptr->mpi_rank;
+    }
+    else {
+
+        i = 0;
+    }
+
+    /* since large_rank is at most PAR_SS_DR_MAX_RANK, no need to
+     * loop over it -- either we are setting i to mpi_rank, or
+     * we are setting it to zero.  It will not change during the
+     * test.
+     */
+
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 1) {
+
+        j = tv_ptr->mpi_rank;
+    }
+    else {
+
+        j = 0;
+    }
+
+    /* NOTE(review): a stray "j = 0;" used to appear here, clobbering the
+     * rank-based initialization of j performed by the if/else just above
+     * and diverging from the sibling function
+     * contig_hs_dr_pio_test__m2d_s2l().  It has been removed so that j
+     * retains its mpi_rank value when
+     * (PAR_SS_DR_MAX_RANK - large_rank) == 1; the verification below
+     * computes expected_value from j, so it remains self-consistent
+     * either way.
+     */
+    do {
+        if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 2) {
+
+            k = tv_ptr->mpi_rank;
+        }
+        else {
+
+            k = 0;
+        }
+
+        do {
+            /* since small rank >= 2 and large_rank > small_rank, we
+             * have large_rank >= 3.  Since PAR_SS_DR_MAX_RANK == 5
+             * (barring major re-organization), this gives us:
+             *
+             *     (PAR_SS_DR_MAX_RANK - large_rank) <= 2
+             *
+             * so no need to repeat the test in the outer loops --
+             * just set l = 0.
+             */
+
+            l = 0;
+            do {
+                if ((tv_ptr->skips)++ < tv_ptr->max_skips) { /* skip the test */
+
+                    (tv_ptr->tests_skipped)++;
+                }
+                else { /* run the test */
+
+                    tv_ptr->skips = 0; /* reset the skips counter */
+
+                    /* we know that small_rank >= 1 and that large_rank > small_rank
+                     * by the assertions at the head of this function.  Thus no
+                     * need for another inner loop.
+                     */
+
+                    /* zero out this rank's slice of the on disk small data set */
+                    ret = H5Dwrite(tv_ptr->small_dataset, H5T_NATIVE_UINT32, tv_ptr->mem_small_ds_sid,
+                                   tv_ptr->file_small_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->small_ds_buf_2);
+                    VRFY((ret >= 0), "H5Dwrite() zero slice to small ds succeeded.");
+
+                    /* select the portion of the in memory large cube from which we
+                     * are going to write data.
+                     */
+                    tv_ptr->start[0] = (hsize_t)i;
+                    tv_ptr->start[1] = (hsize_t)j;
+                    tv_ptr->start[2] = (hsize_t)k;
+                    tv_ptr->start[3] = (hsize_t)l;
+                    tv_ptr->start[4] = 0;
+
+                    ret = H5Sselect_hyperslab(tv_ptr->mem_large_ds_sid, H5S_SELECT_SET, tv_ptr->start_ptr,
+                                              tv_ptr->stride_ptr, tv_ptr->count_ptr, tv_ptr->block_ptr);
+                    VRFY((ret >= 0), "H5Sselect_hyperslab() mem_large_ds_sid succeeded.");
+
+                    /* verify that H5Sselect_shape_same() reports the in
+                     * memory slice through the cube selection and the
+                     * on disk full square selections as having the same shape.
+                     */
+                    check = H5Sselect_shape_same(tv_ptr->file_small_ds_sid_0, tv_ptr->mem_large_ds_sid);
+                    VRFY((check == TRUE), "H5Sselect_shape_same passed.");
+
+                    /* write the slice from the in memory large data set to the
+                     * slice of the on disk small dataset. */
+#if CONTIG_HS_DR_PIO_TEST__M2D_L2S__DEBUG
+                    HDfprintf(stdout, "%s:%d: start = %d %d %d %d %d.\n", fcnName, (int)(tv_ptr->mpi_rank),
+                              (int)(tv_ptr->start[0]), (int)(tv_ptr->start[1]), (int)(tv_ptr->start[2]),
+                              (int)(tv_ptr->start[3]), (int)(tv_ptr->start[4]));
+                    HDfprintf(stdout, "%s:%d: mem/file extent dims = %d/%d.\n", fcnName, tv_ptr->mpi_rank,
+                              H5Sget_simple_extent_ndims(tv_ptr->mem_large_ds_sid),
+                              H5Sget_simple_extent_ndims(tv_ptr->file_small_ds_sid_0));
+#endif /* CONTIG_HS_DR_PIO_TEST__M2D_L2S__DEBUG */
+                    ret = H5Dwrite(tv_ptr->small_dataset, H5T_NATIVE_UINT32, tv_ptr->mem_large_ds_sid,
+                                   tv_ptr->file_small_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->large_ds_buf_0);
+                    VRFY((ret >= 0), "H5Dwrite() slice to large ds succeeded.");
+
+                    /* read the on disk square into memory */
+                    ret = H5Dread(tv_ptr->small_dataset, H5T_NATIVE_UINT32, tv_ptr->mem_small_ds_sid,
+                                  tv_ptr->file_small_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->small_ds_buf_1);
+                    VRFY((ret >= 0), "H5Dread() slice from small ds succeeded.");
+
+                    /* verify that expected data is retrieved */
+
+                    mis_match = FALSE;
+                    ptr_1     = tv_ptr->small_ds_buf_1;
+
+                    /* expected_value is the value at the origin of the
+                     * (i, j, k, l) slice of the large data set; values
+                     * increase sequentially from there.
+                     */
+                    expected_value =
+                        (uint32_t)((i * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size *
+                                    tv_ptr->edge_size) +
+                                   (j * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size) +
+                                   (k * tv_ptr->edge_size * tv_ptr->edge_size) + (l * tv_ptr->edge_size));
+
+                    start_index = (size_t)(tv_ptr->mpi_rank) * tv_ptr->small_ds_slice_size;
+                    stop_index  = start_index + tv_ptr->small_ds_slice_size - 1;
+
+                    HDassert(start_index < stop_index);
+                    HDassert(stop_index <= tv_ptr->small_ds_size);
+
+                    /* scan the whole small ds buffer: the written slice must
+                     * match, everything else must still be zero.
+                     */
+                    for (n = 0; n < tv_ptr->small_ds_size; n++) {
+
+                        if ((n >= start_index) && (n <= stop_index)) {
+
+                            if (*ptr_1 != expected_value) {
+
+                                mis_match = TRUE;
+                            }
+                            expected_value++;
+                        }
+                        else {
+
+                            if (*ptr_1 != 0) {
+
+                                mis_match = TRUE;
+                            }
+                        }
+                        /* zero out the value for the next pass */
+                        *ptr_1 = 0;
+
+                        ptr_1++;
+                    }
+
+                    VRFY((mis_match == FALSE), "small slice write from large ds data good.");
+
+                    (tv_ptr->tests_run)++;
+                }
+
+                l++;
+
+                (tv_ptr->total_tests)++;
+
+            } while ((tv_ptr->large_rank > 2) && ((tv_ptr->small_rank - 1) <= 1) && (l < tv_ptr->edge_size));
+            k++;
+        } while ((tv_ptr->large_rank > 3) && ((tv_ptr->small_rank - 1) <= 2) && (k < tv_ptr->edge_size));
+        j++;
+    } while ((tv_ptr->large_rank > 4) && ((tv_ptr->small_rank - 1) <= 3) && (j < tv_ptr->edge_size));
+
+    return;
+
+} /* contig_hs_dr_pio_test__m2d_l2s() */
+
+/*-------------------------------------------------------------------------
+ * Function: contig_hs_dr_pio_test__m2d_s2l()
+ *
+ * Purpose: Part four of a series of tests of I/O to/from hyperslab
+ * selections of different rank in parallel.
+ *
+ * Verify that we can write from memory to file using
+ * selections of different rank that H5Sselect_shape_same()
+ * views as being of the same shape.
+ *
+ * Do this by writing the contents of the process's slice of
+ * the in memory small data set to slices of the on disk
+ * large data set. After each write, read the process's
+ * slice of the large data set back into memory, and verify
+ * that it contains the expected data.
+ *
+ * Verify that H5Sselect_shape_same() returns true on the
+ * memory and file selections.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 8/10/11
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define CONTIG_HS_DR_PIO_TEST__M2D_S2L__DEBUG 0
+
+static void
+contig_hs_dr_pio_test__m2d_s2l(struct hs_dr_pio_test_vars_t *tv_ptr)
+{
+#if CONTIG_HS_DR_PIO_TEST__M2D_S2L__DEBUG
+    const char *fcnName = "contig_hs_dr_pio_test__m2d_s2l()";
+#endif /* CONTIG_HS_DR_PIO_TEST__M2D_S2L__DEBUG */
+    hbool_t   mis_match = FALSE;
+    int       i, j, k, l;
+    size_t    n;
+    int       mpi_rank; /* needed by the VRFY macro */
+    size_t    start_index;
+    size_t    stop_index;
+    uint32_t  expected_value;
+    uint32_t *ptr_1;
+    htri_t    check; /* Shape comparison return value */
+    herr_t    ret;   /* Generic return value */
+
+    /* initialize the local copy of mpi_rank */
+    mpi_rank = tv_ptr->mpi_rank;
+
+    /* Now write the contents of the process's slice of the in memory
+     * small data set to slices of the on disk large data set.  After
+     * each write, read the process's slice of the large data set back
+     * into memory, and verify that it contains the expected data.
+     * Verify that H5Sselect_shape_same() returns true on the memory
+     * and file selections.
+     */
+
+    /* select the slice of the in memory small data set associated with
+     * the process's mpi rank.
+     */
+    tv_ptr->start[0]  = (hsize_t)(tv_ptr->mpi_rank);
+    tv_ptr->stride[0] = (hsize_t)(2 * (tv_ptr->mpi_size + 1));
+    tv_ptr->count[0]  = 1;
+    tv_ptr->block[0]  = 1;
+
+    for (i = 1; i < tv_ptr->large_rank; i++) {
+
+        tv_ptr->start[i]  = 0;
+        tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+        tv_ptr->count[i]  = 1;
+        tv_ptr->block[i]  = (hsize_t)(tv_ptr->edge_size);
+    }
+
+    ret = H5Sselect_hyperslab(tv_ptr->mem_small_ds_sid, H5S_SELECT_SET, tv_ptr->start, tv_ptr->stride,
+                              tv_ptr->count, tv_ptr->block);
+    VRFY((ret >= 0), "H5Sselect_hyperslab(mem_small_ds_sid, set) succeeded");
+
+    /* set up start, stride, count, and block -- note that we will
+     * change start[] so as to write slices of the small data set to
+     * slices of the large data set.
+     */
+    for (i = 0; i < PAR_SS_DR_MAX_RANK; i++) {
+
+        tv_ptr->start[i]  = 0;
+        tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+        tv_ptr->count[i]  = 1;
+        if ((PAR_SS_DR_MAX_RANK - i) > (tv_ptr->small_rank - 1)) {
+
+            tv_ptr->block[i] = 1;
+        }
+        else {
+
+            tv_ptr->block[i] = (hsize_t)(tv_ptr->edge_size);
+        }
+    }
+
+    /* zero out the in memory large ds */
+    HDmemset(tv_ptr->large_ds_buf_1, 0, sizeof(uint32_t) * tv_ptr->large_ds_size);
+
+#if CONTIG_HS_DR_PIO_TEST__M2D_S2L__DEBUG
+    HDfprintf(stdout, "%s writing process slices of small ds to slices of large ds on disk.\n", fcnName);
+#endif /* CONTIG_HS_DR_PIO_TEST__M2D_S2L__DEBUG */
+
+    /* each process works only with the slice of the large data set
+     * selected by its rank -- set the most slowly changing index to
+     * mpi_rank instead of iterating over it.
+     */
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 0) {
+
+        i = tv_ptr->mpi_rank;
+    }
+    else {
+
+        i = 0;
+    }
+
+    /* since large_rank is at most PAR_SS_DR_MAX_RANK, no need to
+     * loop over it -- either we are setting i to mpi_rank, or
+     * we are setting it to zero.  It will not change during the
+     * test.
+     */
+
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 1) {
+
+        j = tv_ptr->mpi_rank;
+    }
+    else {
+
+        j = 0;
+    }
+
+    do {
+        if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 2) {
+
+            k = tv_ptr->mpi_rank;
+        }
+        else {
+
+            k = 0;
+        }
+
+        do {
+            /* since small rank >= 2 and large_rank > small_rank, we
+             * have large_rank >= 3.  Since PAR_SS_DR_MAX_RANK == 5
+             * (barring major re-organization), this gives us:
+             *
+             *     (PAR_SS_DR_MAX_RANK - large_rank) <= 2
+             *
+             * so no need to repeat the test in the outer loops --
+             * just set l = 0.
+             */
+
+            l = 0;
+            do {
+                if ((tv_ptr->skips)++ < tv_ptr->max_skips) { /* skip the test */
+
+                    (tv_ptr->tests_skipped)++;
+
+#if CONTIG_HS_DR_PIO_TEST__M2D_S2L__DEBUG
+                    /* report the coordinates of the skipped sub-test */
+                    tv_ptr->start[0] = (hsize_t)i;
+                    tv_ptr->start[1] = (hsize_t)j;
+                    tv_ptr->start[2] = (hsize_t)k;
+                    tv_ptr->start[3] = (hsize_t)l;
+                    tv_ptr->start[4] = 0;
+
+                    HDfprintf(stdout, "%s:%d: skipping test with start = %d %d %d %d %d.\n", fcnName,
+                              (int)(tv_ptr->mpi_rank), (int)(tv_ptr->start[0]), (int)(tv_ptr->start[1]),
+                              (int)(tv_ptr->start[2]), (int)(tv_ptr->start[3]), (int)(tv_ptr->start[4]));
+                    HDfprintf(stdout, "%s:%d: mem/file extent dims = %d/%d.\n", fcnName, tv_ptr->mpi_rank,
+                              H5Sget_simple_extent_ndims(tv_ptr->mem_small_ds_sid),
+                              H5Sget_simple_extent_ndims(tv_ptr->file_large_ds_sid_0));
+#endif /* CONTIG_HS_DR_PIO_TEST__M2D_S2L__DEBUG */
+                }
+                else { /* run the test */
+
+                    tv_ptr->skips = 0; /* reset the skips counter */
+
+                    /* we know that small_rank >= 1 and that large_rank > small_rank
+                     * by the assertions at the head of this function.  Thus no
+                     * need for another inner loop.
+                     */
+
+                    /* Zero out this processes slice of the on disk large data set.
+                     * Note that this will leave one slice with its original data
+                     * as there is one more slice than processes.
+                     */
+                    ret = H5Dwrite(tv_ptr->large_dataset, H5T_NATIVE_UINT32, tv_ptr->large_ds_slice_sid,
+                                   tv_ptr->file_large_ds_process_slice_sid, tv_ptr->xfer_plist,
+                                   tv_ptr->large_ds_buf_2);
+                    VRFY((ret != FAIL), "H5Dwrite() to zero large ds succeeded");
+
+                    /* select the portion of the in memory large cube to which we
+                     * are going to write data.
+                     */
+                    tv_ptr->start[0] = (hsize_t)i;
+                    tv_ptr->start[1] = (hsize_t)j;
+                    tv_ptr->start[2] = (hsize_t)k;
+                    tv_ptr->start[3] = (hsize_t)l;
+                    tv_ptr->start[4] = 0;
+
+                    ret = H5Sselect_hyperslab(tv_ptr->file_large_ds_sid_0, H5S_SELECT_SET, tv_ptr->start_ptr,
+                                              tv_ptr->stride_ptr, tv_ptr->count_ptr, tv_ptr->block_ptr);
+                    VRFY((ret != FAIL), "H5Sselect_hyperslab() target large ds slice succeeded");
+
+                    /* verify that H5Sselect_shape_same() reports the in
+                     * memory small data set slice selection and the
+                     * on disk slice through the large data set selection
+                     * as having the same shape.
+                     */
+                    check = H5Sselect_shape_same(tv_ptr->mem_small_ds_sid, tv_ptr->file_large_ds_sid_0);
+                    VRFY((check == TRUE), "H5Sselect_shape_same passed");
+
+                    /* write the small data set slice from memory to the
+                     * target slice of the disk data set
+                     */
+#if CONTIG_HS_DR_PIO_TEST__M2D_S2L__DEBUG
+                    HDfprintf(stdout, "%s:%d: start = %d %d %d %d %d.\n", fcnName, (int)(tv_ptr->mpi_rank),
+                              (int)(tv_ptr->start[0]), (int)(tv_ptr->start[1]), (int)(tv_ptr->start[2]),
+                              (int)(tv_ptr->start[3]), (int)(tv_ptr->start[4]));
+                    HDfprintf(stdout, "%s:%d: mem/file extent dims = %d/%d.\n", fcnName, tv_ptr->mpi_rank,
+                              H5Sget_simple_extent_ndims(tv_ptr->mem_small_ds_sid),
+                              H5Sget_simple_extent_ndims(tv_ptr->file_large_ds_sid_0));
+#endif /* CONTIG_HS_DR_PIO_TEST__M2D_S2L__DEBUG */
+                    ret = H5Dwrite(tv_ptr->large_dataset, H5T_NATIVE_UINT32, tv_ptr->mem_small_ds_sid,
+                                   tv_ptr->file_large_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->small_ds_buf_0);
+                    VRFY((ret != FAIL), "H5Dwrite of small ds slice to large ds succeeded");
+
+                    /* read this processes slice on the on disk large
+                     * data set into memory.
+                     */
+
+                    ret = H5Dread(
+                        tv_ptr->large_dataset, H5T_NATIVE_UINT32, tv_ptr->mem_large_ds_process_slice_sid,
+                        tv_ptr->file_large_ds_process_slice_sid, tv_ptr->xfer_plist, tv_ptr->large_ds_buf_1);
+                    VRFY((ret != FAIL), "H5Dread() of process slice of large ds succeeded");
+
+                    /* verify that the expected data and only the
+                     * expected data was read.
+                     */
+                    ptr_1          = tv_ptr->large_ds_buf_1;
+                    expected_value = (uint32_t)((size_t)(tv_ptr->mpi_rank) * tv_ptr->small_ds_slice_size);
+
+                    /* start_index is the linear offset of the (i, j, k, l)
+                     * slice origin within the large data set buffer.
+                     */
+                    start_index =
+                        (size_t)((i * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size *
+                                  tv_ptr->edge_size) +
+                                 (j * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size) +
+                                 (k * tv_ptr->edge_size * tv_ptr->edge_size) + (l * tv_ptr->edge_size));
+                    stop_index = start_index + tv_ptr->small_ds_slice_size - 1;
+
+                    HDassert(start_index < stop_index);
+                    HDassert(stop_index < tv_ptr->large_ds_size);
+
+                    /* scan the whole large ds buffer: the written slice must
+                     * match, everything else must still be zero.
+                     */
+                    for (n = 0; n < tv_ptr->large_ds_size; n++) {
+
+                        if ((n >= start_index) && (n <= stop_index)) {
+
+                            if (*ptr_1 != expected_value) {
+
+                                mis_match = TRUE;
+                            }
+
+                            expected_value++;
+                        }
+                        else {
+
+                            if (*ptr_1 != 0) {
+
+                                mis_match = TRUE;
+                            }
+                        }
+                        /* zero out buffer for next test */
+                        *ptr_1 = 0;
+                        ptr_1++;
+                    }
+
+                    VRFY((mis_match == FALSE), "small ds slice write to large ds slice data good.");
+
+                    (tv_ptr->tests_run)++;
+                }
+
+                l++;
+
+                (tv_ptr->total_tests)++;
+
+            } while ((tv_ptr->large_rank > 2) && ((tv_ptr->small_rank - 1) <= 1) && (l < tv_ptr->edge_size));
+            k++;
+        } while ((tv_ptr->large_rank > 3) && ((tv_ptr->small_rank - 1) <= 2) && (k < tv_ptr->edge_size));
+        j++;
+    } while ((tv_ptr->large_rank > 4) && ((tv_ptr->small_rank - 1) <= 3) && (j < tv_ptr->edge_size));
+
+    return;
+
+} /* contig_hs_dr_pio_test__m2d_s2l() */
+
+/*-------------------------------------------------------------------------
+ * Function: contig_hs_dr_pio_test__run_test()
+ *
+ * Purpose: Test I/O to/from hyperslab selections of different rank in
+ * parallel.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 9/18/09
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG 0
+
+static void
+contig_hs_dr_pio_test__run_test(const int test_num, const int edge_size, const int chunk_edge_size,
+                                const int small_rank, const int large_rank, const hbool_t use_collective_io,
+                                const hid_t dset_type, int express_test, int *skips_ptr, int max_skips,
+                                int64_t *total_tests_ptr, int64_t *tests_run_ptr, int64_t *tests_skipped_ptr,
+                                int mpi_rank)
+{
+#if CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG
+    const char *fcnName = "contig_hs_dr_pio_test__run_test()";
+#endif /* CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG */
+    /* All shared test state lives in this structure, which is passed to
+     * the four subtest functions.  Each field is annotated with its
+     * declared type in the comments below.
+     */
+    struct hs_dr_pio_test_vars_t test_vars = {
+        /* int mpi_size = */ -1,
+        /* int mpi_rank = */ -1,
+        /* MPI_Comm mpi_comm = */ MPI_COMM_NULL,
+        /* MPI_Inf mpi_info = */ MPI_INFO_NULL,
+        /* int test_num = */ -1,
+        /* int edge_size = */ -1,
+        /* int checker_edge_size = */ -1,
+        /* int chunk_edge_size = */ -1,
+        /* int small_rank = */ -1,
+        /* int large_rank = */ -1,
+        /* hid_t dset_type = */ -1,
+        /* uint32_t * small_ds_buf_0 = */ NULL,
+        /* uint32_t * small_ds_buf_1 = */ NULL,
+        /* uint32_t * small_ds_buf_2 = */ NULL,
+        /* uint32_t * small_ds_slice_buf = */ NULL,
+        /* uint32_t * large_ds_buf_0 = */ NULL,
+        /* uint32_t * large_ds_buf_1 = */ NULL,
+        /* uint32_t * large_ds_buf_2 = */ NULL,
+        /* uint32_t * large_ds_slice_buf = */ NULL,
+        /* int small_ds_offset = */ -1,
+        /* int large_ds_offset = */ -1,
+        /* hid_t fid = */ -1, /* HDF5 file ID */
+        /* hid_t xfer_plist = */ H5P_DEFAULT,
+        /* hid_t full_mem_small_ds_sid = */ -1,
+        /* hid_t full_file_small_ds_sid = */ -1,
+        /* hid_t mem_small_ds_sid = */ -1,
+        /* hid_t file_small_ds_sid_0 = */ -1,
+        /* hid_t file_small_ds_sid_1 = */ -1,
+        /* hid_t small_ds_slice_sid = */ -1,
+        /* hid_t full_mem_large_ds_sid = */ -1,
+        /* hid_t full_file_large_ds_sid = */ -1,
+        /* hid_t mem_large_ds_sid = */ -1,
+        /* hid_t file_large_ds_sid_0 = */ -1,
+        /* hid_t file_large_ds_sid_1 = */ -1,
+        /* hid_t file_large_ds_process_slice_sid = */ -1,
+        /* hid_t mem_large_ds_process_slice_sid = */ -1,
+        /* hid_t large_ds_slice_sid = */ -1,
+        /* hid_t small_dataset = */ -1, /* Dataset ID */
+        /* hid_t large_dataset = */ -1, /* Dataset ID */
+        /* size_t small_ds_size = */ 1,
+        /* size_t small_ds_slice_size = */ 1,
+        /* size_t large_ds_size = */ 1,
+        /* size_t large_ds_slice_size = */ 1,
+        /* hsize_t dims[PAR_SS_DR_MAX_RANK] = */ {0, 0, 0, 0, 0},
+        /* hsize_t chunk_dims[PAR_SS_DR_MAX_RANK] = */ {0, 0, 0, 0, 0},
+        /* hsize_t start[PAR_SS_DR_MAX_RANK] = */ {0, 0, 0, 0, 0},
+        /* hsize_t stride[PAR_SS_DR_MAX_RANK] = */ {0, 0, 0, 0, 0},
+        /* hsize_t count[PAR_SS_DR_MAX_RANK] = */ {0, 0, 0, 0, 0},
+        /* hsize_t block[PAR_SS_DR_MAX_RANK] = */ {0, 0, 0, 0, 0},
+        /* hsize_t * start_ptr = */ NULL,
+        /* hsize_t * stride_ptr = */ NULL,
+        /* hsize_t * count_ptr = */ NULL,
+        /* hsize_t * block_ptr = */ NULL,
+        /* int skips = */ 0,
+        /* int max_skips = */ 0,
+        /* int64_t total_tests = */ 0,
+        /* int64_t tests_run = */ 0,
+        /* int64_t tests_skipped = */ 0};
+    struct hs_dr_pio_test_vars_t *tv_ptr = &test_vars;
+
+    /* progress line on the main process only; "\r" keeps it on one line */
+    if (MAINPROCESS)
+        printf("\r - running test #%lld: small rank = %d, large rank = %d", (long long)(test_num + 1),
+               small_rank, large_rank);
+
+    /* Create the file, datasets, and dataspaces, and initialize the on
+     * disk data.  NOTE(review): the literal -1 appears to be the
+     * checker_edge_size argument, presumably unused by the contiguous
+     * tests -- confirm against hs_dr_pio_test__setup().
+     */
+    hs_dr_pio_test__setup(test_num, edge_size, -1, chunk_edge_size, small_rank, large_rank, use_collective_io,
+                          dset_type, express_test, tv_ptr);
+
+    /* initialize skips & max_skips */
+    tv_ptr->skips     = *skips_ptr;
+    tv_ptr->max_skips = max_skips;
+
+#if CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG
+    if (MAINPROCESS) {
+        HDfprintf(stdout, "test %d: small rank = %d, large rank = %d.\n", test_num, small_rank, large_rank);
+        HDfprintf(stdout, "test %d: Initialization complete.\n", test_num);
+    }
+#endif /* CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG */
+
+    /* first, verify that we can read from disk correctly using selections
+     * of different rank that H5Sselect_shape_same() views as being of the
+     * same shape.
+     *
+     * Start by reading small_rank - 1 dimensional slice from the on disk
+     * large cube, and verifying that the data read is correct.  Verify that
+     * H5Sselect_shape_same() returns true on the memory and file selections.
+     */
+
+#if CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG
+    if (MAINPROCESS) {
+        HDfprintf(stdout, "test %d: running contig_hs_dr_pio_test__d2m_l2s.\n", test_num);
+    }
+#endif /* CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG */
+    contig_hs_dr_pio_test__d2m_l2s(tv_ptr);
+
+    /* Second, read slices of the on disk small data set into slices
+     * through the in memory large data set, and verify that the correct
+     * data (and only the correct data) is read.
+     */
+
+#if CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG
+    if (MAINPROCESS) {
+        HDfprintf(stdout, "test %d: running contig_hs_dr_pio_test__d2m_s2l.\n", test_num);
+    }
+#endif /* CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG */
+    contig_hs_dr_pio_test__d2m_s2l(tv_ptr);
+
+    /* now we go in the opposite direction, verifying that we can write
+     * from memory to file using selections of different rank that
+     * H5Sselect_shape_same() views as being of the same shape.
+     *
+     * Start by writing small_rank - 1 D slices from the in memory large data
+     * set to the on disk small cube dataset.  After each write, read the
+     * slice of the small dataset back from disk, and verify that it contains
+     * the expected data.  Verify that H5Sselect_shape_same() returns true on
+     * the memory and file selections.
+     */
+
+#if CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG
+    if (MAINPROCESS) {
+        HDfprintf(stdout, "test %d: running contig_hs_dr_pio_test__m2d_l2s.\n", test_num);
+    }
+#endif /* CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG */
+    contig_hs_dr_pio_test__m2d_l2s(tv_ptr);
+
+    /* Now write the contents of the process's slice of the in memory
+     * small data set to slices of the on disk large data set.  After
+     * each write, read the process's slice of the large data set back
+     * into memory, and verify that it contains the expected data.
+     * Verify that H5Sselect_shape_same() returns true on the memory
+     * and file selections.
+     */
+
+#if CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG
+    if (MAINPROCESS) {
+        HDfprintf(stdout, "test %d: running contig_hs_dr_pio_test__m2d_s2l.\n", test_num);
+    }
+#endif /* CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG */
+    contig_hs_dr_pio_test__m2d_s2l(tv_ptr);
+
+#if CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG
+    if (MAINPROCESS) {
+        HDfprintf(stdout, "test %d: Subtests complete -- tests run/skipped/total = %lld/%lld/%lld.\n",
+                  test_num, (long long)(tv_ptr->tests_run), (long long)(tv_ptr->tests_skipped),
+                  (long long)(tv_ptr->total_tests));
+    }
+#endif /* CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG */
+
+    /* release all HDF5 objects and buffers allocated in setup */
+    hs_dr_pio_test__takedown(tv_ptr);
+
+#if CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG
+    if (MAINPROCESS) {
+        HDfprintf(stdout, "test %d: Takedown complete.\n", test_num);
+    }
+#endif /* CONTIG_HS_DR_PIO_TEST__RUN_TEST__DEBUG */
+
+    /* propagate the skip state and test counters back to the caller */
+    *skips_ptr = tv_ptr->skips;
+    *total_tests_ptr += tv_ptr->total_tests;
+    *tests_run_ptr += tv_ptr->tests_run;
+    *tests_skipped_ptr += tv_ptr->tests_skipped;
+
+    return;
+
+} /* contig_hs_dr_pio_test__run_test() */
+
+/*-------------------------------------------------------------------------
+ * Function: contig_hs_dr_pio_test(ShapeSameTestMethods sstest_type)
+ *
+ * Purpose: Test I/O to/from hyperslab selections of different rank in
+ * the parallel case.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 9/18/09
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define CONTIG_HS_DR_PIO_TEST__DEBUG 0
+
+static void
+contig_hs_dr_pio_test(ShapeSameTestMethods sstest_type)
+{
+    int express_test;
+    int local_express_test;
+    int mpi_rank = -1;
+    int mpi_size;
+    int test_num = 0;
+    int edge_size;
+    int chunk_edge_size = 0;
+    int small_rank;
+    int large_rank;
+    int mpi_result;
+    int skips     = 0;
+    int max_skips = 0;
+    /* The following table list the number of sub-tests skipped between
+     * each test that is actually executed as a function of the express
+     * test level.  Note that any value in excess of 4880 will cause all
+     * sub tests to be skipped.
+     */
+    int     max_skips_tbl[4] = {0, 4, 64, 1024};
+    hid_t   dset_type        = H5T_NATIVE_UINT;
+    int64_t total_tests      = 0;
+    int64_t tests_run        = 0;
+    int64_t tests_skipped    = 0;
+
+    HDcompile_assert(sizeof(uint32_t) == sizeof(unsigned));
+
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    edge_size = (mpi_size > 6 ? mpi_size : 6);
+
+    local_express_test = EXPRESS_MODE; /* GetTestExpress(); */
+
+    /* agree on the maximum express test level across all ranks */
+    mpi_result = MPI_Allreduce((void *)&local_express_test, (void *)&express_test, 1, MPI_INT, MPI_MAX,
+                               MPI_COMM_WORLD);
+
+    VRFY((mpi_result == MPI_SUCCESS), "MPI_Allreduce(0) succeeded");
+
+    /* Select max_skips from the Allreduce'd express_test value, not the
+     * local value -- the Allreduce exists so that all ranks agree on the
+     * express level.  If ranks were to compute different max_skips
+     * values, their skip patterns would diverge and the collective I/O
+     * calls in the subtests could deadlock.  (With EXPRESS_MODE being a
+     * uniform compile-time constant this is behavior-identical, but it
+     * is robust against divergent express levels.)
+     */
+    if (express_test < 0) {
+        max_skips = max_skips_tbl[0];
+    }
+    else if (express_test > 3) {
+        max_skips = max_skips_tbl[3];
+    }
+    else {
+        max_skips = max_skips_tbl[express_test];
+    }
+
+    /* iterate over all (small_rank, large_rank) pairs with
+     * 2 <= small_rank < large_rank <= PAR_SS_DR_MAX_RANK.
+     */
+    for (large_rank = 3; large_rank <= PAR_SS_DR_MAX_RANK; large_rank++) {
+
+        for (small_rank = 2; small_rank < large_rank; small_rank++) {
+
+            switch (sstest_type) {
+                case IND_CONTIG:
+                    /* contiguous data set, independent I/O */
+                    chunk_edge_size = 0;
+
+                    contig_hs_dr_pio_test__run_test(
+                        test_num, edge_size, chunk_edge_size, small_rank, large_rank, FALSE, dset_type,
+                        express_test, &skips, max_skips, &total_tests, &tests_run, &tests_skipped, mpi_rank);
+                    test_num++;
+                    break;
+                    /* end of case IND_CONTIG */
+
+                case COL_CONTIG:
+                    /* contiguous data set, collective I/O */
+                    chunk_edge_size = 0;
+
+                    contig_hs_dr_pio_test__run_test(
+                        test_num, edge_size, chunk_edge_size, small_rank, large_rank, TRUE, dset_type,
+                        express_test, &skips, max_skips, &total_tests, &tests_run, &tests_skipped, mpi_rank);
+                    test_num++;
+                    break;
+                    /* end of case COL_CONTIG */
+
+                case IND_CHUNKED:
+                    /* chunked data set, independent I/O */
+                    chunk_edge_size = 5;
+
+                    contig_hs_dr_pio_test__run_test(
+                        test_num, edge_size, chunk_edge_size, small_rank, large_rank, FALSE, dset_type,
+                        express_test, &skips, max_skips, &total_tests, &tests_run, &tests_skipped, mpi_rank);
+                    test_num++;
+                    break;
+                    /* end of case IND_CHUNKED */
+
+                case COL_CHUNKED:
+                    /* chunked data set, collective I/O */
+                    chunk_edge_size = 5;
+
+                    contig_hs_dr_pio_test__run_test(
+                        test_num, edge_size, chunk_edge_size, small_rank, large_rank, TRUE, dset_type,
+                        express_test, &skips, max_skips, &total_tests, &tests_run, &tests_skipped, mpi_rank);
+                    test_num++;
+                    break;
+                    /* end of case COL_CHUNKED */
+
+                default:
+                    VRFY((FALSE), "unknown test type");
+                    break;
+
+            } /* end of switch(sstest_type) */
+#if CONTIG_HS_DR_PIO_TEST__DEBUG
+            if ((MAINPROCESS) && (tests_skipped > 0)) {
+                HDfprintf(stdout, "    run/skipped/total = %lld/%lld/%lld.\n", tests_run, tests_skipped,
+                          total_tests);
+            }
+#endif /* CONTIG_HS_DR_PIO_TEST__DEBUG */
+        }
+    }
+
+    if (MAINPROCESS) {
+        if (tests_skipped > 0) {
+            HDfprintf(stdout, "    %" PRId64 " of %" PRId64 " subtests skipped to expedite testing.\n",
+                      tests_skipped, total_tests);
+        }
+        else
+            HDprintf("\n");
+    }
+
+    return;
+
+} /* contig_hs_dr_pio_test() */
+
+/****************************************************************
+**
+** ckrbrd_hs_dr_pio_test__slct_ckrbrd():
+** Given a dataspace of tgt_rank, and dimensions:
+**
+** (mpi_size + 1), edge_size, ... , edge_size
+**
+** edge_size, and a checker_edge_size, select a checker
+** board selection of a sel_rank (sel_rank < tgt_rank)
+** dimensional slice through the dataspace parallel to the
+** sel_rank fastest changing indices, with origin (in the
+** higher indices) as indicated by the start array.
+**
+** Note that this function, like all its relatives, is
+** hard coded to presume a maximum dataspace rank of 5.
+** While this maximum is declared as a constant, increasing
+** it will require extensive coding in addition to changing
+** the value of the constant.
+**
+** JRM -- 10/8/09
+**
+****************************************************************/
+
+#define CKRBRD_HS_DR_PIO_TEST__SELECT_CHECKER_BOARD__DEBUG 0
+
+static void
+ckrbrd_hs_dr_pio_test__slct_ckrbrd(const int mpi_rank, const hid_t tgt_sid, const int tgt_rank,
+                                   const int edge_size, const int checker_edge_size, const int sel_rank,
+                                   hsize_t sel_start[])
+{
+#if CKRBRD_HS_DR_PIO_TEST__SELECT_CHECKER_BOARD__DEBUG
+    const char *fcnName = "ckrbrd_hs_dr_pio_test__slct_ckrbrd():";
+#endif
+    hbool_t first_selection = TRUE;
+    int i, j, k, l, m;
+    int n_cube_offset;
+    int sel_offset;
+    const int test_max_rank = PAR_SS_DR_MAX_RANK; /* must update code if */
+                                                  /* this changes */
+    hsize_t base_count;
+    hsize_t offset_count;
+    hsize_t start[PAR_SS_DR_MAX_RANK];
+    hsize_t stride[PAR_SS_DR_MAX_RANK];
+    hsize_t count[PAR_SS_DR_MAX_RANK];
+    hsize_t block[PAR_SS_DR_MAX_RANK];
+    herr_t ret; /* Generic return value */
+
+    HDassert(edge_size >= 6);
+    HDassert(0 < checker_edge_size);
+    HDassert(checker_edge_size <= edge_size);
+    HDassert(0 < sel_rank);
+    HDassert(sel_rank <= tgt_rank);
+    HDassert(tgt_rank <= test_max_rank);
+    HDassert(test_max_rank <= PAR_SS_DR_MAX_RANK);
+
+    /* sel_offset is the index of the first dimension that participates in
+     * the checker board selection; dimensions below it are fixed.
+     */
+    sel_offset = test_max_rank - sel_rank;
+    HDassert(sel_offset >= 0);
+
+    /* n_cube_offset is the index of the first dimension that actually exists
+     * in the target dataspace (the arrays below are always of size
+     * PAR_SS_DR_MAX_RANK, but only the last tgt_rank entries are passed to
+     * H5Sselect_hyperslab()).
+     */
+    n_cube_offset = test_max_rank - tgt_rank;
+    HDassert(n_cube_offset >= 0);
+    HDassert(n_cube_offset <= sel_offset);
+
+#if CKRBRD_HS_DR_PIO_TEST__SELECT_CHECKER_BOARD__DEBUG
+    HDfprintf(stdout, "%s:%d: edge_size/checker_edge_size = %d/%d\n", fcnName, mpi_rank, edge_size,
+              checker_edge_size);
+    HDfprintf(stdout, "%s:%d: sel_rank/sel_offset = %d/%d.\n", fcnName, mpi_rank, sel_rank, sel_offset);
+    HDfprintf(stdout, "%s:%d: tgt_rank/n_cube_offset = %d/%d.\n", fcnName, mpi_rank, tgt_rank, n_cube_offset);
+#endif /* CKRBRD_HS_DR_PIO_TEST__SELECT_CHECKER_BOARD__DEBUG */
+
+    /* First, compute the base count (which assumes start == 0
+     * for the associated offset) and offset_count (which
+     * assumes start == checker_edge_size for the associated
+     * offset).
+     *
+     * Note that the following computation depends on the C99
+     * requirement that integer division discard any fraction
+     * (truncation towards zero) to function correctly. As we
+     * now require C99, this shouldn't be a problem, but noting
+     * it may save us some pain if we are ever obliged to support
+     * pre-C99 compilers again.
+     */
+
+    base_count = (hsize_t)(edge_size / (checker_edge_size * 2));
+
+    if ((edge_size % (checker_edge_size * 2)) > 0) {
+
+        base_count++;
+    }
+
+    offset_count = (hsize_t)((edge_size - checker_edge_size) / (checker_edge_size * 2));
+
+    if (((edge_size - checker_edge_size) % (checker_edge_size * 2)) > 0) {
+
+        offset_count++;
+    }
+
+    /* Now set up the stride and block arrays, and portions of the start
+     * and count arrays that will not be altered during the selection of
+     * the checker board.
+     */
+    i = 0;
+    while (i < n_cube_offset) {
+
+        /* these values should never be used */
+        start[i] = 0;
+        stride[i] = 0;
+        count[i] = 0;
+        block[i] = 0;
+
+        i++;
+    }
+
+    while (i < sel_offset) {
+
+        /* dimensions that exist in the target dataspace but are fixed
+         * (not part of the checker board): a single 1x1 block at the
+         * caller-supplied origin, with a stride large enough that the
+         * block can never repeat.
+         */
+        start[i] = sel_start[i];
+        stride[i] = (hsize_t)(2 * edge_size);
+        count[i] = 1;
+        block[i] = 1;
+
+        i++;
+    }
+
+    while (i < test_max_rank) {
+
+        /* dimensions that participate in the checker board: checkers of
+         * checker_edge_size repeating every 2 * checker_edge_size.  The
+         * start[] and count[] entries for these dimensions are filled in
+         * by the loop nest below.
+         */
+        stride[i] = (hsize_t)(2 * checker_edge_size);
+        block[i] = (hsize_t)checker_edge_size;
+
+        i++;
+    }
+
+    /* Iterate over the two possible start offsets -- 0 (the "base" case)
+     * and checker_edge_size (the "offset" case) -- in each of the (up to)
+     * 5 dimensions.  Each do-while loop below executes twice only when its
+     * dimension participates in the checker board selection (i.e. its index
+     * is >= sel_offset); otherwise it executes once.  Only the combinations
+     * whose offset indices sum to an even number are selected, which yields
+     * the checker board pattern: the first such combination is applied with
+     * H5S_SELECT_SET, all later ones are OR'd in.
+     */
+    i = 0;
+    do {
+        if (0 >= sel_offset) {
+
+            if (i == 0) {
+
+                start[0] = 0;
+                count[0] = base_count;
+            }
+            else {
+
+                start[0] = (hsize_t)checker_edge_size;
+                count[0] = offset_count;
+            }
+        }
+
+        j = 0;
+        do {
+            if (1 >= sel_offset) {
+
+                if (j == 0) {
+
+                    start[1] = 0;
+                    count[1] = base_count;
+                }
+                else {
+
+                    start[1] = (hsize_t)checker_edge_size;
+                    count[1] = offset_count;
+                }
+            }
+
+            k = 0;
+            do {
+                if (2 >= sel_offset) {
+
+                    if (k == 0) {
+
+                        start[2] = 0;
+                        count[2] = base_count;
+                    }
+                    else {
+
+                        start[2] = (hsize_t)checker_edge_size;
+                        count[2] = offset_count;
+                    }
+                }
+
+                l = 0;
+                do {
+                    if (3 >= sel_offset) {
+
+                        if (l == 0) {
+
+                            start[3] = 0;
+                            count[3] = base_count;
+                        }
+                        else {
+
+                            start[3] = (hsize_t)checker_edge_size;
+                            count[3] = offset_count;
+                        }
+                    }
+
+                    m = 0;
+                    do {
+                        if (4 >= sel_offset) {
+
+                            if (m == 0) {
+
+                                start[4] = 0;
+                                count[4] = base_count;
+                            }
+                            else {
+
+                                start[4] = (hsize_t)checker_edge_size;
+                                count[4] = offset_count;
+                            }
+                        }
+
+                        /* even parity of the offset indices -> this
+                         * combination belongs to the selected checkers
+                         */
+                        if (((i + j + k + l + m) % 2) == 0) {
+
+#if CKRBRD_HS_DR_PIO_TEST__SELECT_CHECKER_BOARD__DEBUG
+                            HDfprintf(stdout, "%s%d: *** first_selection = %d ***\n", fcnName, mpi_rank,
+                                      (int)first_selection);
+                            HDfprintf(stdout, "%s:%d: i/j/k/l/m = %d/%d/%d/%d/%d\n", fcnName, mpi_rank, i, j,
+                                      k, l, m);
+                            HDfprintf(stdout, "%s:%d: start = %d %d %d %d %d.\n", fcnName, mpi_rank,
+                                      (int)start[0], (int)start[1], (int)start[2], (int)start[3],
+                                      (int)start[4]);
+                            HDfprintf(stdout, "%s:%d: stride = %d %d %d %d %d.\n", fcnName, mpi_rank,
+                                      (int)stride[0], (int)stride[1], (int)stride[2], (int)stride[3],
+                                      (int)stride[4]);
+                            HDfprintf(stdout, "%s:%d: count = %d %d %d %d %d.\n", fcnName, mpi_rank,
+                                      (int)count[0], (int)count[1], (int)count[2], (int)count[3],
+                                      (int)count[4]);
+                            HDfprintf(stdout, "%s:%d: block = %d %d %d %d %d.\n", fcnName, mpi_rank,
+                                      (int)block[0], (int)block[1], (int)block[2], (int)block[3],
+                                      (int)block[4]);
+                            HDfprintf(stdout, "%s:%d: n-cube extent dims = %d.\n", fcnName, mpi_rank,
+                                      H5Sget_simple_extent_ndims(tgt_sid));
+                            HDfprintf(stdout, "%s:%d: selection rank = %d.\n", fcnName, mpi_rank, sel_rank);
+#endif
+
+                            if (first_selection) {
+
+                                first_selection = FALSE;
+
+                                /* pass only the last tgt_rank entries of the
+                                 * parameter arrays -- see n_cube_offset above
+                                 */
+                                ret = H5Sselect_hyperslab(tgt_sid, H5S_SELECT_SET, &(start[n_cube_offset]),
+                                                          &(stride[n_cube_offset]), &(count[n_cube_offset]),
+                                                          &(block[n_cube_offset]));
+
+                                VRFY((ret != FAIL), "H5Sselect_hyperslab(SET) succeeded");
+                            }
+                            else {
+
+                                ret = H5Sselect_hyperslab(tgt_sid, H5S_SELECT_OR, &(start[n_cube_offset]),
+                                                          &(stride[n_cube_offset]), &(count[n_cube_offset]),
+                                                          &(block[n_cube_offset]));
+
+                                VRFY((ret != FAIL), "H5Sselect_hyperslab(OR) succeeded");
+                            }
+                        }
+
+                        m++;
+
+                    } while ((m <= 1) && (4 >= sel_offset));
+
+                    l++;
+
+                } while ((l <= 1) && (3 >= sel_offset));
+
+                k++;
+
+            } while ((k <= 1) && (2 >= sel_offset));
+
+            j++;
+
+        } while ((j <= 1) && (1 >= sel_offset));
+
+        i++;
+
+    } while ((i <= 1) && (0 >= sel_offset));
+
+#if CKRBRD_HS_DR_PIO_TEST__SELECT_CHECKER_BOARD__DEBUG
+    HDfprintf(stdout, "%s%d: H5Sget_select_npoints(tgt_sid) = %d.\n", fcnName, mpi_rank,
+              (int)H5Sget_select_npoints(tgt_sid));
+#endif /* CKRBRD_HS_DR_PIO_TEST__SELECT_CHECKER_BOARD__DEBUG */
+
+    /* Clip the selection back to the dataspace proper. */
+
+    for (i = 0; i < test_max_rank; i++) {
+
+        start[i] = 0;
+        stride[i] = (hsize_t)edge_size;
+        count[i] = 1;
+        block[i] = (hsize_t)edge_size;
+    }
+
+    ret = H5Sselect_hyperslab(tgt_sid, H5S_SELECT_AND, start, stride, count, block);
+
+    VRFY((ret != FAIL), "H5Sselect_hyperslab(AND) succeeded");
+
+#if CKRBRD_HS_DR_PIO_TEST__SELECT_CHECKER_BOARD__DEBUG
+    HDfprintf(stdout, "%s%d: H5Sget_select_npoints(tgt_sid) = %d.\n", fcnName, mpi_rank,
+              (int)H5Sget_select_npoints(tgt_sid));
+    HDfprintf(stdout, "%s%d: done.\n", fcnName, mpi_rank);
+#endif /* CKRBRD_HS_DR_PIO_TEST__SELECT_CHECKER_BOARD__DEBUG */
+
+    return;
+
+} /* ckrbrd_hs_dr_pio_test__slct_ckrbrd() */
+
+/****************************************************************
+**
+** ckrbrd_hs_dr_pio_test__verify_data():
+**
+** Examine the supplied buffer to see if it contains the
+** expected data. Return TRUE if it does, and FALSE
+** otherwise.
+**
+** The supplied buffer is presumed to this process's slice
+** of the target data set. Each such slice will be an
+** n-cube of rank (rank -1) and the supplied edge_size with
+** origin (mpi_rank, 0, ... , 0) in the target data set.
+**
+** Further, the buffer is presumed to be the result of reading
+** or writing a checker board selection of an m (1 <= m <
+** rank) dimensional slice through this processes slice
+** of the target data set. Also, this slice must be parallel
+** to the fastest changing indices.
+**
+** It is further presumed that the buffer was zeroed before
+** the read/write, and that the full target data set (i.e.
+** the buffer/data set for all processes) was initialized
+** with the natural numbers listed in order from the origin
+** along the fastest changing axis.
+**
+** Thus for a 20x10x10 dataset, the value stored in location
+** (x, y, z) (assuming that z is the fastest changing index
+** and x the slowest) is assumed to be:
+**
+** (10 * 10 * x) + (10 * y) + z
+**
+** Further, supposing that this is process 10, this process's
+** slice of the dataset would be a 10 x 10 2-cube with origin
+** (10, 0, 0) in the data set, and would be initialize (prior
+** to the checkerboard selection) as follows:
+**
+** 1000, 1001, 1002, ... 1008, 1009
+** 1010, 1011, 1012, ... 1018, 1019
+** . . . . .
+** . . . . .
+** . . . . .
+** 1090, 1091, 1092, ... 1098, 1099
+**
+** In the case of a read from the processors slice of another
+** data set of different rank, the values expected will have
+** to be adjusted accordingly. This is done via the
+** first_expected_val parameter.
+**
+** Finally, the function presumes that the first element
+** of the buffer resides either at the origin of either
+** a selected or an unselected checker. (Translation:
+** if partial checkers appear in the buffer, they will
+** intersect the edges of the n-cube opposite the origin.)
+**
+****************************************************************/
+
+#define CKRBRD_HS_DR_PIO_TEST__VERIFY_DATA__DEBUG 0
+
+static hbool_t
+ckrbrd_hs_dr_pio_test__verify_data(uint32_t *buf_ptr, const int rank, const int edge_size,
+                                   const int checker_edge_size, uint32_t first_expected_val,
+                                   hbool_t buf_starts_in_checker)
+{
+#if CKRBRD_HS_DR_PIO_TEST__VERIFY_DATA__DEBUG
+    const char *fcnName = "ckrbrd_hs_dr_pio_test__verify_data():";
+#endif
+    hbool_t   good_data = TRUE; /* set to FALSE on first mismatch; never reset */
+    hbool_t   in_checker;
+    hbool_t   start_in_checker[5];
+    uint32_t  expected_value;
+    uint32_t *val_ptr;
+    int       i, j, k, l, m;     /* to track position in n-cube */
+    int       v, w, x, y, z;     /* to track position in checker */
+    const int test_max_rank = 5; /* code changes needed if this is increased */
+
+    HDassert(buf_ptr != NULL);
+    HDassert(0 < rank);
+    HDassert(rank <= test_max_rank);
+    HDassert(edge_size >= 6);
+    HDassert(0 < checker_edge_size);
+    HDassert(checker_edge_size <= edge_size);
+    HDassert(test_max_rank <= PAR_SS_DR_MAX_RANK);
+
+#if CKRBRD_HS_DR_PIO_TEST__VERIFY_DATA__DEBUG
+    /* NOTE(review): this debug block previously contained a stray closing
+     * brace with no matching opening brace, so the function did not compile
+     * with the debug macro set to 1.  The statements are now wrapped in
+     * their own scope, which also makes the local mpi_rank declaration
+     * legal mid-function under pre-C99 rules.
+     */
+    {
+        int mpi_rank;
+
+        MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+        HDfprintf(stdout, "%s mpi_rank = %d.\n", fcnName, mpi_rank);
+        HDfprintf(stdout, "%s rank = %d.\n", fcnName, rank);
+        HDfprintf(stdout, "%s edge_size = %d.\n", fcnName, edge_size);
+        HDfprintf(stdout, "%s checker_edge_size = %d.\n", fcnName, checker_edge_size);
+        HDfprintf(stdout, "%s first_expected_val = %d.\n", fcnName, (int)first_expected_val);
+        HDfprintf(stdout, "%s starts_in_checker = %d.\n", fcnName, (int)buf_starts_in_checker);
+    }
+#endif
+
+    val_ptr        = buf_ptr;
+    expected_value = first_expected_val;
+
+    /* Walk the (up to) 5-D buffer in memory order.  For each dimension,
+     * i/j/k/l/m track the position in the n-cube while v/w/x/y/z track the
+     * position within the current checker; whenever a checker position
+     * reaches checker_edge_size, the in/out-of-checker state for that
+     * dimension flips.  Selected positions must hold expected_value,
+     * unselected positions must still be zero; all positions are zeroed
+     * as we go so the buffer can be re-used.
+     */
+    i                   = 0;
+    v                   = 0;
+    start_in_checker[0] = buf_starts_in_checker;
+    do {
+        if (v >= checker_edge_size) {
+
+            start_in_checker[0] = !start_in_checker[0];
+            v                   = 0;
+        }
+
+        j                   = 0;
+        w                   = 0;
+        start_in_checker[1] = start_in_checker[0];
+        do {
+            if (w >= checker_edge_size) {
+
+                start_in_checker[1] = !start_in_checker[1];
+                w                   = 0;
+            }
+
+            k                   = 0;
+            x                   = 0;
+            start_in_checker[2] = start_in_checker[1];
+            do {
+                if (x >= checker_edge_size) {
+
+                    start_in_checker[2] = !start_in_checker[2];
+                    x                   = 0;
+                }
+
+                l                   = 0;
+                y                   = 0;
+                start_in_checker[3] = start_in_checker[2];
+                do {
+                    if (y >= checker_edge_size) {
+
+                        start_in_checker[3] = !start_in_checker[3];
+                        y                   = 0;
+                    }
+
+                    m = 0;
+                    z = 0;
+#if CKRBRD_HS_DR_PIO_TEST__VERIFY_DATA__DEBUG
+                    HDfprintf(stdout, "%d, %d, %d, %d, %d:", i, j, k, l, m);
+#endif
+                    in_checker = start_in_checker[3];
+                    do {
+#if CKRBRD_HS_DR_PIO_TEST__VERIFY_DATA__DEBUG
+                        HDfprintf(stdout, " %d", (int)(*val_ptr));
+#endif
+                        if (z >= checker_edge_size) {
+
+                            in_checker = !in_checker;
+                            z          = 0;
+                        }
+
+                        if (in_checker) {
+
+                            if (*val_ptr != expected_value) {
+
+                                good_data = FALSE;
+                            }
+
+                            /* zero out buffer for re-use */
+                            *val_ptr = 0;
+                        }
+                        else if (*val_ptr != 0) {
+
+                            good_data = FALSE;
+
+                            /* zero out buffer for re-use */
+                            *val_ptr = 0;
+                        }
+
+                        val_ptr++;
+                        expected_value++;
+                        m++;
+                        z++;
+
+                    } while ((rank >= (test_max_rank - 4)) && (m < edge_size));
+#if CKRBRD_HS_DR_PIO_TEST__VERIFY_DATA__DEBUG
+                    HDfprintf(stdout, "\n");
+#endif
+                    l++;
+                    y++;
+                } while ((rank >= (test_max_rank - 3)) && (l < edge_size));
+                k++;
+                x++;
+            } while ((rank >= (test_max_rank - 2)) && (k < edge_size));
+            j++;
+            w++;
+        } while ((rank >= (test_max_rank - 1)) && (j < edge_size));
+        i++;
+        v++;
+    } while ((rank >= test_max_rank) && (i < edge_size));
+
+    return (good_data);
+
+} /* ckrbrd_hs_dr_pio_test__verify_data() */
+
+/*-------------------------------------------------------------------------
+ * Function: ckrbrd_hs_dr_pio_test__d2m_l2s()
+ *
+ * Purpose: Part one of a series of tests of I/O to/from hyperslab
+ * selections of different rank in the parallel.
+ *
+ * Verify that we can read from disk correctly using checker
+ * board selections of different rank that
+ * H5Sselect_shape_same() views as being of the same shape.
+ *
+ * In this function, we test this by reading small_rank - 1
+ * checker board slices from the on disk large cube, and
+ * verifying that the data read is correct. Verify that
+ * H5Sselect_shape_same() returns true on the memory and
+ * file selections.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 9/15/11
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define CHECKER_BOARD_HS_DR_PIO_TEST__D2M_L2S__DEBUG 0
+
+static void
+ckrbrd_hs_dr_pio_test__d2m_l2s(struct hs_dr_pio_test_vars_t *tv_ptr)
+{
+#if CHECKER_BOARD_HS_DR_PIO_TEST__D2M_L2S__DEBUG
+    const char *fcnName = "ckrbrd_hs_dr_pio_test__d2m_l2s()";
+    uint32_t *ptr_0;
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__D2M_L2S__DEBUG */
+    hbool_t data_ok = FALSE;
+    int i, j, k, l;
+    uint32_t expected_value;
+    int mpi_rank; /* needed by VRFY */
+    hsize_t sel_start[PAR_SS_DR_MAX_RANK];
+    htri_t check; /* Shape comparison return value */
+    herr_t ret; /* Generic return value */
+
+    /* initialize the local copy of mpi_rank */
+    mpi_rank = tv_ptr->mpi_rank;
+
+    /* first, verify that we can read from disk correctly using selections
+     * of different rank that H5Sselect_shape_same() views as being of the
+     * same shape.
+     *
+     * Start by reading a (small_rank - 1)-D checker board slice from this
+     * processes slice of the on disk large data set, and verifying that the
+     * data read is correct. Verify that H5Sselect_shape_same() returns
+     * true on the memory and file selections.
+     *
+     * The first step is to set up the needed checker board selection in the
+     * in memory small small cube
+     */
+
+    sel_start[0] = sel_start[1] = sel_start[2] = sel_start[3] = sel_start[4] = 0;
+    sel_start[tv_ptr->small_ds_offset] = (hsize_t)(tv_ptr->mpi_rank);
+
+    ckrbrd_hs_dr_pio_test__slct_ckrbrd(tv_ptr->mpi_rank, tv_ptr->small_ds_slice_sid, tv_ptr->small_rank - 1,
+                                       tv_ptr->edge_size, tv_ptr->checker_edge_size, tv_ptr->small_rank - 1,
+                                       sel_start);
+
+    /* zero out the buffer we will be reading into */
+    HDmemset(tv_ptr->small_ds_slice_buf, 0, sizeof(uint32_t) * tv_ptr->small_ds_slice_size);
+
+#if CHECKER_BOARD_HS_DR_PIO_TEST__D2M_L2S__DEBUG
+    HDfprintf(stdout, "%s:%d: initial small_ds_slice_buf = ", fcnName, tv_ptr->mpi_rank);
+    ptr_0 = tv_ptr->small_ds_slice_buf;
+    for (i = 0; i < (int)(tv_ptr->small_ds_slice_size); i++) {
+        HDfprintf(stdout, "%d ", (int)(*ptr_0));
+        ptr_0++;
+    }
+    HDfprintf(stdout, "\n");
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__D2M_L2S__DEBUG */
+
+    /* set up start, stride, count, and block -- note that we will
+     * change start[] so as to read slices of the large cube.
+     */
+    for (i = 0; i < PAR_SS_DR_MAX_RANK; i++) {
+
+        tv_ptr->start[i] = 0;
+        tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+        tv_ptr->count[i] = 1;
+        if ((PAR_SS_DR_MAX_RANK - i) > (tv_ptr->small_rank - 1)) {
+
+            /* dimension not present in the slice -- single element */
+            tv_ptr->block[i] = 1;
+        }
+        else {
+
+            /* dimension present in the slice -- full edge */
+            tv_ptr->block[i] = (hsize_t)(tv_ptr->edge_size);
+        }
+    }
+
+#if CHECKER_BOARD_HS_DR_PIO_TEST__D2M_L2S__DEBUG
+    HDfprintf(stdout, "%s:%d: reading slice from big ds on disk into small ds slice.\n", fcnName,
+              tv_ptr->mpi_rank);
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__D2M_L2S__DEBUG */
+    /* in serial versions of this test, we loop through all the dimensions
+     * of the large data set. However, in the parallel version, each
+     * process only works with that slice of the large cube indicated
+     * by its rank -- hence we set the most slowly changing index to
+     * mpi_rank, and don't iterate over it.
+     */
+
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 0) {
+
+        i = tv_ptr->mpi_rank;
+    }
+    else {
+
+        i = 0;
+    }
+
+    /* since large_rank is at most PAR_SS_DR_MAX_RANK, no need to
+     * loop over it -- either we are setting i to mpi_rank, or
+     * we are setting it to zero. It will not change during the
+     * test.
+     */
+
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 1) {
+
+        j = tv_ptr->mpi_rank;
+    }
+    else {
+
+        j = 0;
+    }
+
+    do {
+        if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 2) {
+
+            k = tv_ptr->mpi_rank;
+        }
+        else {
+
+            k = 0;
+        }
+
+        do {
+            /* since small rank >= 2 and large_rank > small_rank, we
+             * have large_rank >= 3. Since PAR_SS_DR_MAX_RANK == 5
+             * (baring major re-orgaization), this gives us:
+             *
+             * (PAR_SS_DR_MAX_RANK - large_rank) <= 2
+             *
+             * so no need to repeat the test in the outer loops --
+             * just set l = 0.
+             */
+
+            l = 0;
+            do {
+                if ((tv_ptr->skips)++ < tv_ptr->max_skips) { /* skip the test */
+
+                    (tv_ptr->tests_skipped)++;
+                }
+                else { /* run the test */
+
+                    tv_ptr->skips = 0; /* reset the skips counter */
+
+                    /* we know that small_rank - 1 >= 1 and that
+                     * large_rank > small_rank by the assertions at the head
+                     * of this function. Thus no need for another inner loop.
+                     */
+                    tv_ptr->start[0] = (hsize_t)i;
+                    tv_ptr->start[1] = (hsize_t)j;
+                    tv_ptr->start[2] = (hsize_t)k;
+                    tv_ptr->start[3] = (hsize_t)l;
+                    tv_ptr->start[4] = 0;
+
+                    HDassert((tv_ptr->start[0] == 0) || (0 < tv_ptr->small_ds_offset + 1));
+                    HDassert((tv_ptr->start[1] == 0) || (1 < tv_ptr->small_ds_offset + 1));
+                    HDassert((tv_ptr->start[2] == 0) || (2 < tv_ptr->small_ds_offset + 1));
+                    HDassert((tv_ptr->start[3] == 0) || (3 < tv_ptr->small_ds_offset + 1));
+                    HDassert((tv_ptr->start[4] == 0) || (4 < tv_ptr->small_ds_offset + 1));
+
+                    ckrbrd_hs_dr_pio_test__slct_ckrbrd(
+                        tv_ptr->mpi_rank, tv_ptr->file_large_ds_sid_0, tv_ptr->large_rank, tv_ptr->edge_size,
+                        tv_ptr->checker_edge_size, tv_ptr->small_rank - 1, tv_ptr->start);
+
+                    /* verify that H5Sselect_shape_same() reports the two
+                     * selections as having the same shape.
+                     */
+                    check = H5Sselect_shape_same(tv_ptr->small_ds_slice_sid, tv_ptr->file_large_ds_sid_0);
+                    VRFY((check == TRUE), "H5Sselect_shape_same passed");
+
+                    /* Read selection from disk */
+#if CHECKER_BOARD_HS_DR_PIO_TEST__D2M_L2S__DEBUG
+                    HDfprintf(stdout, "%s:%d: start = %d %d %d %d %d.\n", fcnName, tv_ptr->mpi_rank,
+                              tv_ptr->start[0], tv_ptr->start[1], tv_ptr->start[2], tv_ptr->start[3],
+                              tv_ptr->start[4]);
+                    HDfprintf(stdout, "%s slice/file extent dims = %d/%d.\n", fcnName,
+                              H5Sget_simple_extent_ndims(tv_ptr->small_ds_slice_sid),
+                              H5Sget_simple_extent_ndims(tv_ptr->file_large_ds_sid_0));
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__D2M_L2S__DEBUG */
+
+                    ret =
+                        H5Dread(tv_ptr->large_dataset, H5T_NATIVE_UINT32, tv_ptr->small_ds_slice_sid,
+                                tv_ptr->file_large_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->small_ds_slice_buf);
+                    VRFY((ret >= 0), "H5Dread() slice from large ds succeeded.");
+
+#if CHECKER_BOARD_HS_DR_PIO_TEST__D2M_L2S__DEBUG
+                    HDfprintf(stdout, "%s:%d: H5Dread() returns.\n", fcnName, tv_ptr->mpi_rank);
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__D2M_L2S__DEBUG */
+
+                    /* verify that expected data is retrieved */
+
+                    /* linear offset of the slice origin within the large data
+                     * set; the large data set is presumed to be initialized
+                     * with ascending natural numbers from its origin (see the
+                     * ckrbrd_hs_dr_pio_test__verify_data() header comment)
+                     */
+                    expected_value =
+                        (uint32_t)((i * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size *
+                                    tv_ptr->edge_size) +
+                                   (j * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size) +
+                                   (k * tv_ptr->edge_size * tv_ptr->edge_size) + (l * tv_ptr->edge_size));
+
+                    data_ok = ckrbrd_hs_dr_pio_test__verify_data(
+                        tv_ptr->small_ds_slice_buf, tv_ptr->small_rank - 1, tv_ptr->edge_size,
+                        tv_ptr->checker_edge_size, expected_value, (hbool_t)TRUE);
+
+                    VRFY((data_ok == TRUE), "small slice read from large ds data good.");
+
+                    (tv_ptr->tests_run)++;
+                }
+
+                l++;
+
+                (tv_ptr->total_tests)++;
+
+            } while ((tv_ptr->large_rank > 2) && ((tv_ptr->small_rank - 1) <= 1) && (l < tv_ptr->edge_size));
+            k++;
+        } while ((tv_ptr->large_rank > 3) && ((tv_ptr->small_rank - 1) <= 2) && (k < tv_ptr->edge_size));
+        j++;
+    } while ((tv_ptr->large_rank > 4) && ((tv_ptr->small_rank - 1) <= 3) && (j < tv_ptr->edge_size));
+
+    return;
+
+} /* ckrbrd_hs_dr_pio_test__d2m_l2s() */
+
+/*-------------------------------------------------------------------------
+ * Function: ckrbrd_hs_dr_pio_test__d2m_s2l()
+ *
+ * Purpose: Part two of a series of tests of I/O to/from hyperslab
+ * selections of different rank in the parallel.
+ *
+ * Verify that we can read from disk correctly using
+ * selections of different rank that H5Sselect_shape_same()
+ * views as being of the same shape.
+ *
+ * In this function, we test this by reading checker board
+ * slices of the on disk small data set into slices through
+ * the in memory large data set, and verify that the correct
+ * data (and only the correct data) is read.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 8/15/11
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define CHECKER_BOARD_HS_DR_PIO_TEST__D2M_S2L__DEBUG 0
+
+static void
+ckrbrd_hs_dr_pio_test__d2m_s2l(struct hs_dr_pio_test_vars_t *tv_ptr)
+{
+#if CHECKER_BOARD_HS_DR_PIO_TEST__D2M_S2L__DEBUG
+    const char *fcnName = "ckrbrd_hs_dr_pio_test__d2m_s2l()";
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__D2M_S2L__DEBUG */
+    hbool_t data_ok = FALSE;
+    int i, j, k, l;
+    size_t u;
+    size_t start_index;
+    size_t stop_index;
+    uint32_t expected_value;
+    uint32_t *ptr_1;
+    int mpi_rank; /* needed by VRFY */
+    hsize_t sel_start[PAR_SS_DR_MAX_RANK];
+    htri_t check; /* Shape comparison return value */
+    herr_t ret; /* Generic return value */
+
+    /* initialize the local copy of mpi_rank */
+    mpi_rank = tv_ptr->mpi_rank;
+
+    /* similarly, read slices of the on disk small data set into slices
+     * through the in memory large data set, and verify that the correct
+     * data (and only the correct data) is read.
+     */
+
+    sel_start[0] = sel_start[1] = sel_start[2] = sel_start[3] = sel_start[4] = 0;
+    sel_start[tv_ptr->small_ds_offset] = (hsize_t)(tv_ptr->mpi_rank);
+
+    ckrbrd_hs_dr_pio_test__slct_ckrbrd(tv_ptr->mpi_rank, tv_ptr->file_small_ds_sid_0, tv_ptr->small_rank,
+                                       tv_ptr->edge_size, tv_ptr->checker_edge_size, tv_ptr->small_rank - 1,
+                                       sel_start);
+
+#if CHECKER_BOARD_HS_DR_PIO_TEST__D2M_S2L__DEBUG
+    HDfprintf(stdout, "%s reading slices of on disk small data set into slices of big data set.\n", fcnName);
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__D2M_S2L__DEBUG */
+
+    /* zero out the buffer we will be reading into */
+    HDmemset(tv_ptr->large_ds_buf_1, 0, sizeof(uint32_t) * tv_ptr->large_ds_size);
+
+    /* set up start, stride, count, and block -- note that we will
+     * change start[] so as to read the slice of the small data set
+     * into different slices of the process slice of the large data
+     * set.
+     */
+    for (i = 0; i < PAR_SS_DR_MAX_RANK; i++) {
+
+        tv_ptr->start[i] = 0;
+        tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+        tv_ptr->count[i] = 1;
+        if ((PAR_SS_DR_MAX_RANK - i) > (tv_ptr->small_rank - 1)) {
+
+            /* dimension not present in the slice -- single element */
+            tv_ptr->block[i] = 1;
+        }
+        else {
+
+            /* dimension present in the slice -- full edge */
+            tv_ptr->block[i] = (hsize_t)(tv_ptr->edge_size);
+        }
+    }
+
+    /* in serial versions of this test, we loop through all the dimensions
+     * of the large data set that don't appear in the small data set.
+     *
+     * However, in the parallel version, each process only works with that
+     * slice of the large (and small) data set indicated by its rank -- hence
+     * we set the most slowly changing index to mpi_rank, and don't iterate
+     * over it.
+     */
+
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 0) {
+
+        i = tv_ptr->mpi_rank;
+    }
+    else {
+
+        i = 0;
+    }
+
+    /* since large_rank is at most PAR_SS_DR_MAX_RANK, no need to
+     * loop over it -- either we are setting i to mpi_rank, or
+     * we are setting it to zero. It will not change during the
+     * test.
+     */
+
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 1) {
+
+        j = tv_ptr->mpi_rank;
+    }
+    else {
+
+        j = 0;
+    }
+
+    do {
+        if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 2) {
+
+            k = tv_ptr->mpi_rank;
+        }
+        else {
+
+            k = 0;
+        }
+
+        do {
+            /* since small rank >= 2 and large_rank > small_rank, we
+             * have large_rank >= 3. Since PAR_SS_DR_MAX_RANK == 5
+             * (baring major re-orgaization), this gives us:
+             *
+             * (PAR_SS_DR_MAX_RANK - large_rank) <= 2
+             *
+             * so no need to repeat the test in the outer loops --
+             * just set l = 0.
+             */
+
+            l = 0;
+            do {
+                if ((tv_ptr->skips)++ < tv_ptr->max_skips) { /* skip the test */
+
+                    (tv_ptr->tests_skipped)++;
+                }
+                else { /* run the test */
+
+                    tv_ptr->skips = 0; /* reset the skips counter */
+
+                    /* we know that small_rank >= 1 and that large_rank > small_rank
+                     * by the assertions at the head of this function. Thus no
+                     * need for another inner loop.
+                     */
+                    tv_ptr->start[0] = (hsize_t)i;
+                    tv_ptr->start[1] = (hsize_t)j;
+                    tv_ptr->start[2] = (hsize_t)k;
+                    tv_ptr->start[3] = (hsize_t)l;
+                    tv_ptr->start[4] = 0;
+
+                    HDassert((tv_ptr->start[0] == 0) || (0 < tv_ptr->small_ds_offset + 1));
+                    HDassert((tv_ptr->start[1] == 0) || (1 < tv_ptr->small_ds_offset + 1));
+                    HDassert((tv_ptr->start[2] == 0) || (2 < tv_ptr->small_ds_offset + 1));
+                    HDassert((tv_ptr->start[3] == 0) || (3 < tv_ptr->small_ds_offset + 1));
+                    HDassert((tv_ptr->start[4] == 0) || (4 < tv_ptr->small_ds_offset + 1));
+
+                    ckrbrd_hs_dr_pio_test__slct_ckrbrd(
+                        tv_ptr->mpi_rank, tv_ptr->mem_large_ds_sid, tv_ptr->large_rank, tv_ptr->edge_size,
+                        tv_ptr->checker_edge_size, tv_ptr->small_rank - 1, tv_ptr->start);
+
+                    /* verify that H5Sselect_shape_same() reports the two
+                     * selections as having the same shape.
+                     */
+                    check = H5Sselect_shape_same(tv_ptr->file_small_ds_sid_0, tv_ptr->mem_large_ds_sid);
+                    VRFY((check == TRUE), "H5Sselect_shape_same passed");
+
+                    /* Read selection from disk */
+#if CHECKER_BOARD_HS_DR_PIO_TEST__D2M_S2L__DEBUG
+                    /* NOTE(review): start_index/stop_index are size_t but are
+                     * printed with %d below -- harmless while this debug code
+                     * is compiled out, but worth fixing if ever enabled.
+                     */
+                    HDfprintf(stdout, "%s:%d: start = %d %d %d %d %d.\n", fcnName, tv_ptr->mpi_rank,
+                              tv_ptr->start[0], tv_ptr->start[1], tv_ptr->start[2], tv_ptr->start[3],
+                              tv_ptr->start[4]);
+                    HDfprintf(stdout, "%s:%d: mem/file extent dims = %d/%d.\n", fcnName, tv_ptr->mpi_rank,
+                              H5Sget_simple_extent_ndims(tv_ptr->large_ds_slice_sid),
+                              H5Sget_simple_extent_ndims(tv_ptr->file_small_ds_sid_0));
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__D2M_S2L__DEBUG */
+                    ret = H5Dread(tv_ptr->small_dataset, H5T_NATIVE_UINT32, tv_ptr->mem_large_ds_sid,
+                                  tv_ptr->file_small_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->large_ds_buf_1);
+                    VRFY((ret >= 0), "H5Dread() slice from small ds succeeded.");
+
+                    /* verify that the expected data and only the
+                     * expected data was read.
+                     */
+                    data_ok = TRUE;
+                    ptr_1 = tv_ptr->large_ds_buf_1;
+                    expected_value = (uint32_t)((size_t)(tv_ptr->mpi_rank) * tv_ptr->small_ds_slice_size);
+                    /* [start_index, stop_index] delimits the region of the
+                     * in-memory large ds buffer that the selected slice was
+                     * read into; everything outside it must still be zero.
+                     */
+                    start_index =
+                        (size_t)((i * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size *
+                                  tv_ptr->edge_size) +
+                                 (j * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size) +
+                                 (k * tv_ptr->edge_size * tv_ptr->edge_size) + (l * tv_ptr->edge_size));
+                    stop_index = start_index + tv_ptr->small_ds_slice_size - 1;
+
+#if CHECKER_BOARD_HS_DR_PIO_TEST__D2M_S2L__DEBUG
+                    {
+                        int m, n;
+
+                        HDfprintf(stdout, "%s:%d: expected_value = %d.\n", fcnName, tv_ptr->mpi_rank,
+                                  expected_value);
+                        HDfprintf(stdout, "%s:%d: start/stop index = %d/%d.\n", fcnName, tv_ptr->mpi_rank,
+                                  start_index, stop_index);
+                        n = 0;
+                        for (m = 0; (unsigned)m < tv_ptr->large_ds_size; m++) {
+                            HDfprintf(stdout, "%d ", (int)(*ptr_1));
+                            ptr_1++;
+                            n++;
+                            if (n >= tv_ptr->edge_size) {
+                                HDfprintf(stdout, "\n");
+                                n = 0;
+                            }
+                        }
+                        HDfprintf(stdout, "\n");
+                        ptr_1 = tv_ptr->large_ds_buf_1;
+                    }
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__D2M_S2L__DEBUG */
+
+                    HDassert(start_index < stop_index);
+                    HDassert(stop_index <= tv_ptr->large_ds_size);
+
+                    /* region before the slice must be untouched (still zero) */
+                    for (u = 0; u < start_index; u++) {
+
+                        if (*ptr_1 != 0) {
+
+                            data_ok = FALSE;
+                        }
+
+                        /* zero out the value for the next pass */
+                        *ptr_1 = 0;
+
+                        ptr_1++;
+                    }
+
+                    VRFY((data_ok == TRUE), "slice read from small to large ds data good(1).");
+
+                    /* the slice region itself must contain the checker board
+                     * pattern of values read from the small data set
+                     */
+                    data_ok = ckrbrd_hs_dr_pio_test__verify_data(ptr_1, tv_ptr->small_rank - 1,
+                                                                 tv_ptr->edge_size, tv_ptr->checker_edge_size,
+                                                                 expected_value, (hbool_t)TRUE);
+
+                    VRFY((data_ok == TRUE), "slice read from small to large ds data good(2).");
+
+                    ptr_1 = tv_ptr->large_ds_buf_1 + stop_index + 1;
+
+                    /* region after the slice must be untouched (still zero) */
+                    for (u = stop_index + 1; u < tv_ptr->large_ds_size; u++) {
+
+                        if (*ptr_1 != 0) {
+
+                            data_ok = FALSE;
+                        }
+
+                        /* zero out the value for the next pass */
+                        *ptr_1 = 0;
+
+                        ptr_1++;
+                    }
+
+                    VRFY((data_ok == TRUE), "slice read from small to large ds data good(3).");
+
+                    (tv_ptr->tests_run)++;
+                }
+
+                l++;
+
+                (tv_ptr->total_tests)++;
+
+            } while ((tv_ptr->large_rank > 2) && ((tv_ptr->small_rank - 1) <= 1) && (l < tv_ptr->edge_size));
+            k++;
+        } while ((tv_ptr->large_rank > 3) && ((tv_ptr->small_rank - 1) <= 2) && (k < tv_ptr->edge_size));
+        j++;
+    } while ((tv_ptr->large_rank > 4) && ((tv_ptr->small_rank - 1) <= 3) && (j < tv_ptr->edge_size));
+
+    return;
+
+} /* ckrbrd_hs_dr_pio_test__d2m_s2l() */
+
+/*-------------------------------------------------------------------------
+ * Function: ckrbrd_hs_dr_pio_test__m2d_l2s()
+ *
+ * Purpose: Part three of a series of tests of I/O to/from checker
+ * board hyperslab selections of different rank in the
+ * parallel.
+ *
+ * Verify that we can write from memory to file using checker
+ * board selections of different rank that
+ * H5Sselect_shape_same() views as being of the same shape.
+ *
+ * Do this by writing small_rank - 1 dimensional checker
+ * board slices from the in memory large data set to the on
+ * disk small cube dataset. After each write, read the
+ * slice of the small dataset back from disk, and verify
+ * that it contains the expected data. Verify that
+ * H5Sselect_shape_same() returns true on the memory and
+ * file selections.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 8/15/11
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define CHECKER_BOARD_HS_DR_PIO_TEST__M2D_L2S__DEBUG 0
+
+static void
+ckrbrd_hs_dr_pio_test__m2d_l2s(struct hs_dr_pio_test_vars_t *tv_ptr)
+{
+#if CHECKER_BOARD_HS_DR_PIO_TEST__M2D_L2S__DEBUG
+ const char *fcnName = "ckrbrd_hs_dr_pio_test__m2d_l2s()";
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__M2D_L2S__DEBUG */
+ hbool_t data_ok = FALSE;
+ int i, j, k, l;
+ size_t u;
+ size_t start_index;
+ size_t stop_index;
+ uint32_t expected_value;
+ uint32_t *ptr_1;
+ int mpi_rank; /* needed by VRFY */
+ hsize_t sel_start[PAR_SS_DR_MAX_RANK];
+ htri_t check; /* Shape comparison return value */
+ herr_t ret; /* Generic return value */
+
+ /* initialize the local copy of mpi_rank */
+ mpi_rank = tv_ptr->mpi_rank;
+
+ /* now we go in the opposite direction, verifying that we can write
+ * from memory to file using selections of different rank that
+ * H5Sselect_shape_same() views as being of the same shape.
+ *
+ * Start by writing small_rank - 1 D slices from the in memory large data
+ * set to the on disk small dataset. After each write, read the slice of
+ * the small dataset back from disk, and verify that it contains the
+ * expected data. Verify that H5Sselect_shape_same() returns true on
+ * the memory and file selections.
+ */
+
+ tv_ptr->start[0] = (hsize_t)(tv_ptr->mpi_rank);
+ tv_ptr->stride[0] = (hsize_t)(2 * (tv_ptr->mpi_size + 1));
+ tv_ptr->count[0] = 1;
+ tv_ptr->block[0] = 1;
+
+ for (i = 1; i < tv_ptr->large_rank; i++) {
+
+ tv_ptr->start[i] = 0;
+ tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+ tv_ptr->count[i] = 1;
+ tv_ptr->block[i] = (hsize_t)(tv_ptr->edge_size);
+ }
+
+ ret = H5Sselect_hyperslab(tv_ptr->file_small_ds_sid_0, H5S_SELECT_SET, tv_ptr->start, tv_ptr->stride,
+ tv_ptr->count, tv_ptr->block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(file_small_ds_sid_0, set) succeeded");
+
+ ret = H5Sselect_hyperslab(tv_ptr->mem_small_ds_sid, H5S_SELECT_SET, tv_ptr->start, tv_ptr->stride,
+ tv_ptr->count, tv_ptr->block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(mem_small_ds_sid, set) succeeded");
+
+ sel_start[0] = sel_start[1] = sel_start[2] = sel_start[3] = sel_start[4] = 0;
+ sel_start[tv_ptr->small_ds_offset] = (hsize_t)(tv_ptr->mpi_rank);
+
+ ckrbrd_hs_dr_pio_test__slct_ckrbrd(tv_ptr->mpi_rank, tv_ptr->file_small_ds_sid_1, tv_ptr->small_rank,
+ tv_ptr->edge_size, tv_ptr->checker_edge_size, tv_ptr->small_rank - 1,
+ sel_start);
+
+ /* set up start, stride, count, and block -- note that we will
+ * change start[] so as to read slices of the large cube.
+ */
+ for (i = 0; i < PAR_SS_DR_MAX_RANK; i++) {
+
+ tv_ptr->start[i] = 0;
+ tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+ tv_ptr->count[i] = 1;
+ if ((PAR_SS_DR_MAX_RANK - i) > (tv_ptr->small_rank - 1)) {
+
+ tv_ptr->block[i] = 1;
+ }
+ else {
+
+ tv_ptr->block[i] = (hsize_t)(tv_ptr->edge_size);
+ }
+ }
+
+ /* zero out the in memory small ds */
+ HDmemset(tv_ptr->small_ds_buf_1, 0, sizeof(uint32_t) * tv_ptr->small_ds_size);
+
+#if CHECKER_BOARD_HS_DR_PIO_TEST__M2D_L2S__DEBUG
+ HDfprintf(stdout,
+ "%s writing checker boards selections of slices from big ds to slices of small ds on disk.\n",
+ fcnName);
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__M2D_L2S__DEBUG */
+
+ /* in serial versions of this test, we loop through all the dimensions
+ * of the large data set that don't appear in the small data set.
+ *
+ * However, in the parallel version, each process only works with that
+ * slice of the large (and small) data set indicated by its rank -- hence
+ * we set the most slowly changing index to mpi_rank, and don't iterate
+ * over it.
+ */
+
+ if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 0) {
+
+ i = tv_ptr->mpi_rank;
+ }
+ else {
+
+ i = 0;
+ }
+
+ /* since large_rank is at most PAR_SS_DR_MAX_RANK, no need to
+ * loop over it -- either we are setting i to mpi_rank, or
+ * we are setting it to zero. It will not change during the
+ * test.
+ */
+
+ if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 1) {
+
+ j = tv_ptr->mpi_rank;
+ }
+ else {
+
+ j = 0;
+ }
+
+ j = 0;
+ do {
+ if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 2) {
+
+ k = tv_ptr->mpi_rank;
+ }
+ else {
+
+ k = 0;
+ }
+
+ do {
+ /* since small rank >= 2 and large_rank > small_rank, we
+ * have large_rank >= 3. Since PAR_SS_DR_MAX_RANK == 5
+ * (baring major re-orgaization), this gives us:
+ *
+ * (PAR_SS_DR_MAX_RANK - large_rank) <= 2
+ *
+ * so no need to repeat the test in the outer loops --
+ * just set l = 0.
+ */
+
+ l = 0;
+ do {
+ if ((tv_ptr->skips)++ < tv_ptr->max_skips) { /* skip the test */
+
+ (tv_ptr->tests_skipped)++;
+ }
+ else { /* run the test */
+
+ tv_ptr->skips = 0; /* reset the skips counter */
+
+ /* we know that small_rank >= 1 and that large_rank > small_rank
+ * by the assertions at the head of this function. Thus no
+ * need for another inner loop.
+ */
+
+ /* zero out this rank's slice of the on disk small data set */
+ ret = H5Dwrite(tv_ptr->small_dataset, H5T_NATIVE_UINT32, tv_ptr->mem_small_ds_sid,
+ tv_ptr->file_small_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->small_ds_buf_2);
+ VRFY((ret >= 0), "H5Dwrite() zero slice to small ds succeeded.");
+
+ /* select the portion of the in memory large cube from which we
+ * are going to write data.
+ */
+ tv_ptr->start[0] = (hsize_t)i;
+ tv_ptr->start[1] = (hsize_t)j;
+ tv_ptr->start[2] = (hsize_t)k;
+ tv_ptr->start[3] = (hsize_t)l;
+ tv_ptr->start[4] = 0;
+
+ HDassert((tv_ptr->start[0] == 0) || (0 < tv_ptr->small_ds_offset + 1));
+ HDassert((tv_ptr->start[1] == 0) || (1 < tv_ptr->small_ds_offset + 1));
+ HDassert((tv_ptr->start[2] == 0) || (2 < tv_ptr->small_ds_offset + 1));
+ HDassert((tv_ptr->start[3] == 0) || (3 < tv_ptr->small_ds_offset + 1));
+ HDassert((tv_ptr->start[4] == 0) || (4 < tv_ptr->small_ds_offset + 1));
+
+ ckrbrd_hs_dr_pio_test__slct_ckrbrd(
+ tv_ptr->mpi_rank, tv_ptr->mem_large_ds_sid, tv_ptr->large_rank, tv_ptr->edge_size,
+ tv_ptr->checker_edge_size, tv_ptr->small_rank - 1, tv_ptr->start);
+
+ /* verify that H5Sselect_shape_same() reports the in
+ * memory checkerboard selection of the slice through the
+ * large dataset and the checkerboard selection of the process
+ * slice of the small data set as having the same shape.
+ */
+ check = H5Sselect_shape_same(tv_ptr->file_small_ds_sid_1, tv_ptr->mem_large_ds_sid);
+ VRFY((check == TRUE), "H5Sselect_shape_same passed.");
+
+ /* write the checker board selection of the slice from the in
+ * memory large data set to the slice of the on disk small
+ * dataset.
+ */
+#if CHECKER_BOARD_HS_DR_PIO_TEST__M2D_L2S__DEBUG
+ HDfprintf(stdout, "%s:%d: start = %d %d %d %d %d.\n", fcnName, tv_ptr->mpi_rank,
+ tv_ptr->start[0], tv_ptr->start[1], tv_ptr->start[2], tv_ptr->start[3],
+ tv_ptr->start[4]);
+ HDfprintf(stdout, "%s:%d: mem/file extent dims = %d/%d.\n", fcnName, tv_ptr->mpi_rank,
+ H5Sget_simple_extent_ndims(tv_ptr->mem_large_ds_sid),
+ H5Sget_simple_extent_ndims(tv_ptr->file_small_ds_sid_1));
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__M2D_L2S__DEBUG */
+ ret = H5Dwrite(tv_ptr->small_dataset, H5T_NATIVE_UINT32, tv_ptr->mem_large_ds_sid,
+ tv_ptr->file_small_ds_sid_1, tv_ptr->xfer_plist, tv_ptr->large_ds_buf_0);
+ VRFY((ret >= 0), "H5Dwrite() slice to large ds succeeded.");
+
+ /* read the on disk process slice of the small dataset into memory */
+ ret = H5Dread(tv_ptr->small_dataset, H5T_NATIVE_UINT32, tv_ptr->mem_small_ds_sid,
+ tv_ptr->file_small_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->small_ds_buf_1);
+ VRFY((ret >= 0), "H5Dread() slice from small ds succeeded.");
+
+ /* verify that expected data is retrieved */
+
+ expected_value =
+ (uint32_t)((i * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size *
+ tv_ptr->edge_size) +
+ (j * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size) +
+ (k * tv_ptr->edge_size * tv_ptr->edge_size) + (l * tv_ptr->edge_size));
+
+ start_index = (size_t)(tv_ptr->mpi_rank) * tv_ptr->small_ds_slice_size;
+ stop_index = start_index + tv_ptr->small_ds_slice_size - 1;
+
+ HDassert(start_index < stop_index);
+ HDassert(stop_index <= tv_ptr->small_ds_size);
+
+ data_ok = TRUE;
+
+ ptr_1 = tv_ptr->small_ds_buf_1;
+ for (u = 0; u < start_index; u++, ptr_1++) {
+
+ if (*ptr_1 != 0) {
+
+ data_ok = FALSE;
+ *ptr_1 = 0;
+ }
+ }
+
+ data_ok &= ckrbrd_hs_dr_pio_test__verify_data(
+ tv_ptr->small_ds_buf_1 + start_index, tv_ptr->small_rank - 1, tv_ptr->edge_size,
+ tv_ptr->checker_edge_size, expected_value, (hbool_t)TRUE);
+
+ ptr_1 = tv_ptr->small_ds_buf_1;
+ for (u = stop_index; u < tv_ptr->small_ds_size; u++, ptr_1++) {
+
+ if (*ptr_1 != 0) {
+
+ data_ok = FALSE;
+ *ptr_1 = 0;
+ }
+ }
+
+ VRFY((data_ok == TRUE), "large slice write slice to small slice data good.");
+
+ (tv_ptr->tests_run)++;
+ }
+
+ l++;
+
+ (tv_ptr->total_tests)++;
+
+ } while ((tv_ptr->large_rank > 2) && ((tv_ptr->small_rank - 1) <= 1) && (l < tv_ptr->edge_size));
+ k++;
+ } while ((tv_ptr->large_rank > 3) && ((tv_ptr->small_rank - 1) <= 2) && (k < tv_ptr->edge_size));
+ j++;
+ } while ((tv_ptr->large_rank > 4) && ((tv_ptr->small_rank - 1) <= 3) && (j < tv_ptr->edge_size));
+
+ return;
+
+} /* ckrbrd_hs_dr_pio_test__m2d_l2s() */
+
+/*-------------------------------------------------------------------------
+ * Function: ckrbrd_hs_dr_pio_test__m2d_s2l()
+ *
+ * Purpose: Part four of a series of tests of I/O to/from checker
+ * board hyperslab selections of different rank in the parallel.
+ *
+ * Verify that we can write from memory to file using
+ * selections of different rank that H5Sselect_shape_same()
+ * views as being of the same shape.
+ *
+ * Do this by writing checker board selections of the contents
+ * of the process's slice of the in memory small data set to
+ * slices of the on disk large data set. After each write,
+ * read the process's slice of the large data set back into
+ * memory, and verify that it contains the expected data.
+ *
+ * Verify that H5Sselect_shape_same() returns true on the
+ * memory and file selections.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 8/15/11
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define CHECKER_BOARD_HS_DR_PIO_TEST__M2D_S2L__DEBUG 0
+
+static void
+ckrbrd_hs_dr_pio_test__m2d_s2l(struct hs_dr_pio_test_vars_t *tv_ptr)
+{
+#if CHECKER_BOARD_HS_DR_PIO_TEST__M2D_S2L__DEBUG
+    const char *fcnName = "ckrbrd_hs_dr_pio_test__m2d_s2l()";
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__M2D_S2L__DEBUG */
+    hbool_t   data_ok = FALSE;
+    int       i, j, k, l;
+    size_t    u;
+    size_t    start_index;
+    size_t    stop_index;
+    uint32_t  expected_value;
+    uint32_t *ptr_1;
+    int       mpi_rank; /* needed by VRFY */
+    hsize_t   sel_start[PAR_SS_DR_MAX_RANK];
+    htri_t    check; /* Shape comparison return value */
+    herr_t    ret;   /* Generic return value */
+
+    /* initialize the local copy of mpi_rank */
+    mpi_rank = tv_ptr->mpi_rank;
+
+    /* Now write the contents of the process's slice of the in memory
+     * small data set to slices of the on disk large data set.  After
+     * each write, read the process's slice of the large data set back
+     * into memory, and verify that it contains the expected data.
+     * Verify that H5Sselect_shape_same() returns true on the memory
+     * and file selections.
+     */
+
+    tv_ptr->start[0]  = (hsize_t)(tv_ptr->mpi_rank);
+    tv_ptr->stride[0] = (hsize_t)(2 * (tv_ptr->mpi_size + 1));
+    tv_ptr->count[0]  = 1;
+    tv_ptr->block[0]  = 1;
+
+    for (i = 1; i < tv_ptr->large_rank; i++) {
+
+        tv_ptr->start[i]  = 0;
+        tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+        tv_ptr->count[i]  = 1;
+        tv_ptr->block[i]  = (hsize_t)(tv_ptr->edge_size);
+    }
+
+    ret = H5Sselect_hyperslab(tv_ptr->file_large_ds_sid_0, H5S_SELECT_SET, tv_ptr->start, tv_ptr->stride,
+                              tv_ptr->count, tv_ptr->block);
+    VRFY((ret >= 0), "H5Sselect_hyperslab(file_large_ds_sid_0, set) succeeded");
+
+    ret = H5Sselect_hyperslab(tv_ptr->mem_large_ds_sid, H5S_SELECT_SET, tv_ptr->start, tv_ptr->stride,
+                              tv_ptr->count, tv_ptr->block);
+    VRFY((ret >= 0), "H5Sselect_hyperslab(tv_ptr->mem_large_ds_sid, set) succeeded");
+
+    /* setup a checkerboard selection of the slice of the in memory small
+     * data set associated with the process's mpi rank.
+     */
+
+    sel_start[0] = sel_start[1] = sel_start[2] = sel_start[3] = sel_start[4] = 0;
+    sel_start[tv_ptr->small_ds_offset]                                       = (hsize_t)(tv_ptr->mpi_rank);
+
+    ckrbrd_hs_dr_pio_test__slct_ckrbrd(tv_ptr->mpi_rank, tv_ptr->mem_small_ds_sid, tv_ptr->small_rank,
+                                       tv_ptr->edge_size, tv_ptr->checker_edge_size, tv_ptr->small_rank - 1,
+                                       sel_start);
+
+    /* set up start, stride, count, and block -- note that we will
+     * change start[] so as to write checkerboard selections of slices
+     * of the small data set to slices of the large data set.
+     */
+    for (i = 0; i < PAR_SS_DR_MAX_RANK; i++) {
+
+        tv_ptr->start[i]  = 0;
+        tv_ptr->stride[i] = (hsize_t)(2 * tv_ptr->edge_size);
+        tv_ptr->count[i]  = 1;
+        if ((PAR_SS_DR_MAX_RANK - i) > (tv_ptr->small_rank - 1)) {
+
+            tv_ptr->block[i] = 1;
+        }
+        else {
+
+            tv_ptr->block[i] = (hsize_t)(tv_ptr->edge_size);
+        }
+    }
+
+    /* zero out the in memory large ds */
+    HDmemset(tv_ptr->large_ds_buf_1, 0, sizeof(uint32_t) * tv_ptr->large_ds_size);
+
+#if CHECKER_BOARD_HS_DR_PIO_TEST__M2D_S2L__DEBUG
+    HDfprintf(stdout,
+              "%s writing process checkerboard selections of slices of small ds to process slices of large "
+              "ds on disk.\n",
+              fcnName);
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__M2D_S2L__DEBUG */
+
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 0) {
+
+        i = tv_ptr->mpi_rank;
+    }
+    else {
+
+        i = 0;
+    }
+
+    /* since large_rank is at most PAR_SS_DR_MAX_RANK, no need to
+     * loop over it -- either we are setting i to mpi_rank, or
+     * we are setting it to zero.  It will not change during the
+     * test.
+     */
+
+    if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 1) {
+
+        j = tv_ptr->mpi_rank;
+    }
+    else {
+
+        j = 0;
+    }
+
+    do {
+        if (PAR_SS_DR_MAX_RANK - tv_ptr->large_rank == 2) {
+
+            k = tv_ptr->mpi_rank;
+        }
+        else {
+
+            k = 0;
+        }
+
+        do {
+            /* since small rank >= 2 and large_rank > small_rank, we
+             * have large_rank >= 3.  Since PAR_SS_DR_MAX_RANK == 5
+             * (barring major re-organization), this gives us:
+             *
+             *     (PAR_SS_DR_MAX_RANK - large_rank) <= 2
+             *
+             * so no need to repeat the test in the outer loops --
+             * just set l = 0.
+             */
+
+            l = 0;
+            do {
+                if ((tv_ptr->skips)++ < tv_ptr->max_skips) { /* skip the test */
+
+                    (tv_ptr->tests_skipped)++;
+                }
+                else { /* run the test */
+
+                    tv_ptr->skips = 0; /* reset the skips counter */
+
+                    /* we know that small_rank >= 1 and that large_rank > small_rank
+                     * by the assertions at the head of this function.  Thus no
+                     * need for another inner loop.
+                     */
+
+                    /* Zero out this processes slice of the on disk large data set.
+                     * Note that this will leave one slice with its original data
+                     * as there is one more slice than processes.
+                     */
+                    ret = H5Dwrite(tv_ptr->large_dataset, H5T_NATIVE_UINT32, tv_ptr->mem_large_ds_sid,
+                                   tv_ptr->file_large_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->large_ds_buf_2);
+                    VRFY((ret != FAIL), "H5Dwrite() to zero large ds succeeded");
+
+                    /* select the portion of the in memory large cube to which we
+                     * are going to write data.
+                     */
+                    tv_ptr->start[0] = (hsize_t)i;
+                    tv_ptr->start[1] = (hsize_t)j;
+                    tv_ptr->start[2] = (hsize_t)k;
+                    tv_ptr->start[3] = (hsize_t)l;
+                    tv_ptr->start[4] = 0;
+
+                    HDassert((tv_ptr->start[0] == 0) || (0 < tv_ptr->small_ds_offset + 1));
+                    HDassert((tv_ptr->start[1] == 0) || (1 < tv_ptr->small_ds_offset + 1));
+                    HDassert((tv_ptr->start[2] == 0) || (2 < tv_ptr->small_ds_offset + 1));
+                    HDassert((tv_ptr->start[3] == 0) || (3 < tv_ptr->small_ds_offset + 1));
+                    HDassert((tv_ptr->start[4] == 0) || (4 < tv_ptr->small_ds_offset + 1));
+
+                    ckrbrd_hs_dr_pio_test__slct_ckrbrd(
+                        tv_ptr->mpi_rank, tv_ptr->file_large_ds_sid_1, tv_ptr->large_rank, tv_ptr->edge_size,
+                        tv_ptr->checker_edge_size, tv_ptr->small_rank - 1, tv_ptr->start);
+
+                    /* verify that H5Sselect_shape_same() reports the in
+                     * memory small data set slice selection and the
+                     * on disk slice through the large data set selection
+                     * as having the same shape.
+                     */
+                    check = H5Sselect_shape_same(tv_ptr->mem_small_ds_sid, tv_ptr->file_large_ds_sid_1);
+                    VRFY((check == TRUE), "H5Sselect_shape_same passed");
+
+                    /* write the small data set slice from memory to the
+                     * target slice of the disk data set
+                     */
+#if CHECKER_BOARD_HS_DR_PIO_TEST__M2D_S2L__DEBUG
+                    HDfprintf(stdout, "%s:%d: start = %d %d %d %d %d.\n", fcnName, tv_ptr->mpi_rank,
+                              tv_ptr->start[0], tv_ptr->start[1], tv_ptr->start[2], tv_ptr->start[3],
+                              tv_ptr->start[4]);
+                    HDfprintf(stdout, "%s:%d: mem/file extent dims = %d/%d.\n", fcnName, tv_ptr->mpi_rank,
+                              H5Sget_simple_extent_ndims(tv_ptr->mem_small_ds_sid),
+                              H5Sget_simple_extent_ndims(tv_ptr->file_large_ds_sid_1));
+#endif /* CHECKER_BOARD_HS_DR_PIO_TEST__M2D_S2L__DEBUG */
+                    ret = H5Dwrite(tv_ptr->large_dataset, H5T_NATIVE_UINT32, tv_ptr->mem_small_ds_sid,
+                                   tv_ptr->file_large_ds_sid_1, tv_ptr->xfer_plist, tv_ptr->small_ds_buf_0);
+                    VRFY((ret != FAIL), "H5Dwrite of small ds slice to large ds succeeded");
+
+                    /* read this processes slice on the on disk large
+                     * data set into memory.
+                     */
+
+                    ret = H5Dread(tv_ptr->large_dataset, H5T_NATIVE_UINT32, tv_ptr->mem_large_ds_sid,
+                                  tv_ptr->file_large_ds_sid_0, tv_ptr->xfer_plist, tv_ptr->large_ds_buf_1);
+                    VRFY((ret != FAIL), "H5Dread() of process slice of large ds succeeded");
+
+                    /* verify that the expected data and only the
+                     * expected data was read.
+                     */
+                    expected_value = (uint32_t)((size_t)(tv_ptr->mpi_rank) * tv_ptr->small_ds_slice_size);
+
+                    start_index =
+                        (size_t)((i * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size *
+                                  tv_ptr->edge_size) +
+                                 (j * tv_ptr->edge_size * tv_ptr->edge_size * tv_ptr->edge_size) +
+                                 (k * tv_ptr->edge_size * tv_ptr->edge_size) + (l * tv_ptr->edge_size));
+                    stop_index = start_index + tv_ptr->small_ds_slice_size - 1;
+
+                    HDassert(start_index < stop_index);
+                    HDassert(stop_index < tv_ptr->large_ds_size);
+
+                    data_ok = TRUE;
+
+                    /* check that the buffer is zero before the expected slice */
+                    ptr_1 = tv_ptr->large_ds_buf_1;
+                    for (u = 0; u < start_index; u++, ptr_1++) {
+
+                        if (*ptr_1 != 0) {
+
+                            data_ok = FALSE;
+                            *ptr_1  = 0;
+                        }
+                    }
+
+                    data_ok &= ckrbrd_hs_dr_pio_test__verify_data(
+                        tv_ptr->large_ds_buf_1 + start_index, tv_ptr->small_rank - 1, tv_ptr->edge_size,
+                        tv_ptr->checker_edge_size, expected_value, (hbool_t)TRUE);
+
+                    /* check that the buffer is zero after the expected slice.
+                     * stop_index is the index of the last expected element, so
+                     * the scan starts at stop_index + 1 and runs through the
+                     * end of the large data set buffer.  (Previously this loop
+                     * started ptr_1 at the head of the buffer and bounded u by
+                     * small_ds_size, so the tail region was never examined.)
+                     */
+                    ptr_1 = tv_ptr->large_ds_buf_1 + stop_index + 1;
+                    for (u = stop_index + 1; u < tv_ptr->large_ds_size; u++, ptr_1++) {
+
+                        if (*ptr_1 != 0) {
+
+                            data_ok = FALSE;
+                            *ptr_1  = 0;
+                        }
+                    }
+
+                    VRFY((data_ok == TRUE), "small ds cb slice write to large ds slice data good.");
+
+                    (tv_ptr->tests_run)++;
+                }
+
+                l++;
+
+                (tv_ptr->total_tests)++;
+
+            } while ((tv_ptr->large_rank > 2) && ((tv_ptr->small_rank - 1) <= 1) && (l < tv_ptr->edge_size));
+            k++;
+        } while ((tv_ptr->large_rank > 3) && ((tv_ptr->small_rank - 1) <= 2) && (k < tv_ptr->edge_size));
+        j++;
+    } while ((tv_ptr->large_rank > 4) && ((tv_ptr->small_rank - 1) <= 3) && (j < tv_ptr->edge_size));
+
+    return;
+
+} /* ckrbrd_hs_dr_pio_test__m2d_s2l() */
+
+/*-------------------------------------------------------------------------
+ * Function: ckrbrd_hs_dr_pio_test__run_test()
+ *
+ * Purpose: Test I/O to/from checkerboard selections of hyperslabs of
+ * different rank in the parallel.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 10/10/09
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define CKRBRD_HS_DR_PIO_TEST__RUN_TEST__DEBUG 0
+
+static void
+ckrbrd_hs_dr_pio_test__run_test(const int test_num, const int edge_size, const int checker_edge_size,
+                                const int chunk_edge_size, const int small_rank, const int large_rank,
+                                const hbool_t use_collective_io, const hid_t dset_type,
+                                const int express_test, int *skips_ptr, int max_skips,
+                                int64_t *total_tests_ptr, int64_t *tests_run_ptr, int64_t *tests_skipped_ptr,
+                                int mpi_rank)
+
+{
+#if CKRBRD_HS_DR_PIO_TEST__RUN_TEST__DEBUG
+    const char *fcnName = "ckrbrd_hs_dr_pio_test__run_test()";
+#endif /* CKRBRD_HS_DR_PIO_TEST__RUN_TEST__DEBUG */
+    /* All test state is collected in this one struct so the four subtest
+     * helpers can share it.  NOTE: the initializer is positional -- it must
+     * stay in the same order as the member declarations of
+     * struct hs_dr_pio_test_vars_t (the field names appear in the adjacent
+     * comments).
+     */
+    struct hs_dr_pio_test_vars_t test_vars = {
+        /* int           mpi_size                        = */ -1,
+        /* int         mpi_rank                          = */ -1,
+        /* MPI_Comm      mpi_comm                        = */ MPI_COMM_NULL,
+        /* MPI_Inf       mpi_info                        = */ MPI_INFO_NULL,
+        /* int           test_num                        = */ -1,
+        /* int           edge_size                       = */ -1,
+        /* int           checker_edge_size               = */ -1,
+        /* int           chunk_edge_size                 = */ -1,
+        /* int           small_rank                      = */ -1,
+        /* int           large_rank                      = */ -1,
+        /* hid_t         dset_type                       = */ -1,
+        /* uint32_t *    small_ds_buf_0                  = */ NULL,
+        /* uint32_t *    small_ds_buf_1                  = */ NULL,
+        /* uint32_t *    small_ds_buf_2                  = */ NULL,
+        /* uint32_t *    small_ds_slice_buf              = */ NULL,
+        /* uint32_t *    large_ds_buf_0                  = */ NULL,
+        /* uint32_t *    large_ds_buf_1                  = */ NULL,
+        /* uint32_t *    large_ds_buf_2                  = */ NULL,
+        /* uint32_t *    large_ds_slice_buf              = */ NULL,
+        /* int           small_ds_offset                 = */ -1,
+        /* int           large_ds_offset                 = */ -1,
+        /* hid_t         fid                             = */ -1, /* HDF5 file ID */
+        /* hid_t         xfer_plist                      = */ H5P_DEFAULT,
+        /* hid_t         full_mem_small_ds_sid           = */ -1,
+        /* hid_t         full_file_small_ds_sid          = */ -1,
+        /* hid_t         mem_small_ds_sid                = */ -1,
+        /* hid_t         file_small_ds_sid_0             = */ -1,
+        /* hid_t         file_small_ds_sid_1             = */ -1,
+        /* hid_t         small_ds_slice_sid              = */ -1,
+        /* hid_t         full_mem_large_ds_sid           = */ -1,
+        /* hid_t         full_file_large_ds_sid          = */ -1,
+        /* hid_t         mem_large_ds_sid                = */ -1,
+        /* hid_t         file_large_ds_sid_0             = */ -1,
+        /* hid_t         file_large_ds_sid_1             = */ -1,
+        /* hid_t         file_large_ds_process_slice_sid = */ -1,
+        /* hid_t         mem_large_ds_process_slice_sid  = */ -1,
+        /* hid_t         large_ds_slice_sid              = */ -1,
+        /* hid_t         small_dataset                   = */ -1, /* Dataset ID */
+        /* hid_t         large_dataset                   = */ -1, /* Dataset ID */
+        /* size_t        small_ds_size                   = */ 1,
+        /* size_t        small_ds_slice_size             = */ 1,
+        /* size_t        large_ds_size                   = */ 1,
+        /* size_t        large_ds_slice_size             = */ 1,
+        /* hsize_t       dims[PAR_SS_DR_MAX_RANK]        = */ {0, 0, 0, 0, 0},
+        /* hsize_t       chunk_dims[PAR_SS_DR_MAX_RANK]  = */ {0, 0, 0, 0, 0},
+        /* hsize_t       start[PAR_SS_DR_MAX_RANK]       = */ {0, 0, 0, 0, 0},
+        /* hsize_t       stride[PAR_SS_DR_MAX_RANK]      = */ {0, 0, 0, 0, 0},
+        /* hsize_t       count[PAR_SS_DR_MAX_RANK]       = */ {0, 0, 0, 0, 0},
+        /* hsize_t       block[PAR_SS_DR_MAX_RANK]       = */ {0, 0, 0, 0, 0},
+        /* hsize_t *     start_ptr                       = */ NULL,
+        /* hsize_t *     stride_ptr                      = */ NULL,
+        /* hsize_t *     count_ptr                       = */ NULL,
+        /* hsize_t *     block_ptr                       = */ NULL,
+        /* int           skips                           = */ 0,
+        /* int           max_skips                       = */ 0,
+        /* int64_t       total_tests                     = */ 0,
+        /* int64_t       tests_run                       = */ 0,
+        /* int64_t       tests_skipped                   = */ 0};
+    struct hs_dr_pio_test_vars_t *tv_ptr = &test_vars;
+
+    /* progress line; '\r' lets successive tests overwrite it on the console */
+    if (MAINPROCESS)
+        printf("\r - running test #%lld: small rank = %d, large rank = %d", (long long)(test_num + 1),
+               small_rank, large_rank);
+
+    /* create the file, datasets, dataspaces, etc. and populate test_vars */
+    hs_dr_pio_test__setup(test_num, edge_size, checker_edge_size, chunk_edge_size, small_rank, large_rank,
+                          use_collective_io, dset_type, express_test, tv_ptr);
+
+    /* initialize skips & max_skips */
+    tv_ptr->skips     = *skips_ptr;
+    tv_ptr->max_skips = max_skips;
+
+#if CKRBRD_HS_DR_PIO_TEST__RUN_TEST__DEBUG
+    if (MAINPROCESS) {
+        HDfprintf(stdout, "test %d: small rank = %d, large rank = %d.\n", test_num, small_rank, large_rank);
+        HDfprintf(stdout, "test %d: Initialization complete.\n", test_num);
+    }
+#endif /* CKRBRD_HS_DR_PIO_TEST__RUN_TEST__DEBUG */
+
+    /* first, verify that we can read from disk correctly using selections
+     * of different rank that H5Sselect_shape_same() views as being of the
+     * same shape.
+     *
+     * Start by reading a (small_rank - 1)-D slice from this processes slice
+     * of the on disk large data set, and verifying that the data read is
+     * correct.  Verify that H5Sselect_shape_same() returns true on the
+     * memory and file selections.
+     *
+     * The first step is to set up the needed checker board selection in the
+     * in memory small small cube
+     */
+
+    ckrbrd_hs_dr_pio_test__d2m_l2s(tv_ptr);
+
+    /* similarly, read slices of the on disk small data set into slices
+     * through the in memory large data set, and verify that the correct
+     * data (and only the correct data) is read.
+     */
+
+    ckrbrd_hs_dr_pio_test__d2m_s2l(tv_ptr);
+
+    /* now we go in the opposite direction, verifying that we can write
+     * from memory to file using selections of different rank that
+     * H5Sselect_shape_same() views as being of the same shape.
+     *
+     * Start by writing small_rank - 1 D slices from the in memory large data
+     * set to the on disk small dataset.  After each write, read the slice of
+     * the small dataset back from disk, and verify that it contains the
+     * expected data.  Verify that H5Sselect_shape_same() returns true on
+     * the memory and file selections.
+     */
+
+    ckrbrd_hs_dr_pio_test__m2d_l2s(tv_ptr);
+
+    /* Now write the contents of the process's slice of the in memory
+     * small data set to slices of the on disk large data set.  After
+     * each write, read the process's slice of the large data set back
+     * into memory, and verify that it contains the expected data.
+     * Verify that H5Sselect_shape_same() returns true on the memory
+     * and file selections.
+     */
+
+    ckrbrd_hs_dr_pio_test__m2d_s2l(tv_ptr);
+
+#if CKRBRD_HS_DR_PIO_TEST__RUN_TEST__DEBUG
+    if (MAINPROCESS) {
+        HDfprintf(stdout, "test %d: Subtests complete -- tests run/skipped/total = %lld/%lld/%lld.\n",
+                  test_num, (long long)(tv_ptr->tests_run), (long long)(tv_ptr->tests_skipped),
+                  (long long)(tv_ptr->total_tests));
+    }
+#endif /* CKRBRD_HS_DR_PIO_TEST__RUN_TEST__DEBUG */
+
+    /* release all HDF5 resources acquired in hs_dr_pio_test__setup() */
+    hs_dr_pio_test__takedown(tv_ptr);
+
+#if CKRBRD_HS_DR_PIO_TEST__RUN_TEST__DEBUG
+    if (MAINPROCESS) {
+        HDfprintf(stdout, "test %d: Takedown complete.\n", test_num);
+    }
+#endif /* CKRBRD_HS_DR_PIO_TEST__RUN_TEST__DEBUG */
+
+    /* propagate the cumulative skip / run / total counters back to the
+     * caller so they carry across successive run_test invocations.
+     */
+    *skips_ptr = tv_ptr->skips;
+    *total_tests_ptr += tv_ptr->total_tests;
+    *tests_run_ptr += tv_ptr->tests_run;
+    *tests_skipped_ptr += tv_ptr->tests_skipped;
+
+    return;
+
+} /* ckrbrd_hs_dr_pio_test__run_test() */
+
+/*-------------------------------------------------------------------------
+ * Function: ckrbrd_hs_dr_pio_test()
+ *
+ * Purpose: Test I/O to/from hyperslab selections of different rank in
+ * the parallel case.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 9/18/09
+ *
+ *-------------------------------------------------------------------------
+ */
+
+static void
+ckrbrd_hs_dr_pio_test(ShapeSameTestMethods sstest_type)
+{
+    int     express_test;
+    int     local_express_test;
+    int     mpi_size = -1;
+    int     mpi_rank = -1;
+    int     test_num = 0;
+    int     edge_size;
+    int     checker_edge_size = 3;
+    int     chunk_edge_size   = 0;
+    int     small_rank        = 3;
+    int     large_rank        = 4;
+    int     mpi_result;
+    hid_t   dset_type = H5T_NATIVE_UINT;
+    int     skips     = 0;
+    int     max_skips = 0;
+    /* The following table lists the number of sub-tests skipped between
+     * each test that is actually executed as a function of the express
+     * test level.  Note that any value in excess of 4880 will cause all
+     * sub tests to be skipped.
+     */
+    int     max_skips_tbl[4] = {0, 4, 64, 1024};
+    int64_t total_tests      = 0;
+    int64_t tests_run        = 0;
+    int64_t tests_skipped    = 0;
+
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* edge size is at least 6, and at least mpi_size */
+    edge_size = (mpi_size > 6 ? mpi_size : 6);
+
+    local_express_test = EXPRESS_MODE; /* GetTestExpress(); */
+
+    HDcompile_assert(sizeof(uint32_t) == sizeof(unsigned));
+
+    /* agree on a single express test level across all ranks via MPI_MAX */
+    mpi_result = MPI_Allreduce((void *)&local_express_test, (void *)&express_test, 1, MPI_INT, MPI_MAX,
+                               MPI_COMM_WORLD);
+
+    VRFY((mpi_result == MPI_SUCCESS), "MPI_Allreduce(0) succeeded");
+
+    /* NOTE(review): max_skips is selected from local_express_test rather
+     * than the MPI_MAX-reduced express_test computed just above.  With
+     * EXPRESS_MODE a compile-time constant the two are identical here, but
+     * if the local levels could ever differ per rank this would let ranks
+     * disagree on which sub-tests to skip -- confirm intended.
+     */
+    if (local_express_test < 0) {
+        max_skips = max_skips_tbl[0];
+    }
+    else if (local_express_test > 3) {
+        max_skips = max_skips_tbl[3];
+    }
+    else {
+        max_skips = max_skips_tbl[local_express_test];
+    }
+
+/* debugging hook: enable and clear DebugWait from a debugger to proceed */
+#if 0
+    {
+        int DebugWait = 1;
+
+        while (DebugWait) ;
+    }
+#endif
+
+    /* iterate over all rank pairs with 2 <= small_rank < large_rank <=
+     * PAR_SS_DR_MAX_RANK, running one sub-test per pair in the layout /
+     * I/O mode requested by sstest_type.
+     */
+    for (large_rank = 3; large_rank <= PAR_SS_DR_MAX_RANK; large_rank++) {
+
+        for (small_rank = 2; small_rank < large_rank; small_rank++) {
+            switch (sstest_type) {
+                case IND_CONTIG:
+                    /* contiguous data set, independent I/O */
+                    chunk_edge_size = 0;
+                    ckrbrd_hs_dr_pio_test__run_test(test_num, edge_size, checker_edge_size, chunk_edge_size,
+                                                    small_rank, large_rank, FALSE, dset_type, express_test,
+                                                    &skips, max_skips, &total_tests, &tests_run,
+                                                    &tests_skipped, mpi_rank);
+                    test_num++;
+                    break;
+                    /* end of case IND_CONTIG */
+
+                case COL_CONTIG:
+                    /* contiguous data set, collective I/O */
+                    chunk_edge_size = 0;
+                    ckrbrd_hs_dr_pio_test__run_test(test_num, edge_size, checker_edge_size, chunk_edge_size,
+                                                    small_rank, large_rank, TRUE, dset_type, express_test,
+                                                    &skips, max_skips, &total_tests, &tests_run,
+                                                    &tests_skipped, mpi_rank);
+                    test_num++;
+                    break;
+                    /* end of case COL_CONTIG */
+
+                case IND_CHUNKED:
+                    /* chunked data set, independent I/O */
+                    chunk_edge_size = 5;
+                    ckrbrd_hs_dr_pio_test__run_test(test_num, edge_size, checker_edge_size, chunk_edge_size,
+                                                    small_rank, large_rank, FALSE, dset_type, express_test,
+                                                    &skips, max_skips, &total_tests, &tests_run,
+                                                    &tests_skipped, mpi_rank);
+                    test_num++;
+                    break;
+                    /* end of case IND_CHUNKED */
+
+                case COL_CHUNKED:
+                    /* chunked data set, collective I/O */
+                    chunk_edge_size = 5;
+                    ckrbrd_hs_dr_pio_test__run_test(test_num, edge_size, checker_edge_size, chunk_edge_size,
+                                                    small_rank, large_rank, TRUE, dset_type, express_test,
+                                                    &skips, max_skips, &total_tests, &tests_run,
+                                                    &tests_skipped, mpi_rank);
+                    test_num++;
+                    break;
+                    /* end of case COL_CHUNKED */
+
+                default:
+                    VRFY((FALSE), "unknown test type");
+                    break;
+
+            } /* end of switch(sstest_type) */
+/* NOTE(review): this debug guard reuses CONTIG_HS_DR_PIO_TEST__DEBUG in the
+ * checkerboard test -- looks like a copy/paste leftover; confirm whether a
+ * CKRBRD_ macro was intended.
+ */
+#if CONTIG_HS_DR_PIO_TEST__DEBUG
+            if ((MAINPROCESS) && (tests_skipped > 0)) {
+                HDfprintf(stdout, "    run/skipped/total = %" PRId64 "/%" PRId64 "/%" PRId64 ".\n",
+                          tests_run, tests_skipped, total_tests);
+            }
+#endif /* CONTIG_HS_DR_PIO_TEST__DEBUG */
+        }
+    }
+
+    if (MAINPROCESS) {
+        if (tests_skipped > 0) {
+            HDfprintf(stdout, "    %" PRId64 " of %" PRId64 " subtests skipped to expedite testing.\n",
+                      tests_skipped, total_tests);
+        }
+        else
+            HDprintf("\n");
+    }
+
+    return;
+
+} /* ckrbrd_hs_dr_pio_test() */
+
+/* Main Body. Here for now, may have to move them to a separated file later. */
+
+/*
+ * Main driver of the Parallel HDF5 tests
+ */
+
+#include <stdio.h>
+
+#include "testphdf5.h"
+
+#ifndef PATH_MAX
+#define PATH_MAX 512
+#endif /* !PATH_MAX */
+
+/* global variables */
+
+/* Dataset dimensions.  Set from the -d command line option in
+ * parse_options() (scaled by mpi_size); presumably assigned defaults in
+ * main() before parsing -- TODO confirm (not visible in this section).
+ */
+int dim0;
+int dim1;
+
+/* Chunk dimensions.  parse_options() defaults these to roughly 1/10 of the
+ * corresponding dataset dimension (rounded up); overridable with -c.
+ */
+int chunkdim0;
+int chunkdim1;
+
+int nerrors   = 0;   /* errors count */
+int ndatasets = 300; /* number of datasets to create*/
+int ngroups   = 512; /* number of groups to create in root
+                      * group. */
+int facc_type       = FACC_MPIO;           /*Test file access type */
+int dxfer_coll_type = DXFER_COLLECTIVE_IO; /* data transfer mode; -i selects independent */
+
+H5E_auto2_t old_func;        /* previous error handler */
+void       *old_client_data; /* previous error handler arg.*/
+
+/* other option flags */
+
+#ifdef USE_PAUSE
+/* pause the process for a moment to allow debugger to attach if desired. */
+/* Will pause more if greenlight file is not present but will eventually */
+/* continue. */
+#include <sys/types.h>
+#include <sys/stat.h>
+
+void
+pause_proc(void)
+{
+    /* Hold the whole job at the barrier while rank 0 waits for a "go" file
+     * to appear in the working directory (or a fixed number of wait
+     * intervals to elapse), giving a debugger a chance to attach.
+     */
+    h5_stat_t sb;
+    char      signal_file[] = "go";
+    int       proc_pid;
+    int       attempts;
+    int       max_attempts = 10;
+    int       sleep_secs   = 10;
+
+    /* mpi variables */
+    int  mpi_size, mpi_rank;
+    int  mpi_namelen;
+    char mpi_name[MPI_MAX_PROCESSOR_NAME];
+
+    proc_pid = getpid();
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+    MPI_Get_processor_name(mpi_name, &mpi_namelen);
+
+    if (MAINPROCESS) {
+        for (attempts = 0; (HDstat(signal_file, &sb) == -1) && attempts < max_attempts; attempts++) {
+            if (attempts == 0) {
+                HDprintf("Proc %d (%*s, %d): to debug, attach %d\n", mpi_rank, mpi_namelen, mpi_name,
+                         proc_pid, proc_pid);
+            }
+            HDprintf("waiting(%ds) for file %s ...\n", sleep_secs, signal_file);
+            fflush(stdout);
+            HDsleep(sleep_secs);
+        }
+    }
+    MPI_Barrier(MPI_COMM_WORLD);
+}
+
+/* Use the Profile feature of MPI to call the pause_proc() */
+int
+MPI_Init(int *argc, char ***argv)
+{
+    /* Initialize MPI through the PMPI profiling entry point, then give a
+     * debugger a chance to attach before the tests proceed.
+     */
+    int rc = PMPI_Init(argc, argv);
+
+    pause_proc();
+    return rc;
+}
+#endif /* USE_PAUSE */
+
+/*
+ * Show command usage
+ */
+static void
+usage(void)
+{
+    /* Print the command line options understood by parse_options().  Note
+     * the added "-i" line: parse_options() accepts -i (independent I/O for
+     * collective transfers) but it was previously undocumented here.
+     */
+    HDprintf("    [-r] [-w] [-m<n_datasets>] [-n<n_groups>] "
+             "[-o] [-f <prefix>] [-d <dim0> <dim1>]\n");
+    HDprintf("\t-m<n_datasets>"
+             "\tset number of datasets for the multiple dataset test\n");
+    HDprintf("\t-n<n_groups>"
+             "\tset number of groups for the multiple group test\n");
+#if 0
+    HDprintf("\t-f <prefix>\tfilename prefix\n");
+#endif
+    HDprintf("\t-i\t\tuse independent I/O for collective MPI-IO accesses\n");
+    HDprintf("\t-2\t\tuse Split-file together with MPIO\n");
+    HDprintf("\t-d <factor0> <factor1>\tdataset dimensions factors. Defaults (%d,%d)\n", ROW_FACTOR,
+             COL_FACTOR);
+    HDprintf("\t-c <dim0> <dim1>\tdataset chunk dimensions. Defaults (dim0/10,dim1/10)\n");
+    HDprintf("\n");
+}
+
+/*
+ * parse the command line options
+ */
+static int
+parse_options(int argc, char **argv)
+{
+    /* Parse the command line options, setting the global dataset / chunk
+     * dimensions, file access type, and transfer mode.
+     *
+     * Returns 0 on success; 1 on any error (with nerrors incremented so
+     * the caller can print usage and bail out).
+     */
+    int mpi_size, mpi_rank; /* mpi variables */
+
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* setup default chunk-size. Make sure sizes are > 0 */
+
+    chunkdim0 = (dim0 + 9) / 10;
+    chunkdim1 = (dim1 + 9) / 10;
+
+    while (--argc) {
+        if (**(++argv) != '-') {
+            break;
+        }
+        else {
+            switch (*(*argv + 1)) {
+                case 'm':
+                    ndatasets = atoi((*argv + 1) + 1);
+                    if (ndatasets < 0) {
+                        nerrors++;
+                        return (1);
+                    }
+                    break;
+                case 'n':
+                    ngroups = atoi((*argv + 1) + 1);
+                    if (ngroups < 0) {
+                        nerrors++;
+                        return (1);
+                    }
+                    break;
+#if 0
+                case 'f': if (--argc < 1) {
+                    nerrors++;
+                    return(1);
+                }
+                if (**(++argv) == '-') {
+                    nerrors++;
+                    return(1);
+                }
+                paraprefix = *argv;
+                break;
+#endif
+                case 'i': /* Collective MPI-IO access with independent IO */
+                    dxfer_coll_type = DXFER_INDEPENDENT_IO;
+                    break;
+                case '2': /* Use the split-file driver with MPIO access */
+                    /* Can use $HDF5_METAPREFIX to define the */
+                    /* meta-file-prefix. */
+                    facc_type = FACC_MPIO | FACC_SPLIT;
+                    break;
+                case 'd': /* dimensizes */
+                    if (--argc < 2) {
+                        nerrors++;
+                        return (1);
+                    }
+                    dim0 = atoi(*(++argv)) * mpi_size;
+                    argc--;
+                    dim1 = atoi(*(++argv)) * mpi_size;
+                    /* set default chunkdim sizes too */
+                    chunkdim0 = (dim0 + 9) / 10;
+                    chunkdim1 = (dim1 + 9) / 10;
+                    break;
+                case 'c': /* chunk dimensions */
+                    if (--argc < 2) {
+                        nerrors++;
+                        return (1);
+                    }
+                    chunkdim0 = atoi(*(++argv));
+                    argc--;
+                    chunkdim1 = atoi(*(++argv));
+                    break;
+                case 'h': /* print help message--return with nerrors set */
+                    return (1);
+                default:
+                    HDprintf("Illegal option(%s)\n", *argv);
+                    nerrors++;
+                    return (1);
+            }
+        }
+    } /*while*/
+
+    /* check validity of dimension and chunk sizes */
+    if (dim0 <= 0 || dim1 <= 0) {
+        HDprintf("Illegal dim sizes (%d, %d)\n", dim0, dim1);
+        nerrors++;
+        return (1);
+    }
+    if (chunkdim0 <= 0 || chunkdim1 <= 0) {
+        HDprintf("Illegal chunkdim sizes (%d, %d)\n", chunkdim0, chunkdim1);
+        nerrors++;
+        return (1);
+    }
+
+    /* Make sure datasets can be divided into equal portions by the processes */
+    if ((dim0 % mpi_size) || (dim1 % mpi_size)) {
+        if (MAINPROCESS)
+            HDprintf("dim0(%d) and dim1(%d) must be multiples of processes(%d)\n", dim0, dim1, mpi_size);
+        nerrors++;
+        return (1);
+    }
+
+    /* compose the test filenames */
+    {
+        int i, n;
+
+        n = sizeof(FILENAME) / sizeof(FILENAME[0]) - 1; /* exclude the NULL */
+
+        /* snprintf (unlike strncpy) guarantees NUL-termination even when
+         * the source name is PATH_MAX bytes or longer.
+         */
+        for (i = 0; i < n; i++)
+            snprintf(filenames[i], PATH_MAX, "%s", FILENAME[i]);
+#if 0 /* no support for VFDs right now */
+            if (h5_fixname(FILENAME[i], fapl, filenames[i], PATH_MAX) == NULL) {
+                HDprintf("h5_fixname failed\n");
+                nerrors++;
+                return (1);
+            }
+#endif
+        if (MAINPROCESS) {
+            HDprintf("Test filenames are:\n");
+            for (i = 0; i < n; i++)
+                HDprintf("    %s\n", filenames[i]);
+        }
+    }
+
+    return (0);
+}
+
+/*
+ * Create the appropriate File access property list for the requested
+ * access type (l_facc_type):
+ *   FACC_DEFAULT            - plain H5P_FILE_ACCESS list
+ *   FACC_MPIO               - MPI-IO driver with collective metadata ops
+ *   FACC_MPIO | FACC_SPLIT  - split-file driver layered over MPI-IO
+ *
+ * Returns the new property list id (caller closes it); VRFY aborts the
+ * test on any failure along the way.
+ */
+hid_t
+create_faccess_plist(MPI_Comm comm, MPI_Info info, int l_facc_type)
+{
+    hid_t  ret_pl = -1;
+    herr_t ret; /* generic return value */
+    int    mpi_rank; /* mpi variables */
+
+    /* need the rank for error checking macros */
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    ret_pl = H5Pcreate(H5P_FILE_ACCESS);
+    VRFY((ret_pl >= 0), "H5P_FILE_ACCESS");
+
+    if (l_facc_type == FACC_DEFAULT)
+        return (ret_pl);
+
+    if (l_facc_type == FACC_MPIO) {
+        /* set Parallel access with communicator */
+        ret = H5Pset_fapl_mpio(ret_pl, comm, info);
+        VRFY((ret >= 0), "");
+        /* perform metadata reads and writes collectively */
+        ret = H5Pset_all_coll_metadata_ops(ret_pl, TRUE);
+        VRFY((ret >= 0), "");
+        ret = H5Pset_coll_metadata_write(ret_pl, TRUE);
+        VRFY((ret >= 0), "");
+        return (ret_pl);
+    }
+
+    if (l_facc_type == (FACC_MPIO | FACC_SPLIT)) {
+        hid_t mpio_pl;
+
+        /* temporary fapl used for both halves of the split driver */
+        mpio_pl = H5Pcreate(H5P_FILE_ACCESS);
+        VRFY((mpio_pl >= 0), "");
+        /* set Parallel access with communicator */
+        ret = H5Pset_fapl_mpio(mpio_pl, comm, info);
+        VRFY((ret >= 0), "");
+
+        /* setup file access template */
+        ret_pl = H5Pcreate(H5P_FILE_ACCESS);
+        VRFY((ret_pl >= 0), "");
+        /* set Parallel access with communicator */
+        ret = H5Pset_fapl_split(ret_pl, ".meta", mpio_pl, ".raw", mpio_pl);
+        VRFY((ret >= 0), "H5Pset_fapl_split succeeded");
+        H5Pclose(mpio_pl);
+        return (ret_pl);
+    }
+
+    /* unknown file access types */
+    return (ret_pl);
+}
+
+/*
+ * Thin wrappers registered as individual test entries.  Each runs the
+ * shape-same hyperslab test for one (I/O mode x storage layout) cell of
+ * the matrix: IND/COL = independent/collective I/O, CONTIG/CHUNKED =
+ * contiguous/chunked dataset storage.
+ */
+
+/* Shape Same test using contiguous hyperslab using independent IO on contiguous datasets */
+static void
+sscontig1(void)
+{
+    contig_hs_dr_pio_test(IND_CONTIG);
+}
+
+/* Shape Same test using contiguous hyperslab using collective IO on contiguous datasets */
+static void
+sscontig2(void)
+{
+    contig_hs_dr_pio_test(COL_CONTIG);
+}
+
+/* Shape Same test using contiguous hyperslab using independent IO on chunked datasets */
+static void
+sscontig3(void)
+{
+    contig_hs_dr_pio_test(IND_CHUNKED);
+}
+
+/* Shape Same test using contiguous hyperslab using collective IO on chunked datasets */
+static void
+sscontig4(void)
+{
+    contig_hs_dr_pio_test(COL_CHUNKED);
+}
+
+/* Shape Same test using checker hyperslab using independent IO on contiguous datasets */
+static void
+sschecker1(void)
+{
+    ckrbrd_hs_dr_pio_test(IND_CONTIG);
+}
+
+/* Shape Same test using checker hyperslab using collective IO on contiguous datasets */
+static void
+sschecker2(void)
+{
+    ckrbrd_hs_dr_pio_test(COL_CONTIG);
+}
+
+/* Shape Same test using checker hyperslab using independent IO on chunked datasets */
+static void
+sschecker3(void)
+{
+    ckrbrd_hs_dr_pio_test(IND_CHUNKED);
+}
+
+/* Shape Same test using checker hyperslab using collective IO on chunked datasets */
+static void
+sschecker4(void)
+{
+    ckrbrd_hs_dr_pio_test(COL_CHUNKED);
+}
+
+/*
+ * Test driver for the Shape Same parallel tests.
+ *
+ * Initializes MPI and HDF5, verifies the active VOL connector supports
+ * the file/dataset operations the tests need, parses options, runs each
+ * shape-same variant in sequence, then gathers the error count across
+ * ranks and reports from rank 0.  Exit status is 0 on success, 1 on any
+ * error (the raw count cannot be returned -- exit codes are one byte).
+ */
+int
+main(int argc, char **argv)
+{
+    int mpi_size, mpi_rank; /* mpi variables */
+
+#ifndef H5_HAVE_WIN32_API
+    /* Un-buffer the stdout and stderr */
+    HDsetbuf(stderr, NULL);
+    HDsetbuf(stdout, NULL);
+#endif
+
+    MPI_Init(&argc, &argv);
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* default dataset dimensions scale with the number of ranks */
+    dim0 = ROW_FACTOR * mpi_size;
+    dim1 = COL_FACTOR * mpi_size;
+
+    if (MAINPROCESS) {
+        HDprintf("===================================\n");
+        HDprintf("Shape Same Tests Start\n");
+        HDprintf("        express_test = %d.\n", EXPRESS_MODE /* GetTestExpress() */);
+        HDprintf("===================================\n");
+    }
+
+    /* Attempt to turn off atexit post processing so that in case errors
+     * happen during the test and the process is aborted, it will not get
+     * hang in the atexit post processing in which it may try to make MPI
+     * calls. By then, MPI calls may not work.
+     */
+    if (H5dont_atexit() < 0) {
+        if (MAINPROCESS)
+            HDprintf("%d: Failed to turn off atexit processing. Continue.\n", mpi_rank);
+    };
+    H5open();
+    /* h5_show_hostname(); */
+
+    fapl = H5Pcreate(H5P_FILE_ACCESS);
+
+    /* Get the capability flag of the VOL connector being used */
+    if (H5Pget_vol_cap_flags(fapl, &vol_cap_flags_g) < 0) {
+        if (MAINPROCESS)
+            HDprintf("Failed to get the capability flag of the VOL connector being used\n");
+
+        MPI_Finalize();
+        return 0;
+    }
+
+    /* Make sure the connector supports the API functions being tested. This test only
+     * uses a few API functions, such as H5Fcreate/close/delete, H5Dcreate/write/read/close,
+     */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        if (MAINPROCESS)
+            HDprintf("API functions for basic file and dataset aren't supported with this connector\n");
+
+        MPI_Finalize();
+        return 0;
+    }
+
+#if 0
+    HDmemset(filenames, 0, sizeof(filenames));
+    for (int i = 0; i < NFILENAME; i++) {
+        if (NULL == (filenames[i] = HDmalloc(PATH_MAX))) {
+            HDprintf("couldn't allocate filename array\n");
+            MPI_Abort(MPI_COMM_WORLD, -1);
+        }
+    }
+#endif
+
+    /* Initialize testing framework */
+    /* TestInit(argv[0], usage, parse_options); */
+
+    if (parse_options(argc, argv)) {
+        usage();
+        return 1;
+    }
+
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO && MAINPROCESS) {
+        HDprintf("===================================\n"
+                 "   Using Independent I/O with file set view to replace collective I/O \n"
+                 "===================================\n");
+    }
+
+    /* Shape Same tests using contiguous hyperslab.  The AddTest-based
+     * registration is disabled; each test is announced (rank 0 only) and
+     * invoked directly instead.
+     */
+#if 0
+    AddTest("sscontig1", sscontig1, NULL,
+        "Cntg hslab, ind IO, cntg dsets", filenames[0]);
+    AddTest("sscontig2", sscontig2, NULL,
+        "Cntg hslab, col IO, cntg dsets", filenames[0]);
+    AddTest("sscontig3", sscontig3, NULL,
+        "Cntg hslab, ind IO, chnk dsets", filenames[0]);
+    AddTest("sscontig4", sscontig4, NULL,
+        "Cntg hslab, col IO, chnk dsets", filenames[0]);
+#endif
+    if (MAINPROCESS) {
+        printf("Cntg hslab, ind IO, cntg dsets\n");
+        fflush(stdout);
+    }
+    sscontig1();
+    if (MAINPROCESS) {
+        printf("Cntg hslab, col IO, cntg dsets\n");
+        fflush(stdout);
+    }
+    sscontig2();
+    if (MAINPROCESS) {
+        printf("Cntg hslab, ind IO, chnk dsets\n");
+        fflush(stdout);
+    }
+    sscontig3();
+    if (MAINPROCESS) {
+        printf("Cntg hslab, col IO, chnk dsets\n");
+        fflush(stdout);
+    }
+    sscontig4();
+
+    /* Shape Same tests using checker board hyperslab */
+#if 0
+    AddTest("sschecker1", sschecker1, NULL,
+        "Check hslab, ind IO, cntg dsets", filenames[0]);
+    AddTest("sschecker2", sschecker2, NULL,
+        "Check hslab, col IO, cntg dsets", filenames[0]);
+    AddTest("sschecker3", sschecker3, NULL,
+        "Check hslab, ind IO, chnk dsets", filenames[0]);
+    AddTest("sschecker4", sschecker4, NULL,
+        "Check hslab, col IO, chnk dsets", filenames[0]);
+#endif
+    if (MAINPROCESS) {
+        printf("Check hslab, ind IO, cntg dsets\n");
+        fflush(stdout);
+    }
+    sschecker1();
+    if (MAINPROCESS) {
+        printf("Check hslab, col IO, cntg dsets\n");
+        fflush(stdout);
+    }
+    sschecker2();
+    if (MAINPROCESS) {
+        printf("Check hslab, ind IO, chnk dsets\n");
+        fflush(stdout);
+    }
+    sschecker3();
+    if (MAINPROCESS) {
+        printf("Check hslab, col IO, chnk dsets\n");
+        fflush(stdout);
+    }
+    sschecker4();
+
+    /* Display testing information */
+    /* TestInfo(argv[0]); */
+
+    /* setup file access property list */
+    H5Pset_fapl_mpio(fapl, MPI_COMM_WORLD, MPI_INFO_NULL);
+
+    /* Parse command line arguments */
+    /* TestParseCmdLine(argc, argv); */
+
+    /* Perform requested testing */
+    /* PerformTests(); */
+
+    /* make sure all processes are finished before final report, cleanup
+     * and exit.
+     */
+    MPI_Barrier(MPI_COMM_WORLD);
+
+    /* Display test summary, if requested */
+    /* if (MAINPROCESS && GetTestSummary())
+        TestSummary(); */
+
+    /* Clean up test files */
+    /* h5_clean_files(FILENAME, fapl); */
+    H5Fdelete(FILENAME[0], fapl);
+    H5Pclose(fapl);
+
+    /* nerrors += GetTestNumErrs(); */
+
+    /* Gather errors from all processes: every rank ends up with the
+     * maximum error count seen on any rank.
+     */
+    {
+        int temp;
+        MPI_Allreduce(&nerrors, &temp, 1, MPI_INT, MPI_MAX, MPI_COMM_WORLD);
+        nerrors = temp;
+    }
+
+    if (MAINPROCESS) { /* only process 0 reports */
+        HDprintf("===================================\n");
+        if (nerrors)
+            HDprintf("***Shape Same tests detected %d errors***\n", nerrors);
+        else
+            HDprintf("Shape Same tests finished successfully\n");
+        HDprintf("===================================\n");
+    }
+
+#if 0
+    for (int i = 0; i < NFILENAME; i++) {
+        HDfree(filenames[i]);
+        filenames[i] = NULL;
+    }
+#endif
+
+    /* close HDF5 library */
+    H5close();
+
+    /* Release test infrastructure */
+    /* TestShutdown(); */
+
+    MPI_Finalize();
+
+    /* cannot just return (nerrors) because exit code is limited to 1byte */
+    return (nerrors != 0);
+}
diff --git a/testpar/API/t_span_tree.c b/testpar/API/t_span_tree.c
new file mode 100644
index 0000000..5aafb0b
--- /dev/null
+++ b/testpar/API/t_span_tree.c
@@ -0,0 +1,2622 @@
+
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ This program will test irregular hyperslab selections with collective write and read.
+ The way to test whether collective write and read works is to use independent IO
+ output to verify the collective output.
+
+ 1) We will write two datasets with the same hyperslab selection settings;
+ one in independent mode,
+ one in collective mode,
+ 2) We will read two datasets with the same hyperslab selection settings,
+ 1. independent read to read independent output,
+           independent read to read collective output,
+ Compare the result,
+ If the result is the same, then collective write succeeds.
+ 2. collective read to read independent output,
+ independent read to read independent output,
+ Compare the result,
+ If the result is the same, then collective read succeeds.
+
+ */
+
+#include "hdf5.h"
+#if 0
+#include "H5private.h"
+#endif
+#include "testphdf5.h"
+
+#define LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG 0
+
+static void coll_write_test(int chunk_factor);
+static void coll_read_test(void);
+
+/*-------------------------------------------------------------------------
+ * Function:  coll_irregular_cont_write
+ *
+ * Purpose:   Wrapper to test the collectively irregular hyperslab write in
+ *            contiguous storage
+ *
+ * Return:    Success:    0
+ *
+ *            Failure:    -1
+ *
+ * Programmer:  Unknown
+ *              Dec 2nd, 2004
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+coll_irregular_cont_write(void)
+{
+    int mpi_rank;
+
+    /* rank is needed by the MAINPROCESS macro below */
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested;
+     * otherwise announce SKIPPED (rank 0 only) and return without running.
+     */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file dataset, or dataset more aren't supported with this "
+                   "connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    /* chunk_factor 0 => contiguous storage */
+    coll_write_test(0);
+}
+
+/*-------------------------------------------------------------------------
+ * Function:  coll_irregular_cont_read
+ *
+ * Purpose:   Wrapper to test the collectively irregular hyperslab read in
+ *            contiguous storage
+ *
+ * Return:    Success:    0
+ *
+ *            Failure:    -1
+ *
+ * Programmer:  Unknown
+ *              Dec 2nd, 2004
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+coll_irregular_cont_read(void)
+{
+    int mpi_rank;
+
+    /* rank is needed by the MAINPROCESS macro below */
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested;
+     * otherwise announce SKIPPED (rank 0 only) and return without running.
+     */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file dataset, or dataset more aren't supported with this "
+                   "connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    coll_read_test();
+}
+
+/*-------------------------------------------------------------------------
+ * Function:  coll_irregular_simple_chunk_write
+ *
+ * Purpose:   Wrapper to test the collectively irregular hyperslab write in
+ *            chunk storage(1 chunk)
+ *
+ * Return:    Success:    0
+ *
+ *            Failure:    -1
+ *
+ * Programmer:  Unknown
+ *              Dec 2nd, 2004
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+coll_irregular_simple_chunk_write(void)
+{
+    int mpi_rank;
+
+    /* rank is needed by the MAINPROCESS macro below */
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested;
+     * otherwise announce SKIPPED (rank 0 only) and return without running.
+     */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file dataset, or dataset more aren't supported with this "
+                   "connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    /* chunk_factor 1 => a single chunk covering the whole dataset */
+    coll_write_test(1);
+}
+
+/*-------------------------------------------------------------------------
+ * Function:  coll_irregular_simple_chunk_read
+ *
+ * Purpose:   Wrapper to test the collectively irregular hyperslab read in chunk
+ *            storage(1 chunk)
+ *
+ * Return:    Success:    0
+ *
+ *            Failure:    -1
+ *
+ * Programmer:  Unknown
+ *              Dec 2nd, 2004
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+coll_irregular_simple_chunk_read(void)
+{
+    int mpi_rank;
+
+    /* rank is needed by the MAINPROCESS macro below */
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested;
+     * otherwise announce SKIPPED (rank 0 only) and return without running.
+     */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file dataset, or dataset more aren't supported with this "
+                   "connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    coll_read_test();
+}
+
+/*-------------------------------------------------------------------------
+ * Function:  coll_irregular_complex_chunk_write
+ *
+ * Purpose:   Wrapper to test the collectively irregular hyperslab write in chunk
+ *            storage(4 chunks)
+ *
+ * Return:    Success:    0
+ *
+ *            Failure:    -1
+ *
+ * Programmer:  Unknown
+ *              Dec 2nd, 2004
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+coll_irregular_complex_chunk_write(void)
+{
+    int mpi_rank;
+
+    /* rank is needed by the MAINPROCESS macro below */
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested;
+     * otherwise announce SKIPPED (rank 0 only) and return without running.
+     */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file dataset, or dataset more aren't supported with this "
+                   "connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    /* chunk_factor 4 => 4 chunks along each dimension */
+    coll_write_test(4);
+}
+
+/*-------------------------------------------------------------------------
+ * Function:  coll_irregular_complex_chunk_read
+ *
+ * Purpose:   Wrapper to test the collectively irregular hyperslab read in chunk
+ *            storage(4 chunks)
+ *
+ * Return:    Success:    0
+ *
+ *            Failure:    -1
+ *
+ * Programmer:  Unknown
+ *              Dec 2nd, 2004
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+coll_irregular_complex_chunk_read(void)
+{
+    int mpi_rank;
+
+    /* rank is needed by the MAINPROCESS macro below */
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested;
+     * otherwise announce SKIPPED (rank 0 only) and return without running.
+     */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC) ||
+        !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_MORE)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file dataset, or dataset more aren't supported with this "
+                   "connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    coll_read_test();
+}
+
+/*-------------------------------------------------------------------------
+ * Function:  coll_write_test
+ *
+ * Purpose:   To test the collectively irregular hyperslab write in chunk
+ *            storage.  Two datasets are written with identical irregular
+ *            (two-hyperslab) selections -- one independently, one
+ *            collectively -- then both are read back independently and
+ *            compared element-for-element.
+ * Input:     number of chunks on each dimension
+ *            if number is equal to 0, contiguous storage
+ * Return:    Success:    0
+ *
+ *            Failure:    -1
+ *
+ * Programmer:  Unknown
+ *              Dec 2nd, 2004
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+coll_write_test(int chunk_factor)
+{
+
+    const char *filename;
+    hid_t       facc_plist, dxfer_plist, dcrt_plist;
+    hid_t       file, datasetc, dataseti;           /* File and dataset identifiers */
+    hid_t       mspaceid1, mspaceid, fspaceid, fspaceid1; /* Dataspace identifiers */
+
+    hsize_t mdim1[1]; /* Dimension size of the first dataset (in memory) */
+    hsize_t fsdim[2]; /* Dimension sizes of the dataset (on disk) */
+    hsize_t mdim[2];  /* Dimension sizes of the dataset in memory when we
+                       * read selection from the dataset on the disk
+                       */
+
+    hsize_t start[2];  /* Start of hyperslab */
+    hsize_t stride[2]; /* Stride of hyperslab */
+    hsize_t count[2];  /* Block count */
+    hsize_t block[2];  /* Block sizes */
+    hsize_t chunk_dims[2];
+
+    herr_t ret;
+    int    i;
+    int    fillvalue = 0; /* Fill value for the dataset */
+
+    int *matrix_out  = NULL;
+    int *matrix_out1 = NULL; /* Buffer to read from the dataset */
+    int *vector      = NULL;
+
+    int mpi_size, mpi_rank;
+
+    MPI_Comm comm = MPI_COMM_WORLD;
+    MPI_Info info = MPI_INFO_NULL;
+
+    /*set up MPI parameters */
+    MPI_Comm_size(comm, &mpi_size);
+    MPI_Comm_rank(comm, &mpi_rank);
+
+    /* Obtain file name */
+    filename = PARATESTFILE /* GetTestParameters() */;
+
+    /*
+     * Buffers' initialization.
+     */
+
+    mdim1[0] = (hsize_t)(MSPACE1_DIM * mpi_size);
+    mdim[0]  = MSPACE_DIM1;
+    mdim[1]  = (hsize_t)(MSPACE_DIM2 * mpi_size);
+    fsdim[0] = FSPACE_DIM1;
+    fsdim[1] = (hsize_t)(FSPACE_DIM2 * mpi_size);
+
+    vector      = (int *)HDmalloc(sizeof(int) * (size_t)mdim1[0] * (size_t)mpi_size);
+    matrix_out  = (int *)HDmalloc(sizeof(int) * (size_t)mdim[0] * (size_t)mdim[1] * (size_t)mpi_size);
+    matrix_out1 = (int *)HDmalloc(sizeof(int) * (size_t)mdim[0] * (size_t)mdim[1] * (size_t)mpi_size);
+
+    /* fail early (with the file's usual VRFY reporting) if any allocation
+     * failed, instead of dereferencing NULL below
+     */
+    VRFY((vector != NULL), "vector allocation succeeded");
+    VRFY((matrix_out != NULL), "matrix_out allocation succeeded");
+    VRFY((matrix_out1 != NULL), "matrix_out1 allocation succeeded");
+
+    HDmemset(vector, 0, sizeof(int) * (size_t)mdim1[0] * (size_t)mpi_size);
+    vector[0] = vector[MSPACE1_DIM * mpi_size - 1] = -1;
+    for (i = 1; i < MSPACE1_DIM * mpi_size - 1; i++)
+        vector[i] = (int)i;
+
+    /* Grab file access property list */
+    facc_plist = create_faccess_plist(comm, info, facc_type);
+    VRFY((facc_plist >= 0), "");
+
+    /*
+     * Create a file.
+     */
+    file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, facc_plist);
+    VRFY((file >= 0), "H5Fcreate succeeded");
+
+    /*
+     * Create property list for a dataset and set up fill values.
+     */
+    dcrt_plist = H5Pcreate(H5P_DATASET_CREATE);
+    VRFY((dcrt_plist >= 0), "");
+
+    ret = H5Pset_fill_value(dcrt_plist, H5T_NATIVE_INT, &fillvalue);
+    VRFY((ret >= 0), "Fill value creation property list succeeded");
+
+    if (chunk_factor != 0) {
+        chunk_dims[0] = fsdim[0] / (hsize_t)chunk_factor;
+        chunk_dims[1] = fsdim[1] / (hsize_t)chunk_factor;
+        ret           = H5Pset_chunk(dcrt_plist, 2, chunk_dims);
+        VRFY((ret >= 0), "chunk creation property list succeeded");
+    }
+
+    /*
+     *
+     * Create dataspace for the first dataset in the disk.
+     * dim1 = 9
+     * dim2 = 3600
+     *
+     *
+     */
+    fspaceid = H5Screate_simple(FSPACE_RANK, fsdim, NULL);
+    VRFY((fspaceid >= 0), "file dataspace created succeeded");
+
+    /*
+     * Create dataset in the file. Notice that creation
+     * property list dcrt_plist is used.
+     */
+    datasetc =
+        H5Dcreate2(file, "collect_write", H5T_NATIVE_INT, fspaceid, H5P_DEFAULT, dcrt_plist, H5P_DEFAULT);
+    VRFY((datasetc >= 0), "dataset created succeeded");
+
+    dataseti =
+        H5Dcreate2(file, "independ_write", H5T_NATIVE_INT, fspaceid, H5P_DEFAULT, dcrt_plist, H5P_DEFAULT);
+    VRFY((dataseti >= 0), "dataset created succeeded");
+
+    /* The First selection for FILE
+     *
+     *  block (3,2)
+     *  stride(4,3)
+     *  count (1,768/mpi_size)
+     *  start (0,1+768*3*mpi_rank/mpi_size)
+     *
+     */
+
+    start[0]  = FHSTART0;
+    start[1]  = (hsize_t)(FHSTART1 + mpi_rank * FHSTRIDE1 * FHCOUNT1);
+    stride[0] = FHSTRIDE0;
+    stride[1] = FHSTRIDE1;
+    count[0]  = FHCOUNT0;
+    count[1]  = FHCOUNT1;
+    block[0]  = FHBLOCK0;
+    block[1]  = FHBLOCK1;
+
+    ret = H5Sselect_hyperslab(fspaceid, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "hyperslab selection succeeded");
+
+    /* The Second selection for FILE
+     *
+     *  block  (3,768)
+     *  stride (1,1)
+     *  count  (1,1)
+     *  start  (4,768*mpi_rank/mpi_size)
+     *
+     */
+
+    start[0]  = SHSTART0;
+    start[1]  = (hsize_t)(SHSTART1 + SHCOUNT1 * SHBLOCK1 * mpi_rank);
+    stride[0] = SHSTRIDE0;
+    stride[1] = SHSTRIDE1;
+    count[0]  = SHCOUNT0;
+    count[1]  = SHCOUNT1;
+    block[0]  = SHBLOCK0;
+    block[1]  = SHBLOCK1;
+
+    /* OR the second slab into the existing selection -> irregular shape */
+    ret = H5Sselect_hyperslab(fspaceid, H5S_SELECT_OR, start, stride, count, block);
+    VRFY((ret >= 0), "hyperslab selection succeeded");
+
+    /*
+     * Create dataspace for the first dataset in the memory
+     * dim1 = 27000
+     *
+     */
+    mspaceid1 = H5Screate_simple(MSPACE1_RANK, mdim1, NULL);
+    VRFY((mspaceid1 >= 0), "memory dataspace created succeeded");
+
+    /*
+     * Memory space is 1-D, this is a good test to check
+     * whether a span-tree derived datatype needs to be built.
+     *  block  1
+     *  stride 1
+     *  count  6912/mpi_size
+     *  start  1
+     *
+     */
+    start[0]  = MHSTART0;
+    stride[0] = MHSTRIDE0;
+    count[0]  = MHCOUNT0;
+    block[0]  = MHBLOCK0;
+
+    ret = H5Sselect_hyperslab(mspaceid1, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "hyperslab selection succeeded");
+
+    /* independent write */
+    ret = H5Dwrite(dataseti, H5T_NATIVE_INT, mspaceid1, fspaceid, H5P_DEFAULT, vector);
+    VRFY((ret >= 0), "dataset independent write succeed");
+
+    dxfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((dxfer_plist >= 0), "");
+
+    ret = H5Pset_dxpl_mpio(dxfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((ret >= 0), "MPIO data transfer property list succeed");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(dxfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* collective write */
+    ret = H5Dwrite(datasetc, H5T_NATIVE_INT, mspaceid1, fspaceid, dxfer_plist, vector);
+    VRFY((ret >= 0), "dataset collective write succeed");
+
+    ret = H5Sclose(mspaceid1);
+    VRFY((ret >= 0), "");
+
+    ret = H5Sclose(fspaceid);
+    VRFY((ret >= 0), "");
+
+    /*
+     * Close dataset.
+     */
+    ret = H5Dclose(datasetc);
+    VRFY((ret >= 0), "");
+
+    ret = H5Dclose(dataseti);
+    VRFY((ret >= 0), "");
+
+    /*
+     * Close the file.
+     */
+    ret = H5Fclose(file);
+    VRFY((ret >= 0), "");
+    /*
+     * Close property list
+     */
+
+    ret = H5Pclose(facc_plist);
+    VRFY((ret >= 0), "");
+    ret = H5Pclose(dxfer_plist);
+    VRFY((ret >= 0), "");
+    ret = H5Pclose(dcrt_plist);
+    VRFY((ret >= 0), "");
+
+    /*
+     * Open the file.
+     */
+
+    /***
+
+      For testing collective hyperslab selection write
+      In this test, we are using independent read to check
+      the correctness of collective write compared with
+      independent write,
+
+      In order to thoroughly test this feature, we choose
+      a different selection set for reading the data out.
+
+
+    ***/
+
+    /* Obtain file access property list with MPI-IO driver */
+    facc_plist = create_faccess_plist(comm, info, facc_type);
+    VRFY((facc_plist >= 0), "");
+
+    file = H5Fopen(filename, H5F_ACC_RDONLY, facc_plist);
+    VRFY((file >= 0), "H5Fopen succeeded");
+
+    /*
+     * Open the dataset.
+     */
+    datasetc = H5Dopen2(file, "collect_write", H5P_DEFAULT);
+    VRFY((datasetc >= 0), "H5Dopen2 succeeded");
+
+    dataseti = H5Dopen2(file, "independ_write", H5P_DEFAULT);
+    VRFY((dataseti >= 0), "H5Dopen2 succeeded");
+
+    /*
+     * Get dataspace of the open dataset.
+     */
+    fspaceid = H5Dget_space(datasetc);
+    VRFY((fspaceid >= 0), "file dataspace obtained succeeded");
+
+    fspaceid1 = H5Dget_space(dataseti);
+    VRFY((fspaceid1 >= 0), "file dataspace obtained succeeded");
+
+    /* The First selection for FILE to read
+     *
+     *  block (1,1)
+     *  stride(1,1)
+     *  count (3,768/mpi_size)
+     *  start (1,2+768*mpi_rank/mpi_size)
+     *
+     */
+    start[0]  = RFFHSTART0;
+    start[1]  = (hsize_t)(RFFHSTART1 + mpi_rank * RFFHCOUNT1);
+    block[0]  = RFFHBLOCK0;
+    block[1]  = RFFHBLOCK1;
+    stride[0] = RFFHSTRIDE0;
+    stride[1] = RFFHSTRIDE1;
+    count[0]  = RFFHCOUNT0;
+    count[1]  = RFFHCOUNT1;
+
+    /* The first selection of the dataset generated by collective write */
+    ret = H5Sselect_hyperslab(fspaceid, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "hyperslab selection succeeded");
+
+    /* The first selection of the dataset generated by independent write */
+    ret = H5Sselect_hyperslab(fspaceid1, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "hyperslab selection succeeded");
+
+    /* The Second selection for FILE to read
+     *
+     *  block (1,1)
+     *  stride(1,1)
+     *  count (3,1536/mpi_size)
+     *  start (2,4+1536*mpi_rank/mpi_size)
+     *
+     */
+
+    start[0]  = RFSHSTART0;
+    start[1]  = (hsize_t)(RFSHSTART1 + RFSHCOUNT1 * mpi_rank);
+    block[0]  = RFSHBLOCK0;
+    block[1]  = RFSHBLOCK1;
+    stride[0] = RFSHSTRIDE0;
+    stride[1] = RFSHSTRIDE0; /* NOTE(review): kept as in original; RFSHSTRIDE1 may be intended -- confirm */
+    count[0]  = RFSHCOUNT0;
+    count[1]  = RFSHCOUNT1;
+
+    /* The second selection of the dataset generated by collective write */
+    ret = H5Sselect_hyperslab(fspaceid, H5S_SELECT_OR, start, stride, count, block);
+    VRFY((ret >= 0), "hyperslab selection succeeded");
+
+    /* The second selection of the dataset generated by independent write */
+    ret = H5Sselect_hyperslab(fspaceid1, H5S_SELECT_OR, start, stride, count, block);
+    VRFY((ret >= 0), "hyperslab selection succeeded");
+
+    /*
+     * Create memory dataspace.
+     * rank  = 2
+     * mdim1 = 9
+     * mdim2 = 3600
+     *
+     */
+    mspaceid = H5Screate_simple(MSPACE_RANK, mdim, NULL);
+    VRFY((mspaceid >= 0), "memory dataspace created succeeded");
+
+    /*
+     * Select two hyperslabs in memory. Hyperslabs has the same
+     * size and shape as the selected hyperslabs for the file dataspace
+     * Only the starting point is different.
+     * The first selection
+     *  block (1,1)
+     *  stride(1,1)
+     *  count (3,768/mpi_size)
+     *  start (0,768*mpi_rank/mpi_size)
+     *
+     */
+
+    start[0]  = RMFHSTART0;
+    start[1]  = (hsize_t)(RMFHSTART1 + mpi_rank * RMFHCOUNT1);
+    block[0]  = RMFHBLOCK0;
+    block[1]  = RMFHBLOCK1;
+    stride[0] = RMFHSTRIDE0;
+    stride[1] = RMFHSTRIDE1;
+    count[0]  = RMFHCOUNT0;
+    count[1]  = RMFHCOUNT1;
+
+    ret = H5Sselect_hyperslab(mspaceid, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "hyperslab selection succeeded");
+
+    /*
+     * Select two hyperslabs in memory. Hyperslabs has the same
+     * size and shape as the selected hyperslabs for the file dataspace
+     * Only the starting point is different.
+     * The second selection
+     *  block (1,1)
+     *  stride(1,1)
+     *  count (3,1536/mpi_size)
+     *  start (1,2+1536*mpi_rank/mpi_size)
+     *
+     */
+    start[0]  = RMSHSTART0;
+    start[1]  = (hsize_t)(RMSHSTART1 + mpi_rank * RMSHCOUNT1);
+    block[0]  = RMSHBLOCK0;
+    block[1]  = RMSHBLOCK1;
+    stride[0] = RMSHSTRIDE0;
+    stride[1] = RMSHSTRIDE1;
+    count[0]  = RMSHCOUNT0;
+    count[1]  = RMSHCOUNT1;
+
+    ret = H5Sselect_hyperslab(mspaceid, H5S_SELECT_OR, start, stride, count, block);
+    VRFY((ret >= 0), "hyperslab selection succeeded");
+
+    /*
+     * Initialize data buffer.
+     */
+
+    HDmemset(matrix_out, 0, sizeof(int) * (size_t)MSPACE_DIM1 * (size_t)MSPACE_DIM2 * (size_t)mpi_size);
+    HDmemset(matrix_out1, 0, sizeof(int) * (size_t)MSPACE_DIM1 * (size_t)MSPACE_DIM2 * (size_t)mpi_size);
+    /*
+     * Read data back to the buffer matrix_out.
+     */
+
+    ret = H5Dread(datasetc, H5T_NATIVE_INT, mspaceid, fspaceid, H5P_DEFAULT, matrix_out);
+    VRFY((ret >= 0), "H5D independent read succeed");
+
+    ret = H5Dread(dataseti, H5T_NATIVE_INT, mspaceid, fspaceid, H5P_DEFAULT, matrix_out1);
+    VRFY((ret >= 0), "H5D independent read succeed");
+
+    /* Compare the collectively written data against the independently
+     * written data; a single mismatch fails the test.
+     */
+    ret = 0;
+
+    for (i = 0; i < MSPACE_DIM1 * MSPACE_DIM2 * mpi_size; i++) {
+        if (matrix_out[i] != matrix_out1[i])
+            ret = -1;
+        if (ret < 0)
+            break;
+    }
+
+    VRFY((ret >= 0), "H5D irregular collective write succeed");
+
+    /*
+     * Close memory file and memory dataspaces.
+     */
+    ret = H5Sclose(mspaceid);
+    VRFY((ret >= 0), "");
+    ret = H5Sclose(fspaceid);
+    VRFY((ret >= 0), "");
+
+    /*
+     * Close dataset.
+     */
+    ret = H5Dclose(dataseti);
+    VRFY((ret >= 0), "");
+
+    ret = H5Dclose(datasetc);
+    VRFY((ret >= 0), "");
+
+    /*
+     * Close property list
+     */
+
+    ret = H5Pclose(facc_plist);
+    VRFY((ret >= 0), "");
+
+    /*
+     * Close the file.
+     */
+    ret = H5Fclose(file);
+    VRFY((ret >= 0), "");
+
+    /* HDfree() (like free()) is a no-op on NULL; no guards needed */
+    HDfree(vector);
+    HDfree(matrix_out);
+    HDfree(matrix_out1);
+
+    return;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: coll_read_test
+ *
+ * Purpose: To test the collectively irregular hyperslab read in chunk
+ * storage
+ * Input: number of chunks on each dimension
+ * if number is equal to 0, contiguous storage
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Unknown
+ * Dec 2nd, 2004
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+coll_read_test(void)
+{
+
+    const char *filename;
+    hid_t       facc_plist, dxfer_plist;
+    hid_t       file, dataseti; /* File and dataset identifiers */
+    hid_t       mspaceid, fspaceid1; /* Dataspace identifiers */
+
+    /* Dimension sizes of the dataset (on disk) */
+    hsize_t mdim[2]; /* Dimension sizes of the dataset in memory when we
+                      * read selection from the dataset on the disk
+                      */
+
+    hsize_t start[2];  /* Start of hyperslab */
+    hsize_t stride[2]; /* Stride of hyperslab */
+    hsize_t count[2];  /* Block count */
+    hsize_t block[2];  /* Block sizes */
+    herr_t  ret;
+
+    int i;
+
+    int *matrix_out;
+    int *matrix_out1; /* Buffer to read from the dataset */
+
+    int mpi_size, mpi_rank;
+
+    MPI_Comm comm = MPI_COMM_WORLD;
+    MPI_Info info = MPI_INFO_NULL;
+
+    /* Set up MPI parameters */
+    MPI_Comm_size(comm, &mpi_size);
+    MPI_Comm_rank(comm, &mpi_rank);
+
+    /* Obtain file name */
+    filename = PARATESTFILE /* GetTestParameters() */;
+
+    /* Allocate the two read buffers; each holds the full dataset extent
+     * (MSPACE_DIM1 x MSPACE_DIM2 * mpi_size).  Check the allocations --
+     * both buffers are dereferenced unconditionally below.
+     */
+    mdim[0] = MSPACE_DIM1;
+    mdim[1] = (hsize_t)(MSPACE_DIM2 * mpi_size);
+    matrix_out = (int *)HDmalloc(sizeof(int) * (size_t)MSPACE_DIM1 * (size_t)MSPACE_DIM2 * (size_t)mpi_size);
+    VRFY((matrix_out != NULL), "HDmalloc of matrix_out succeeded");
+    matrix_out1 = (int *)HDmalloc(sizeof(int) * (size_t)MSPACE_DIM1 * (size_t)MSPACE_DIM2 * (size_t)mpi_size);
+    VRFY((matrix_out1 != NULL), "HDmalloc of matrix_out1 succeeded");
+
+    /*** For testing collective hyperslab selection read ***/
+
+    /* Obtain file access property list */
+    facc_plist = create_faccess_plist(comm, info, facc_type);
+    VRFY((facc_plist >= 0), "");
+
+    /*
+     * Open the file.
+     */
+    file = H5Fopen(filename, H5F_ACC_RDONLY, facc_plist);
+    VRFY((file >= 0), "H5Fopen succeeded");
+
+    /*
+     * Open the dataset.
+     */
+    dataseti = H5Dopen2(file, "independ_write", H5P_DEFAULT);
+    VRFY((dataseti >= 0), "H5Dopen2 succeeded");
+
+    /*
+     * Get dataspace of the open dataset.
+     */
+    fspaceid1 = H5Dget_space(dataseti);
+    VRFY((fspaceid1 >= 0), "file dataspace obtained succeeded");
+
+    /* The First selection for FILE to read
+     *
+     * block (1,1)
+     * stride(1,1)
+     * count (3,768/mpi_size)
+     * start (1,2+768*mpi_rank/mpi_size)
+     *
+     */
+    start[0]  = RFFHSTART0;
+    start[1]  = (hsize_t)(RFFHSTART1 + mpi_rank * RFFHCOUNT1);
+    block[0]  = RFFHBLOCK0;
+    block[1]  = RFFHBLOCK1;
+    stride[0] = RFFHSTRIDE0;
+    stride[1] = RFFHSTRIDE1;
+    count[0]  = RFFHCOUNT0;
+    count[1]  = RFFHCOUNT1;
+
+    ret = H5Sselect_hyperslab(fspaceid1, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "hyperslab selection succeeded");
+
+    /* The Second selection for FILE to read
+     *
+     * block (1,1)
+     * stride(1,1)
+     * count (3,1536/mpi_size)
+     * start (2,4+1536*mpi_rank/mpi_size)
+     *
+     */
+    start[0]  = RFSHSTART0;
+    start[1]  = (hsize_t)(RFSHSTART1 + RFSHCOUNT1 * mpi_rank);
+    block[0]  = RFSHBLOCK0;
+    block[1]  = RFSHBLOCK1;
+    stride[0] = RFSHSTRIDE0;
+    stride[1] = RFSHSTRIDE1; /* was RFSHSTRIDE0 (copy/paste slip); index 1 pairs with the *1 macro */
+    count[0]  = RFSHCOUNT0;
+    count[1]  = RFSHCOUNT1;
+
+    ret = H5Sselect_hyperslab(fspaceid1, H5S_SELECT_OR, start, stride, count, block);
+    VRFY((ret >= 0), "hyperslab selection succeeded");
+
+    /*
+     * Create memory dataspace.
+     */
+    mspaceid = H5Screate_simple(MSPACE_RANK, mdim, NULL);
+    VRFY((mspaceid >= 0), "memory dataspace created succeeded");
+
+    /*
+     * Select two hyperslabs in memory. Hyperslabs has the same
+     * size and shape as the selected hyperslabs for the file dataspace.
+     * Only the starting point is different.
+     * The first selection
+     * block (1,1)
+     * stride(1,1)
+     * count (3,768/mpi_size)
+     * start (0,768*mpi_rank/mpi_size)
+     *
+     */
+
+    start[0]  = RMFHSTART0;
+    start[1]  = (hsize_t)(RMFHSTART1 + mpi_rank * RMFHCOUNT1);
+    block[0]  = RMFHBLOCK0;
+    block[1]  = RMFHBLOCK1;
+    stride[0] = RMFHSTRIDE0;
+    stride[1] = RMFHSTRIDE1;
+    count[0]  = RMFHCOUNT0;
+    count[1]  = RMFHCOUNT1;
+    ret       = H5Sselect_hyperslab(mspaceid, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "hyperslab selection succeeded");
+
+    /*
+     * Select two hyperslabs in memory. Hyperslabs has the same
+     * size and shape as the selected hyperslabs for the file dataspace
+     * Only the starting point is different.
+     * The second selection
+     * block (1,1)
+     * stride(1,1)
+     * count (3,1536/mpi_size)
+     * start (1,2+1536*mpi_rank/mpi_size)
+     *
+     */
+    start[0]  = RMSHSTART0;
+    start[1]  = (hsize_t)(RMSHSTART1 + mpi_rank * RMSHCOUNT1);
+    block[0]  = RMSHBLOCK0;
+    block[1]  = RMSHBLOCK1;
+    stride[0] = RMSHSTRIDE0;
+    stride[1] = RMSHSTRIDE1;
+    count[0]  = RMSHCOUNT0;
+    count[1]  = RMSHCOUNT1;
+    ret       = H5Sselect_hyperslab(mspaceid, H5S_SELECT_OR, start, stride, count, block);
+    VRFY((ret >= 0), "hyperslab selection succeeded");
+
+    /*
+     * Initialize data buffers.
+     */
+
+    HDmemset(matrix_out, 0, sizeof(int) * (size_t)MSPACE_DIM1 * (size_t)MSPACE_DIM2 * (size_t)mpi_size);
+    HDmemset(matrix_out1, 0, sizeof(int) * (size_t)MSPACE_DIM1 * (size_t)MSPACE_DIM2 * (size_t)mpi_size);
+
+    /*
+     * Read data back to the buffer matrix_out.
+     */
+
+    dxfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((dxfer_plist >= 0), "");
+
+    ret = H5Pset_dxpl_mpio(dxfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((ret >= 0), "MPIO data transfer property list succeed");
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+        ret = H5Pset_dxpl_mpio_collective_opt(dxfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+        VRFY((ret >= 0), "set independent IO collectively succeeded");
+    }
+
+    /* Collective read */
+    ret = H5Dread(dataseti, H5T_NATIVE_INT, mspaceid, fspaceid1, dxfer_plist, matrix_out);
+    VRFY((ret >= 0), "H5D collective read succeed");
+
+    ret = H5Pclose(dxfer_plist);
+    VRFY((ret >= 0), "");
+
+    /* Independent read -- same selections; results must match the collective read */
+    ret = H5Dread(dataseti, H5T_NATIVE_INT, mspaceid, fspaceid1, H5P_DEFAULT, matrix_out1);
+    VRFY((ret >= 0), "H5D independent read succeed");
+
+    /* Compare the two read buffers element by element */
+    ret = 0;
+    for (i = 0; i < MSPACE_DIM1 * MSPACE_DIM2 * mpi_size; i++) {
+        if (matrix_out[i] != matrix_out1[i]) {
+            ret = -1;
+            break;
+        }
+    }
+    VRFY((ret >= 0), "H5D contiguous irregular collective read succeed");
+
+    /*
+     * Free read buffers.
+     */
+    HDfree(matrix_out);
+    HDfree(matrix_out1);
+
+    /*
+     * Close memory file and memory dataspaces.
+     */
+    ret = H5Sclose(mspaceid);
+    VRFY((ret >= 0), "");
+    ret = H5Sclose(fspaceid1);
+    VRFY((ret >= 0), "");
+
+    /*
+     * Close dataset.
+     */
+    ret = H5Dclose(dataseti);
+    VRFY((ret >= 0), "");
+
+    /*
+     * Close property list
+     */
+    ret = H5Pclose(facc_plist);
+    VRFY((ret >= 0), "");
+
+    /*
+     * Close the file.
+     */
+    ret = H5Fclose(file);
+    VRFY((ret >= 0), "");
+
+    return;
+}
+
+/****************************************************************
+**
+** lower_dim_size_comp_test__select_checker_board():
+**
+** Given a dataspace of tgt_rank, and dimensions:
+**
+** (mpi_size + 1), edge_size, ... , edge_size
+**
+** edge_size, and a checker_edge_size, select a checker
+** board selection of a sel_rank (sel_rank < tgt_rank)
+** dimensional slice through the dataspace parallel to the
+** sel_rank fastest changing indices, with origin (in the
+** higher indices) as indicated by the start array.
+**
+** Note that this function, is hard coded to presume a
+** maximum dataspace rank of 5.
+**
+** While this maximum is declared as a constant, increasing
+** it will require extensive coding in addition to changing
+** the value of the constant.
+**
+** JRM -- 11/11/09
+**
+****************************************************************/
+
+#define LDSCT_DS_RANK 5
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+#define LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK 0
+#endif
+
+#define LOWER_DIM_SIZE_COMP_TEST__SELECT_CHECKER_BOARD__DEBUG 0
+
+static void
+lower_dim_size_comp_test__select_checker_board(const int mpi_rank, const hid_t tgt_sid, const int tgt_rank,
+                                               const hsize_t dims[LDSCT_DS_RANK], const int checker_edge_size,
+                                               const int sel_rank, hsize_t sel_start[])
+{
+#if LOWER_DIM_SIZE_COMP_TEST__SELECT_CHECKER_BOARD__DEBUG
+    const char *fcnName = "lower_dim_size_comp_test__select_checker_board():";
+#endif
+    hbool_t   first_selection = TRUE;
+    int       i, j, k, l, m;
+    int       ds_offset;
+    int       sel_offset;
+    const int test_max_rank = LDSCT_DS_RANK; /* must update code if */
+                                             /* this changes */
+    hsize_t base_count;
+    hsize_t offset_count;
+    hsize_t start[LDSCT_DS_RANK];
+    hsize_t stride[LDSCT_DS_RANK];
+    hsize_t count[LDSCT_DS_RANK];
+    hsize_t block[LDSCT_DS_RANK];
+    herr_t  ret; /* Generic return value */
+
+#if LOWER_DIM_SIZE_COMP_TEST__SELECT_CHECKER_BOARD__DEBUG
+    if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+        HDfprintf(stdout, "%s:%d: dims/checker_edge_size = %d %d %d %d %d / %d\n", fcnName, mpi_rank,
+                  (int)dims[0], (int)dims[1], (int)dims[2], (int)dims[3], (int)dims[4], checker_edge_size);
+    }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__SELECT_CHECKER_BOARD__DEBUG */
+
+    HDassert(0 < checker_edge_size);
+    HDassert(0 < sel_rank);
+    HDassert(sel_rank <= tgt_rank);
+    HDassert(tgt_rank <= test_max_rank);
+    HDassert(test_max_rank <= LDSCT_DS_RANK);
+
+    /* index of the first dimension that participates in the checker board */
+    sel_offset = test_max_rank - sel_rank;
+    HDassert(sel_offset >= 0);
+
+    /* index of the first dimension actually present in the target dataspace */
+    ds_offset = test_max_rank - tgt_rank;
+    HDassert(ds_offset >= 0);
+    HDassert(ds_offset <= sel_offset);
+
+    HDassert((hsize_t)checker_edge_size <= dims[sel_offset]);
+    HDassert(dims[sel_offset] == 10);
+
+#if LOWER_DIM_SIZE_COMP_TEST__SELECT_CHECKER_BOARD__DEBUG
+    if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+        HDfprintf(stdout, "%s:%d: sel_rank/sel_offset = %d/%d.\n", fcnName, mpi_rank, sel_rank, sel_offset);
+        HDfprintf(stdout, "%s:%d: tgt_rank/ds_offset = %d/%d.\n", fcnName, mpi_rank, tgt_rank, ds_offset);
+    }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__SELECT_CHECKER_BOARD__DEBUG */
+
+    /* First, compute the base count (which assumes start == 0
+     * for the associated offset) and offset_count (which
+     * assumes start == checker_edge_size for the associated
+     * offset).
+     *
+     * Note that the following computation depends on the C99
+     * requirement that integer division discard any fraction
+     * (truncation towards zero) to function correctly. As we
+     * now require C99, this shouldn't be a problem, but noting
+     * it may save us some pain if we are ever obliged to support
+     * pre-C99 compilers again.
+     */
+
+    base_count = dims[sel_offset] / (hsize_t)(checker_edge_size * 2);
+
+    /* Fixed: use dims[sel_offset] to match the division above.  The old
+     * code indexed dims[sel_rank]; since sel_rank may equal LDSCT_DS_RANK
+     * (when sel_offset == 0), that read past the end of dims[].
+     */
+    if ((dims[sel_offset] % (hsize_t)(checker_edge_size * 2)) > 0) {
+
+        base_count++;
+    }
+
+    offset_count =
+        (hsize_t)((dims[sel_offset] - (hsize_t)checker_edge_size) / ((hsize_t)(checker_edge_size * 2)));
+
+    /* Fixed: dims[sel_offset], not dims[sel_rank] -- see note above. */
+    if (((dims[sel_offset] - (hsize_t)checker_edge_size) % ((hsize_t)(checker_edge_size * 2))) > 0) {
+
+        offset_count++;
+    }
+
+#if LOWER_DIM_SIZE_COMP_TEST__SELECT_CHECKER_BOARD__DEBUG
+    if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+        HDfprintf(stdout, "%s:%d: base_count/offset_count = %d/%d.\n", fcnName, mpi_rank, base_count,
+                  offset_count);
+    }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__SELECT_CHECKER_BOARD__DEBUG */
+
+    /* Now set up the stride and block arrays, and portions of the start
+     * and count arrays that will not be altered during the selection of
+     * the checker board.
+     */
+    i = 0;
+    while (i < ds_offset) {
+
+        /* these values should never be used */
+        start[i]  = 0;
+        stride[i] = 0;
+        count[i]  = 0;
+        block[i]  = 0;
+
+        i++;
+    }
+
+    while (i < sel_offset) {
+
+        /* dims above the selection: pick exactly the slice at sel_start[i] */
+        start[i]  = sel_start[i];
+        stride[i] = 2 * dims[i];
+        count[i]  = 1;
+        block[i]  = 1;
+
+        i++;
+    }
+
+    while (i < test_max_rank) {
+
+        /* dims in the selection: checker pattern -- start/count set in loops below */
+        stride[i] = (hsize_t)(2 * checker_edge_size);
+        block[i]  = (hsize_t)checker_edge_size;
+
+        i++;
+    }
+
+    /* Walk every combination of {base, offset} phase in the up-to-5
+     * checkered dimensions, OR-ing the hyperslabs whose phase indices
+     * sum to an even number -- this produces the checker board.
+     */
+    i = 0;
+    do {
+        if (0 >= sel_offset) {
+
+            if (i == 0) {
+
+                start[0] = 0;
+                count[0] = base_count;
+            }
+            else {
+
+                start[0] = (hsize_t)checker_edge_size;
+                count[0] = offset_count;
+            }
+        }
+
+        j = 0;
+        do {
+            if (1 >= sel_offset) {
+
+                if (j == 0) {
+
+                    start[1] = 0;
+                    count[1] = base_count;
+                }
+                else {
+
+                    start[1] = (hsize_t)checker_edge_size;
+                    count[1] = offset_count;
+                }
+            }
+
+            k = 0;
+            do {
+                if (2 >= sel_offset) {
+
+                    if (k == 0) {
+
+                        start[2] = 0;
+                        count[2] = base_count;
+                    }
+                    else {
+
+                        start[2] = (hsize_t)checker_edge_size;
+                        count[2] = offset_count;
+                    }
+                }
+
+                l = 0;
+                do {
+                    if (3 >= sel_offset) {
+
+                        if (l == 0) {
+
+                            start[3] = 0;
+                            count[3] = base_count;
+                        }
+                        else {
+
+                            start[3] = (hsize_t)checker_edge_size;
+                            count[3] = offset_count;
+                        }
+                    }
+
+                    m = 0;
+                    do {
+                        if (4 >= sel_offset) {
+
+                            if (m == 0) {
+
+                                start[4] = 0;
+                                count[4] = base_count;
+                            }
+                            else {
+
+                                start[4] = (hsize_t)checker_edge_size;
+                                count[4] = offset_count;
+                            }
+                        }
+
+                        if (((i + j + k + l + m) % 2) == 0) {
+
+#if LOWER_DIM_SIZE_COMP_TEST__SELECT_CHECKER_BOARD__DEBUG
+                            if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+
+                                HDfprintf(stdout, "%s%d: *** first_selection = %d ***\n", fcnName, mpi_rank,
+                                          (int)first_selection);
+                                HDfprintf(stdout, "%s:%d: i/j/k/l/m = %d/%d/%d/%d/%d\n", fcnName, mpi_rank, i,
+                                          j, k, l, m);
+                                HDfprintf(stdout, "%s:%d: start = %d %d %d %d %d.\n", fcnName, mpi_rank,
+                                          (int)start[0], (int)start[1], (int)start[2], (int)start[3],
+                                          (int)start[4]);
+                                HDfprintf(stdout, "%s:%d: stride = %d %d %d %d %d.\n", fcnName, mpi_rank,
+                                          (int)stride[0], (int)stride[1], (int)stride[2], (int)stride[3],
+                                          (int)stride[4]);
+                                HDfprintf(stdout, "%s:%d: count = %d %d %d %d %d.\n", fcnName, mpi_rank,
+                                          (int)count[0], (int)count[1], (int)count[2], (int)count[3],
+                                          (int)count[4]);
+                                HDfprintf(stdout, "%s:%d: block = %d %d %d %d %d.\n", fcnName, mpi_rank,
+                                          (int)block[0], (int)block[1], (int)block[2], (int)block[3],
+                                          (int)block[4]);
+                                HDfprintf(stdout, "%s:%d: n-cube extent dims = %d.\n", fcnName, mpi_rank,
+                                          H5Sget_simple_extent_ndims(tgt_sid));
+                                HDfprintf(stdout, "%s:%d: selection rank = %d.\n", fcnName, mpi_rank,
+                                          sel_rank);
+                            }
+#endif
+
+                            if (first_selection) {
+
+                                first_selection = FALSE;
+
+                                /* the arrays are indexed from ds_offset so only the
+                                 * dimensions present in tgt_sid are passed in
+                                 */
+                                ret = H5Sselect_hyperslab(tgt_sid, H5S_SELECT_SET, &(start[ds_offset]),
+                                                          &(stride[ds_offset]), &(count[ds_offset]),
+                                                          &(block[ds_offset]));
+
+                                VRFY((ret != FAIL), "H5Sselect_hyperslab(SET) succeeded");
+                            }
+                            else {
+
+                                ret = H5Sselect_hyperslab(tgt_sid, H5S_SELECT_OR, &(start[ds_offset]),
+                                                          &(stride[ds_offset]), &(count[ds_offset]),
+                                                          &(block[ds_offset]));
+
+                                VRFY((ret != FAIL), "H5Sselect_hyperslab(OR) succeeded");
+                            }
+                        }
+
+                        m++;
+
+                    } while ((m <= 1) && (4 >= sel_offset));
+
+                    l++;
+
+                } while ((l <= 1) && (3 >= sel_offset));
+
+                k++;
+
+            } while ((k <= 1) && (2 >= sel_offset));
+
+            j++;
+
+        } while ((j <= 1) && (1 >= sel_offset));
+
+        i++;
+
+    } while ((i <= 1) && (0 >= sel_offset));
+
+#if LOWER_DIM_SIZE_COMP_TEST__SELECT_CHECKER_BOARD__DEBUG
+    if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+        HDfprintf(stdout, "%s%d: H5Sget_select_npoints(tgt_sid) = %d.\n", fcnName, mpi_rank,
+                  (int)H5Sget_select_npoints(tgt_sid));
+    }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__SELECT_CHECKER_BOARD__DEBUG */
+
+    /* Clip the selection back to the dataspace proper. */
+
+    for (i = 0; i < test_max_rank; i++) {
+
+        start[i]  = 0;
+        stride[i] = dims[i];
+        count[i]  = 1;
+        block[i]  = dims[i];
+    }
+
+    ret = H5Sselect_hyperslab(tgt_sid, H5S_SELECT_AND, start, stride, count, block);
+
+    VRFY((ret != FAIL), "H5Sselect_hyperslab(AND) succeeded");
+
+#if LOWER_DIM_SIZE_COMP_TEST__SELECT_CHECKER_BOARD__DEBUG
+    if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+        HDfprintf(stdout, "%s%d: H5Sget_select_npoints(tgt_sid) = %d.\n", fcnName, mpi_rank,
+                  (int)H5Sget_select_npoints(tgt_sid));
+        HDfprintf(stdout, "%s%d: done.\n", fcnName, mpi_rank);
+    }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__SELECT_CHECKER_BOARD__DEBUG */
+
+    return;
+
+} /* lower_dim_size_comp_test__select_checker_board() */
+
+/****************************************************************
+**
+** lower_dim_size_comp_test__verify_data():
+**
+** Examine the supplied buffer to see if it contains the
+** expected data. Return TRUE if it does, and FALSE
+** otherwise.
+**
+** The supplied buffer is presumed to this process's slice
+** of the target data set. Each such slice will be an
+** n-cube of rank (rank -1) and the supplied edge_size with
+** origin (mpi_rank, 0, ... , 0) in the target data set.
+**
+** Further, the buffer is presumed to be the result of reading
+** or writing a checker board selection of an m (1 <= m <
+** rank) dimensional slice through this processes slice
+** of the target data set. Also, this slice must be parallel
+** to the fastest changing indices.
+**
+** It is further presumed that the buffer was zeroed before
+** the read/write, and that the full target data set (i.e.
+** the buffer/data set for all processes) was initialized
+** with the natural numbers listed in order from the origin
+** along the fastest changing axis.
+**
+** Thus for a 20x10x10 dataset, the value stored in location
+** (x, y, z) (assuming that z is the fastest changing index
+** and x the slowest) is assumed to be:
+**
+** (10 * 10 * x) + (10 * y) + z
+**
+** Further, supposing that this is process 10, this process's
+** slice of the dataset would be a 10 x 10 2-cube with origin
+** (10, 0, 0) in the data set, and would be initialize (prior
+** to the checkerboard selection) as follows:
+**
+** 1000, 1001, 1002, ... 1008, 1009
+** 1010, 1011, 1012, ... 1018, 1019
+** . . . . .
+** . . . . .
+** . . . . .
+** 1090, 1091, 1092, ... 1098, 1099
+**
+** In the case of a read from the processors slice of another
+** data set of different rank, the values expected will have
+** to be adjusted accordingly. This is done via the
+** first_expected_val parameter.
+**
+** Finally, the function presumes that the first element
+** of the buffer resides either at the origin of either
+** a selected or an unselected checker. (Translation:
+** if partial checkers appear in the buffer, they will
+** intersect the edges of the n-cube opposite the origin.)
+**
+****************************************************************/
+
+#define LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG 0
+
+static hbool_t
+lower_dim_size_comp_test__verify_data(uint32_t *buf_ptr,
+#if LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG
+ const int mpi_rank,
+#endif /* LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG */
+ const int rank, const int edge_size, const int checker_edge_size,
+ uint32_t first_expected_val, hbool_t buf_starts_in_checker)
+{
+#if LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG
+ const char *fcnName = "lower_dim_size_comp_test__verify_data():";
+#endif
+ hbool_t good_data = TRUE;
+ hbool_t in_checker;
+ hbool_t start_in_checker[5]; /* checker phase at the start of each nesting level */
+ uint32_t expected_value;
+ uint32_t *val_ptr;
+ int i, j, k, l, m; /* to track position in n-cube */
+ int v, w, x, y, z; /* to track position in checker */
+ const int test_max_rank = 5; /* code changes needed if this is increased */
+
+ HDassert(buf_ptr != NULL);
+ HDassert(0 < rank);
+ HDassert(rank <= test_max_rank);
+ HDassert(edge_size >= 6);
+ HDassert(0 < checker_edge_size);
+ HDassert(checker_edge_size <= edge_size);
+ HDassert(test_max_rank <= LDSCT_DS_RANK);
+
+#if LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s mpi_rank = %d.\n", fcnName, mpi_rank);
+ HDfprintf(stdout, "%s rank = %d.\n", fcnName, rank);
+ HDfprintf(stdout, "%s edge_size = %d.\n", fcnName, edge_size);
+ HDfprintf(stdout, "%s checker_edge_size = %d.\n", fcnName, checker_edge_size);
+ HDfprintf(stdout, "%s first_expected_val = %d.\n", fcnName, (int)first_expected_val);
+ HDfprintf(stdout, "%s starts_in_checker = %d.\n", fcnName, (int)buf_starts_in_checker);
+ }
+#endif
+
+ /* Scan the buffer linearly; expected_value increments with every cell,
+ * matching the "natural numbers along the fastest axis" initialization
+ * described in the header comment above.
+ */
+ val_ptr = buf_ptr;
+ expected_value = first_expected_val;
+
+ /* Five nested do-while loops -- one per potential dimension.  At each
+ * level, i..m index the position along that dimension while v..z count
+ * the position inside the current checker; when a counter reaches
+ * checker_edge_size we have crossed into the adjacent checker, so the
+ * phase for that level toggles and the counter resets.  Dimensions the
+ * buffer does not have (rank < test_max_rank) execute their loop body
+ * exactly once -- see the (rank >= ...) terms in the loop conditions.
+ */
+ i = 0;
+ v = 0;
+ start_in_checker[0] = buf_starts_in_checker;
+ do {
+ if (v >= checker_edge_size) {
+
+ /* crossed a checker boundary along dim 0 -- toggle phase */
+ start_in_checker[0] = !start_in_checker[0];
+ v = 0;
+ }
+
+ j = 0;
+ w = 0;
+ start_in_checker[1] = start_in_checker[0];
+ do {
+ if (w >= checker_edge_size) {
+
+ start_in_checker[1] = !start_in_checker[1];
+ w = 0;
+ }
+
+ k = 0;
+ x = 0;
+ start_in_checker[2] = start_in_checker[1];
+ do {
+ if (x >= checker_edge_size) {
+
+ start_in_checker[2] = !start_in_checker[2];
+ x = 0;
+ }
+
+ l = 0;
+ y = 0;
+ start_in_checker[3] = start_in_checker[2];
+ do {
+ if (y >= checker_edge_size) {
+
+ start_in_checker[3] = !start_in_checker[3];
+ y = 0;
+ }
+
+ m = 0;
+ z = 0;
+#if LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%d, %d, %d, %d, %d:", i, j, k, l, m);
+ }
+#endif
+ in_checker = start_in_checker[3];
+ do {
+#if LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, " %d", (int)(*val_ptr));
+ }
+#endif
+ if (z >= checker_edge_size) {
+
+ /* crossed a checker boundary along the fastest dim */
+ in_checker = !in_checker;
+ z = 0;
+ }
+
+ if (in_checker) {
+
+ /* selected cell: must hold the expected sequential value */
+ if (*val_ptr != expected_value) {
+
+ good_data = FALSE;
+ }
+
+ /* zero out buffer for re-use */
+ *val_ptr = 0;
+ }
+ else if (*val_ptr != 0) {
+
+ /* unselected cell: must still hold the pre-read zero fill */
+ good_data = FALSE;
+
+ /* zero out buffer for re-use */
+ *val_ptr = 0;
+ }
+
+ val_ptr++;
+ expected_value++;
+ m++;
+ z++;
+
+ } while ((rank >= (test_max_rank - 4)) && (m < edge_size));
+#if LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "\n");
+ }
+#endif
+ l++;
+ y++;
+ } while ((rank >= (test_max_rank - 3)) && (l < edge_size));
+ k++;
+ x++;
+ } while ((rank >= (test_max_rank - 2)) && (k < edge_size));
+ j++;
+ w++;
+ } while ((rank >= (test_max_rank - 1)) && (j < edge_size));
+ i++;
+ v++;
+ } while ((rank >= test_max_rank) && (i < edge_size));
+
+ return (good_data);
+
+} /* lower_dim_size_comp_test__verify_data() */
+
+/*-------------------------------------------------------------------------
+ * Function: lower_dim_size_comp_test__run_test()
+ *
+ * Purpose: Verify that a bug in the computation of the size of the
+ * lower dimensions of a dataspace in H5S_obtain_datatype()
+ * has been corrected.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 11/11/09
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define LDSCT_DS_RANK 5
+
+static void
+lower_dim_size_comp_test__run_test(const int chunk_edge_size, const hbool_t use_collective_io,
+ const hid_t dset_type)
+{
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ const char *fcnName = "lower_dim_size_comp_test__run_test()";
+ int rank;
+ hsize_t dims[32];
+ hsize_t max_dims[32];
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+ const char *filename;
+ hbool_t data_ok = FALSE;
+ hbool_t mis_match = FALSE;
+ int i;
+ int start_index;
+ int stop_index;
+ int mrc;
+ int mpi_rank;
+ int mpi_size;
+ MPI_Comm mpi_comm = MPI_COMM_NULL;
+ MPI_Info mpi_info = MPI_INFO_NULL;
+ hid_t fid; /* HDF5 file ID */
+ hid_t acc_tpl; /* File access templates */
+ hid_t xfer_plist = H5P_DEFAULT;
+ size_t small_ds_size;
+ size_t small_ds_slice_size;
+ size_t large_ds_size;
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ size_t large_ds_slice_size;
+#endif
+ uint32_t expected_value;
+ uint32_t *small_ds_buf_0 = NULL;
+ uint32_t *small_ds_buf_1 = NULL;
+ uint32_t *large_ds_buf_0 = NULL;
+ uint32_t *large_ds_buf_1 = NULL;
+ uint32_t *ptr_0;
+ uint32_t *ptr_1;
+ hsize_t small_chunk_dims[LDSCT_DS_RANK];
+ hsize_t large_chunk_dims[LDSCT_DS_RANK];
+ hsize_t small_dims[LDSCT_DS_RANK];
+ hsize_t large_dims[LDSCT_DS_RANK];
+ hsize_t start[LDSCT_DS_RANK];
+ hsize_t stride[LDSCT_DS_RANK];
+ hsize_t count[LDSCT_DS_RANK];
+ hsize_t block[LDSCT_DS_RANK];
+ hsize_t small_sel_start[LDSCT_DS_RANK];
+ hsize_t large_sel_start[LDSCT_DS_RANK];
+ hid_t full_mem_small_ds_sid;
+ hid_t full_file_small_ds_sid;
+ hid_t mem_small_ds_sid;
+ hid_t file_small_ds_sid;
+ hid_t full_mem_large_ds_sid;
+ hid_t full_file_large_ds_sid;
+ hid_t mem_large_ds_sid;
+ hid_t file_large_ds_sid;
+ hid_t small_ds_dcpl_id = H5P_DEFAULT;
+ hid_t large_ds_dcpl_id = H5P_DEFAULT;
+ hid_t small_dataset; /* Dataset ID */
+ hid_t large_dataset; /* Dataset ID */
+ htri_t check; /* Shape comparison return value */
+ herr_t ret; /* Generic return value */
+
+ MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+ MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+ HDassert(mpi_size >= 1);
+
+ mpi_comm = MPI_COMM_WORLD;
+ mpi_info = MPI_INFO_NULL;
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: chunk_edge_size = %d.\n", fcnName, mpi_rank, (int)chunk_edge_size);
+ HDfprintf(stdout, "%s:%d: use_collective_io = %d.\n", fcnName, mpi_rank, (int)use_collective_io);
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+
+ small_ds_size = (size_t)((mpi_size + 1) * 1 * 1 * 10 * 10);
+ small_ds_slice_size = (size_t)(1 * 1 * 10 * 10);
+ large_ds_size = (size_t)((mpi_size + 1) * 10 * 10 * 10 * 10);
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ large_ds_slice_size = (size_t)(10 * 10 * 10 * 10);
+
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: small ds size / slice size = %d / %d.\n", fcnName, mpi_rank,
+ (int)small_ds_size, (int)small_ds_slice_size);
+ HDfprintf(stdout, "%s:%d: large ds size / slice size = %d / %d.\n", fcnName, mpi_rank,
+ (int)large_ds_size, (int)large_ds_slice_size);
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+
+ /* Allocate buffers */
+ small_ds_buf_0 = (uint32_t *)HDmalloc(sizeof(uint32_t) * small_ds_size);
+ VRFY((small_ds_buf_0 != NULL), "malloc of small_ds_buf_0 succeeded");
+
+ small_ds_buf_1 = (uint32_t *)HDmalloc(sizeof(uint32_t) * small_ds_size);
+ VRFY((small_ds_buf_1 != NULL), "malloc of small_ds_buf_1 succeeded");
+
+ large_ds_buf_0 = (uint32_t *)HDmalloc(sizeof(uint32_t) * large_ds_size);
+ VRFY((large_ds_buf_0 != NULL), "malloc of large_ds_buf_0 succeeded");
+
+ large_ds_buf_1 = (uint32_t *)HDmalloc(sizeof(uint32_t) * large_ds_size);
+ VRFY((large_ds_buf_1 != NULL), "malloc of large_ds_buf_1 succeeded");
+
+ /* initialize the buffers */
+
+ ptr_0 = small_ds_buf_0;
+ ptr_1 = small_ds_buf_1;
+
+ for (i = 0; i < (int)small_ds_size; i++) {
+
+ *ptr_0 = (uint32_t)i;
+ *ptr_1 = 0;
+
+ ptr_0++;
+ ptr_1++;
+ }
+
+ ptr_0 = large_ds_buf_0;
+ ptr_1 = large_ds_buf_1;
+
+ for (i = 0; i < (int)large_ds_size; i++) {
+
+ *ptr_0 = (uint32_t)i;
+ *ptr_1 = 0;
+
+ ptr_0++;
+ ptr_1++;
+ }
+
+ /* get the file name */
+
+ filename = (const char *)PARATESTFILE /* GetTestParameters() */;
+ HDassert(filename != NULL);
+
+ /* ----------------------------------------
+ * CREATE AN HDF5 FILE WITH PARALLEL ACCESS
+ * ---------------------------------------*/
+ /* setup file access template */
+ acc_tpl = create_faccess_plist(mpi_comm, mpi_info, facc_type);
+ VRFY((acc_tpl >= 0), "create_faccess_plist() succeeded");
+
+ /* create the file collectively */
+ fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl);
+ VRFY((fid >= 0), "H5Fcreate succeeded");
+
+ MESG("File opened.");
+
+ /* Release file-access template */
+ ret = H5Pclose(acc_tpl);
+ VRFY((ret >= 0), "H5Pclose(acc_tpl) succeeded");
+
+ /* setup dims: */
+ small_dims[0] = (hsize_t)(mpi_size + 1);
+ small_dims[1] = 1;
+ small_dims[2] = 1;
+ small_dims[3] = 10;
+ small_dims[4] = 10;
+
+ large_dims[0] = (hsize_t)(mpi_size + 1);
+ large_dims[1] = 10;
+ large_dims[2] = 10;
+ large_dims[3] = 10;
+ large_dims[4] = 10;
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: small_dims[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)small_dims[0],
+ (int)small_dims[1], (int)small_dims[2], (int)small_dims[3], (int)small_dims[4]);
+ HDfprintf(stdout, "%s:%d: large_dims[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)large_dims[0],
+ (int)large_dims[1], (int)large_dims[2], (int)large_dims[3], (int)large_dims[4]);
+ }
+#endif
+
+ /* create dataspaces */
+
+ full_mem_small_ds_sid = H5Screate_simple(5, small_dims, NULL);
+ VRFY((full_mem_small_ds_sid != 0), "H5Screate_simple() full_mem_small_ds_sid succeeded");
+
+ full_file_small_ds_sid = H5Screate_simple(5, small_dims, NULL);
+ VRFY((full_file_small_ds_sid != 0), "H5Screate_simple() full_file_small_ds_sid succeeded");
+
+ mem_small_ds_sid = H5Screate_simple(5, small_dims, NULL);
+ VRFY((mem_small_ds_sid != 0), "H5Screate_simple() mem_small_ds_sid succeeded");
+
+ file_small_ds_sid = H5Screate_simple(5, small_dims, NULL);
+ VRFY((file_small_ds_sid != 0), "H5Screate_simple() file_small_ds_sid succeeded");
+
+ full_mem_large_ds_sid = H5Screate_simple(5, large_dims, NULL);
+ VRFY((full_mem_large_ds_sid != 0), "H5Screate_simple() full_mem_large_ds_sid succeeded");
+
+ full_file_large_ds_sid = H5Screate_simple(5, large_dims, NULL);
+ VRFY((full_file_large_ds_sid != 0), "H5Screate_simple() full_file_large_ds_sid succeeded");
+
+ mem_large_ds_sid = H5Screate_simple(5, large_dims, NULL);
+ VRFY((mem_large_ds_sid != 0), "H5Screate_simple() mem_large_ds_sid succeeded");
+
+ file_large_ds_sid = H5Screate_simple(5, large_dims, NULL);
+ VRFY((file_large_ds_sid != 0), "H5Screate_simple() file_large_ds_sid succeeded");
+
+ /* Select the entire extent of the full small ds dataspaces */
+ ret = H5Sselect_all(full_mem_small_ds_sid);
+ VRFY((ret != FAIL), "H5Sselect_all(full_mem_small_ds_sid) succeeded");
+
+ ret = H5Sselect_all(full_file_small_ds_sid);
+ VRFY((ret != FAIL), "H5Sselect_all(full_file_small_ds_sid) succeeded");
+
+ /* Select the entire extent of the full large ds dataspaces */
+ ret = H5Sselect_all(full_mem_large_ds_sid);
+ VRFY((ret != FAIL), "H5Sselect_all(full_mem_large_ds_sid) succeeded");
+
+ ret = H5Sselect_all(full_file_large_ds_sid);
+ VRFY((ret != FAIL), "H5Sselect_all(full_file_large_ds_sid) succeeded");
+
+ /* if chunk edge size is greater than zero, set up the small and
+ * large data set creation property lists to specify chunked
+ * datasets.
+ */
+ if (chunk_edge_size > 0) {
+
+ small_chunk_dims[0] = (hsize_t)(1);
+ small_chunk_dims[1] = small_chunk_dims[2] = (hsize_t)1;
+ small_chunk_dims[3] = small_chunk_dims[4] = (hsize_t)chunk_edge_size;
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: small chunk dims[] = %d %d %d %d %d\n", fcnName, mpi_rank,
+ (int)small_chunk_dims[0], (int)small_chunk_dims[1], (int)small_chunk_dims[2],
+ (int)small_chunk_dims[3], (int)small_chunk_dims[4]);
+ }
+#endif
+
+ small_ds_dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((ret != FAIL), "H5Pcreate() small_ds_dcpl_id succeeded");
+
+ ret = H5Pset_layout(small_ds_dcpl_id, H5D_CHUNKED);
+ VRFY((ret != FAIL), "H5Pset_layout() small_ds_dcpl_id succeeded");
+
+ ret = H5Pset_chunk(small_ds_dcpl_id, 5, small_chunk_dims);
+ VRFY((ret != FAIL), "H5Pset_chunk() small_ds_dcpl_id succeeded");
+
+ large_chunk_dims[0] = (hsize_t)(1);
+ large_chunk_dims[1] = large_chunk_dims[2] = large_chunk_dims[3] = large_chunk_dims[4] =
+ (hsize_t)chunk_edge_size;
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: large chunk dims[] = %d %d %d %d %d\n", fcnName, mpi_rank,
+ (int)large_chunk_dims[0], (int)large_chunk_dims[1], (int)large_chunk_dims[2],
+ (int)large_chunk_dims[3], (int)large_chunk_dims[4]);
+ }
+#endif
+
+ large_ds_dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+ VRFY((ret != FAIL), "H5Pcreate() large_ds_dcpl_id succeeded");
+
+ ret = H5Pset_layout(large_ds_dcpl_id, H5D_CHUNKED);
+ VRFY((ret != FAIL), "H5Pset_layout() large_ds_dcpl_id succeeded");
+
+ ret = H5Pset_chunk(large_ds_dcpl_id, 5, large_chunk_dims);
+ VRFY((ret != FAIL), "H5Pset_chunk() large_ds_dcpl_id succeeded");
+ }
+
+ /* create the small dataset */
+ small_dataset = H5Dcreate2(fid, "small_dataset", dset_type, file_small_ds_sid, H5P_DEFAULT,
+ small_ds_dcpl_id, H5P_DEFAULT);
+ VRFY((ret >= 0), "H5Dcreate2() small_dataset succeeded");
+
+ /* create the large dataset */
+ large_dataset = H5Dcreate2(fid, "large_dataset", dset_type, file_large_ds_sid, H5P_DEFAULT,
+ large_ds_dcpl_id, H5P_DEFAULT);
+ VRFY((ret >= 0), "H5Dcreate2() large_dataset succeeded");
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: small/large ds id = %d / %d.\n", fcnName, mpi_rank, (int)small_dataset,
+ (int)large_dataset);
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+
+ /* setup xfer property list */
+ xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+ VRFY((xfer_plist >= 0), "H5Pcreate(H5P_DATASET_XFER) succeeded");
+
+ ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+
+ if (!use_collective_io) {
+
+ ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist, H5FD_MPIO_INDIVIDUAL_IO);
+ VRFY((ret >= 0), "H5Pset_dxpl_mpio_collective_opt() succeeded");
+ }
+
+ /* setup selection to write initial data to the small data sets */
+ start[0] = (hsize_t)(mpi_rank + 1);
+ start[1] = start[2] = start[3] = start[4] = 0;
+
+ stride[0] = (hsize_t)(2 * (mpi_size + 1));
+ stride[1] = stride[2] = 2;
+ stride[3] = stride[4] = 2 * 10;
+
+ count[0] = count[1] = count[2] = count[3] = count[4] = 1;
+
+ block[0] = block[1] = block[2] = 1;
+ block[3] = block[4] = 10;
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: settings for small data set initialization.\n", fcnName, mpi_rank);
+ HDfprintf(stdout, "%s:%d: start[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)start[0],
+ (int)start[1], (int)start[2], (int)start[3], (int)start[4]);
+ HDfprintf(stdout, "%s:%d: stride[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)stride[0],
+ (int)stride[1], (int)stride[2], (int)stride[3], (int)stride[4]);
+ HDfprintf(stdout, "%s:%d: count[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)count[0],
+ (int)count[1], (int)count[2], (int)count[3], (int)count[4]);
+ HDfprintf(stdout, "%s:%d: block[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)block[0],
+ (int)block[1], (int)block[2], (int)block[3], (int)block[4]);
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+
+ /* setup selections for writing initial data to the small data set */
+ ret = H5Sselect_hyperslab(mem_small_ds_sid, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(mem_small_ds_sid, set) succeeded");
+
+ ret = H5Sselect_hyperslab(file_small_ds_sid, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(file_small_ds_sid, set) succeeded");
+
+ if (MAINPROCESS) { /* add an additional slice to the selections */
+
+ start[0] = 0;
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: added settings for main process.\n", fcnName, mpi_rank);
+ HDfprintf(stdout, "%s:%d: start[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)start[0],
+ (int)start[1], (int)start[2], (int)start[3], (int)start[4]);
+ HDfprintf(stdout, "%s:%d: stride[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)stride[0],
+ (int)stride[1], (int)stride[2], (int)stride[3], (int)stride[4]);
+ HDfprintf(stdout, "%s:%d: count[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)count[0],
+ (int)count[1], (int)count[2], (int)count[3], (int)count[4]);
+ HDfprintf(stdout, "%s:%d: block[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)block[0],
+ (int)block[1], (int)block[2], (int)block[3], (int)block[4]);
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+
+ ret = H5Sselect_hyperslab(mem_small_ds_sid, H5S_SELECT_OR, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(mem_small_ds_sid, or) succeeded");
+
+ ret = H5Sselect_hyperslab(file_small_ds_sid, H5S_SELECT_OR, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(file_small_ds_sid, or) succeeded");
+ }
+
+ check = H5Sselect_valid(mem_small_ds_sid);
+ VRFY((check == TRUE), "H5Sselect_valid(mem_small_ds_sid) returns TRUE");
+
+ check = H5Sselect_valid(file_small_ds_sid);
+ VRFY((check == TRUE), "H5Sselect_valid(file_small_ds_sid) returns TRUE");
+
+ /* write the initial value of the small data set to file */
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: writing init value of small ds to file.\n", fcnName, mpi_rank);
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+ ret = H5Dwrite(small_dataset, dset_type, mem_small_ds_sid, file_small_ds_sid, xfer_plist, small_ds_buf_0);
+ VRFY((ret >= 0), "H5Dwrite() small_dataset initial write succeeded");
+
+ /* sync with the other processes before reading data */
+ mrc = MPI_Barrier(MPI_COMM_WORLD);
+ VRFY((mrc == MPI_SUCCESS), "Sync after small dataset writes");
+
+ /* read the small data set back to verify that it contains the
+ * expected data. Note that each process reads in the entire
+ * data set and verifies it.
+ */
+ ret = H5Dread(small_dataset, H5T_NATIVE_UINT32, full_mem_small_ds_sid, full_file_small_ds_sid, xfer_plist,
+ small_ds_buf_1);
+ VRFY((ret >= 0), "H5Dread() small_dataset initial read succeeded");
+
+ /* sync with the other processes before checking data */
+ mrc = MPI_Barrier(MPI_COMM_WORLD);
+ VRFY((mrc == MPI_SUCCESS), "Sync after small dataset writes");
+
+ /* verify that the correct data was written to the small data set,
+ * and reset the buffer to zero in passing.
+ */
+ expected_value = 0;
+ mis_match = FALSE;
+ ptr_1 = small_ds_buf_1;
+
+ i = 0;
+ for (i = 0; i < (int)small_ds_size; i++) {
+
+ if (*ptr_1 != expected_value) {
+
+ mis_match = TRUE;
+ }
+
+ *ptr_1 = (uint32_t)0;
+
+ ptr_1++;
+ expected_value++;
+ }
+ VRFY((mis_match == FALSE), "small ds init data good.");
+
+ /* setup selections for writing initial data to the large data set */
+ start[0] = (hsize_t)(mpi_rank + 1);
+ start[1] = start[2] = start[3] = start[4] = (hsize_t)0;
+
+ stride[0] = (hsize_t)(2 * (mpi_size + 1));
+ stride[1] = stride[2] = stride[3] = stride[4] = (hsize_t)(2 * 10);
+
+ count[0] = count[1] = count[2] = count[3] = count[4] = (hsize_t)1;
+
+ block[0] = (hsize_t)1;
+ block[1] = block[2] = block[3] = block[4] = (hsize_t)10;
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: settings for large data set initialization.\n", fcnName, mpi_rank);
+ HDfprintf(stdout, "%s:%d: start[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)start[0],
+ (int)start[1], (int)start[2], (int)start[3], (int)start[4]);
+ HDfprintf(stdout, "%s:%d: stride[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)stride[0],
+ (int)stride[1], (int)stride[2], (int)stride[3], (int)stride[4]);
+ HDfprintf(stdout, "%s:%d: count[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)count[0],
+ (int)count[1], (int)count[2], (int)count[3], (int)count[4]);
+ HDfprintf(stdout, "%s:%d: block[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)block[0],
+ (int)block[1], (int)block[2], (int)block[3], (int)block[4]);
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+
+ ret = H5Sselect_hyperslab(mem_large_ds_sid, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(mem_large_ds_sid, set) succeeded");
+
+ ret = H5Sselect_hyperslab(file_large_ds_sid, H5S_SELECT_SET, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(file_large_ds_sid, set) succeeded");
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s%d: H5Sget_select_npoints(mem_large_ds_sid) = %d.\n", fcnName, mpi_rank,
+ (int)H5Sget_select_npoints(mem_large_ds_sid));
+ HDfprintf(stdout, "%s%d: H5Sget_select_npoints(file_large_ds_sid) = %d.\n", fcnName, mpi_rank,
+ (int)H5Sget_select_npoints(file_large_ds_sid));
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+
+ if (MAINPROCESS) { /* add an additional slice to the selections */
+
+ start[0] = (hsize_t)0;
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: added settings for main process.\n", fcnName, mpi_rank);
+ HDfprintf(stdout, "%s:%d: start[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)start[0],
+ (int)start[1], (int)start[2], (int)start[3], (int)start[4]);
+ HDfprintf(stdout, "%s:%d: stride[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)stride[0],
+ (int)stride[1], (int)stride[2], (int)stride[3], (int)stride[4]);
+ HDfprintf(stdout, "%s:%d: count[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)count[0],
+ (int)count[1], (int)count[2], (int)count[3], (int)count[4]);
+ HDfprintf(stdout, "%s:%d: block[] = %d %d %d %d %d\n", fcnName, mpi_rank, (int)block[0],
+ (int)block[1], (int)block[2], (int)block[3], (int)block[4]);
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+
+ ret = H5Sselect_hyperslab(mem_large_ds_sid, H5S_SELECT_OR, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(mem_large_ds_sid, or) succeeded");
+
+ ret = H5Sselect_hyperslab(file_large_ds_sid, H5S_SELECT_OR, start, stride, count, block);
+ VRFY((ret >= 0), "H5Sselect_hyperslab(file_large_ds_sid, or) succeeded");
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s%d: H5Sget_select_npoints(mem_large_ds_sid) = %d.\n", fcnName, mpi_rank,
+ (int)H5Sget_select_npoints(mem_large_ds_sid));
+ HDfprintf(stdout, "%s%d: H5Sget_select_npoints(file_large_ds_sid) = %d.\n", fcnName, mpi_rank,
+ (int)H5Sget_select_npoints(file_large_ds_sid));
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+ }
+
+ /* try clipping the selection back to the large dataspace proper */
+ start[0] = start[1] = start[2] = start[3] = start[4] = (hsize_t)0;
+
+ stride[0] = (hsize_t)(2 * (mpi_size + 1));
+ stride[1] = stride[2] = stride[3] = stride[4] = (hsize_t)(2 * 10);
+
+ count[0] = count[1] = count[2] = count[3] = count[4] = (hsize_t)1;
+
+ block[0] = (hsize_t)(mpi_size + 1);
+ block[1] = block[2] = block[3] = block[4] = (hsize_t)10;
+
+ ret = H5Sselect_hyperslab(mem_large_ds_sid, H5S_SELECT_AND, start, stride, count, block);
+ VRFY((ret != FAIL), "H5Sselect_hyperslab(mem_large_ds_sid, and) succeeded");
+
+ ret = H5Sselect_hyperslab(file_large_ds_sid, H5S_SELECT_AND, start, stride, count, block);
+ VRFY((ret != FAIL), "H5Sselect_hyperslab(file_large_ds_sid, and) succeeded");
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+
+ rank = H5Sget_simple_extent_dims(mem_large_ds_sid, dims, max_dims);
+ HDfprintf(stdout, "%s:%d: mem_large_ds_sid dims[%d] = %d %d %d %d %d\n", fcnName, mpi_rank, rank,
+ (int)dims[0], (int)dims[1], (int)dims[2], (int)dims[3], (int)dims[4]);
+
+ rank = H5Sget_simple_extent_dims(file_large_ds_sid, dims, max_dims);
+ HDfprintf(stdout, "%s:%d: file_large_ds_sid dims[%d] = %d %d %d %d %d\n", fcnName, mpi_rank, rank,
+ (int)dims[0], (int)dims[1], (int)dims[2], (int)dims[3], (int)dims[4]);
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+
+ check = H5Sselect_valid(mem_large_ds_sid);
+ VRFY((check == TRUE), "H5Sselect_valid(mem_large_ds_sid) returns TRUE");
+
+ check = H5Sselect_valid(file_large_ds_sid);
+ VRFY((check == TRUE), "H5Sselect_valid(file_large_ds_sid) returns TRUE");
+
+ /* write the initial value of the large data set to file */
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: writing init value of large ds to file.\n", fcnName, mpi_rank);
+ HDfprintf(stdout, "%s:%d: large_dataset = %d.\n", fcnName, mpi_rank, (int)large_dataset);
+ HDfprintf(stdout, "%s:%d: mem_large_ds_sid = %d, file_large_ds_sid = %d.\n", fcnName, mpi_rank,
+ (int)mem_large_ds_sid, (int)file_large_ds_sid);
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+
+ ret = H5Dwrite(large_dataset, dset_type, mem_large_ds_sid, file_large_ds_sid, xfer_plist, large_ds_buf_0);
+
+ if (ret < 0)
+ H5Eprint2(H5E_DEFAULT, stderr);
+ VRFY((ret >= 0), "H5Dwrite() large_dataset initial write succeeded");
+
+ /* sync with the other processes before checking data */
+ mrc = MPI_Barrier(MPI_COMM_WORLD);
+ VRFY((mrc == MPI_SUCCESS), "Sync after large dataset writes");
+
+ /* read the large data set back to verify that it contains the
+ * expected data. Note that each process reads in the entire
+ * data set.
+ */
+ ret = H5Dread(large_dataset, H5T_NATIVE_UINT32, full_mem_large_ds_sid, full_file_large_ds_sid, xfer_plist,
+ large_ds_buf_1);
+ VRFY((ret >= 0), "H5Dread() large_dataset initial read succeeded");
+
+ /* verify that the correct data was written to the large data set.
+ * in passing, reset the buffer to zeros
+ */
+ expected_value = 0;
+ mis_match = FALSE;
+ ptr_1 = large_ds_buf_1;
+
+ i = 0;
+ for (i = 0; i < (int)large_ds_size; i++) {
+
+ if (*ptr_1 != expected_value) {
+
+ mis_match = TRUE;
+ }
+
+ *ptr_1 = (uint32_t)0;
+
+ ptr_1++;
+ expected_value++;
+ }
+ VRFY((mis_match == FALSE), "large ds init data good.");
+
+ /***********************************/
+ /***** INITIALIZATION COMPLETE *****/
+ /***********************************/
+
+ /* read a checkerboard selection of the process slice of the
+ * small on disk data set into the process slice of the large
+ * in memory data set, and verify the data read.
+ */
+
+ small_sel_start[0] = (hsize_t)(mpi_rank + 1);
+ small_sel_start[1] = small_sel_start[2] = small_sel_start[3] = small_sel_start[4] = 0;
+
+ lower_dim_size_comp_test__select_checker_board(mpi_rank, file_small_ds_sid,
+ /* tgt_rank = */ 5, small_dims,
+ /* checker_edge_size = */ 3,
+ /* sel_rank */ 2, small_sel_start);
+
+ expected_value =
+ (uint32_t)((small_sel_start[0] * small_dims[1] * small_dims[2] * small_dims[3] * small_dims[4]) +
+ (small_sel_start[1] * small_dims[2] * small_dims[3] * small_dims[4]) +
+ (small_sel_start[2] * small_dims[3] * small_dims[4]) +
+ (small_sel_start[3] * small_dims[4]) + (small_sel_start[4]));
+
+ large_sel_start[0] = (hsize_t)(mpi_rank + 1);
+ large_sel_start[1] = 5;
+ large_sel_start[2] = large_sel_start[3] = large_sel_start[4] = 0;
+
+ lower_dim_size_comp_test__select_checker_board(mpi_rank, mem_large_ds_sid,
+ /* tgt_rank = */ 5, large_dims,
+ /* checker_edge_size = */ 3,
+ /* sel_rank = */ 2, large_sel_start);
+
+ /* verify that H5Sselect_shape_same() reports the two
+ * selections as having the same shape.
+ */
+ check = H5Sselect_shape_same(mem_large_ds_sid, file_small_ds_sid);
+ VRFY((check == TRUE), "H5Sselect_shape_same passed (1)");
+
+ ret = H5Dread(small_dataset, H5T_NATIVE_UINT32, mem_large_ds_sid, file_small_ds_sid, xfer_plist,
+ large_ds_buf_1);
+
+ VRFY((ret >= 0), "H5Sread() slice from small ds succeeded.");
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: H5Dread() returns.\n", fcnName, mpi_rank);
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+
+ /* verify that expected data is retrieved */
+
+ data_ok = TRUE;
+
+ start_index = (int)((large_sel_start[0] * large_dims[1] * large_dims[2] * large_dims[3] * large_dims[4]) +
+ (large_sel_start[1] * large_dims[2] * large_dims[3] * large_dims[4]) +
+ (large_sel_start[2] * large_dims[3] * large_dims[4]) +
+ (large_sel_start[3] * large_dims[4]) + (large_sel_start[4]));
+
+ stop_index = start_index + (int)small_ds_slice_size;
+
+ HDassert(0 <= start_index);
+ HDassert(start_index < stop_index);
+ HDassert(stop_index <= (int)large_ds_size);
+
+ ptr_1 = large_ds_buf_1;
+
+ for (i = 0; i < start_index; i++) {
+
+ if (*ptr_1 != (uint32_t)0) {
+
+ data_ok = FALSE;
+ *ptr_1 = (uint32_t)0;
+ }
+
+ ptr_1++;
+ }
+
+ VRFY((data_ok == TRUE), "slice read from small ds data good(1).");
+
+ data_ok = lower_dim_size_comp_test__verify_data(ptr_1,
+#if LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG
+ mpi_rank,
+#endif /* LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG */
+ /* rank */ 2,
+ /* edge_size */ 10,
+ /* checker_edge_size */ 3, expected_value,
+ /* buf_starts_in_checker */ TRUE);
+
+ VRFY((data_ok == TRUE), "slice read from small ds data good(2).");
+
+ data_ok = TRUE;
+
+ ptr_1 += small_ds_slice_size;
+
+ for (i = stop_index; i < (int)large_ds_size; i++) {
+
+ if (*ptr_1 != (uint32_t)0) {
+
+ data_ok = FALSE;
+ *ptr_1 = (uint32_t)0;
+ }
+
+ ptr_1++;
+ }
+
+ VRFY((data_ok == TRUE), "slice read from small ds data good(3).");
+
+ /* read a checkerboard selection of a slice of the process slice of
+ * the large on disk data set into the process slice of the small
+ * in memory data set, and verify the data read.
+ */
+
+ small_sel_start[0] = (hsize_t)(mpi_rank + 1);
+ small_sel_start[1] = small_sel_start[2] = small_sel_start[3] = small_sel_start[4] = 0;
+
+ lower_dim_size_comp_test__select_checker_board(mpi_rank, mem_small_ds_sid,
+ /* tgt_rank = */ 5, small_dims,
+ /* checker_edge_size = */ 3,
+ /* sel_rank */ 2, small_sel_start);
+
+ large_sel_start[0] = (hsize_t)(mpi_rank + 1);
+ large_sel_start[1] = 5;
+ large_sel_start[2] = large_sel_start[3] = large_sel_start[4] = 0;
+
+ lower_dim_size_comp_test__select_checker_board(mpi_rank, file_large_ds_sid,
+ /* tgt_rank = */ 5, large_dims,
+ /* checker_edge_size = */ 3,
+ /* sel_rank = */ 2, large_sel_start);
+
+ /* verify that H5Sselect_shape_same() reports the two
+ * selections as having the same shape.
+ */
+ check = H5Sselect_shape_same(mem_small_ds_sid, file_large_ds_sid);
+ VRFY((check == TRUE), "H5Sselect_shape_same passed (2)");
+
+ ret = H5Dread(large_dataset, H5T_NATIVE_UINT32, mem_small_ds_sid, file_large_ds_sid, xfer_plist,
+ small_ds_buf_1);
+
+ VRFY((ret >= 0), "H5Sread() slice from large ds succeeded.");
+
+#if LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: H5Dread() returns.\n", fcnName, mpi_rank);
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__RUN_TEST__DEBUG */
+
+ /* verify that expected data is retrieved */
+
+ data_ok = TRUE;
+
+ expected_value =
+ (uint32_t)((large_sel_start[0] * large_dims[1] * large_dims[2] * large_dims[3] * large_dims[4]) +
+ (large_sel_start[1] * large_dims[2] * large_dims[3] * large_dims[4]) +
+ (large_sel_start[2] * large_dims[3] * large_dims[4]) +
+ (large_sel_start[3] * large_dims[4]) + (large_sel_start[4]));
+
+ start_index = (int)(mpi_rank + 1) * (int)small_ds_slice_size;
+
+ stop_index = start_index + (int)small_ds_slice_size;
+
+ HDassert(0 <= start_index);
+ HDassert(start_index < stop_index);
+ HDassert(stop_index <= (int)small_ds_size);
+
+ ptr_1 = small_ds_buf_1;
+
+ for (i = 0; i < start_index; i++) {
+
+ if (*ptr_1 != (uint32_t)0) {
+
+ data_ok = FALSE;
+ *ptr_1 = (uint32_t)0;
+ }
+
+ ptr_1++;
+ }
+
+ VRFY((data_ok == TRUE), "slice read from large ds data good(1).");
+
+ data_ok = lower_dim_size_comp_test__verify_data(ptr_1,
+#if LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG
+ mpi_rank,
+#endif /* LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG */
+ /* rank */ 2,
+ /* edge_size */ 10,
+ /* checker_edge_size */ 3, expected_value,
+ /* buf_starts_in_checker */ TRUE);
+
+ VRFY((data_ok == TRUE), "slice read from large ds data good(2).");
+
+ data_ok = TRUE;
+
+ ptr_1 += small_ds_slice_size;
+
+ for (i = stop_index; i < (int)small_ds_size; i++) {
+
+ if (*ptr_1 != (uint32_t)0) {
+
+#if LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG
+ if (mpi_rank == LOWER_DIM_SIZE_COMP_TEST_DEBUG_TARGET_RANK) {
+ HDfprintf(stdout, "%s:%d: unexpected value at index %d: %d.\n", fcnName, mpi_rank, (int)i,
+ (int)(*ptr_1));
+ }
+#endif /* LOWER_DIM_SIZE_COMP_TEST__VERIFY_DATA__DEBUG */
+
+ data_ok = FALSE;
+ *ptr_1 = (uint32_t)0;
+ }
+
+ ptr_1++;
+ }
+
+ VRFY((data_ok == TRUE), "slice read from large ds data good(3).");
+
+ /* Close dataspaces */
+ ret = H5Sclose(full_mem_small_ds_sid);
+ VRFY((ret != FAIL), "H5Sclose(full_mem_small_ds_sid) succeeded");
+
+ ret = H5Sclose(full_file_small_ds_sid);
+ VRFY((ret != FAIL), "H5Sclose(full_file_small_ds_sid) succeeded");
+
+ ret = H5Sclose(mem_small_ds_sid);
+ VRFY((ret != FAIL), "H5Sclose(mem_small_ds_sid) succeeded");
+
+ ret = H5Sclose(file_small_ds_sid);
+ VRFY((ret != FAIL), "H5Sclose(file_small_ds_sid) succeeded");
+
+ ret = H5Sclose(full_mem_large_ds_sid);
+ VRFY((ret != FAIL), "H5Sclose(full_mem_large_ds_sid) succeeded");
+
+ ret = H5Sclose(full_file_large_ds_sid);
+ VRFY((ret != FAIL), "H5Sclose(full_file_large_ds_sid) succeeded");
+
+ ret = H5Sclose(mem_large_ds_sid);
+ VRFY((ret != FAIL), "H5Sclose(mem_large_ds_sid) succeeded");
+
+ ret = H5Sclose(file_large_ds_sid);
+ VRFY((ret != FAIL), "H5Sclose(file_large_ds_sid) succeeded");
+
+ /* Close Datasets */
+ ret = H5Dclose(small_dataset);
+ VRFY((ret != FAIL), "H5Dclose(small_dataset) succeeded");
+
+ ret = H5Dclose(large_dataset);
+ VRFY((ret != FAIL), "H5Dclose(large_dataset) succeeded");
+
+ /* close the file collectively */
+ MESG("about to close file.");
+ ret = H5Fclose(fid);
+ VRFY((ret != FAIL), "file close succeeded");
+
+ /* Free memory buffers */
+ if (small_ds_buf_0 != NULL)
+ HDfree(small_ds_buf_0);
+ if (small_ds_buf_1 != NULL)
+ HDfree(small_ds_buf_1);
+
+ if (large_ds_buf_0 != NULL)
+ HDfree(large_ds_buf_0);
+ if (large_ds_buf_1 != NULL)
+ HDfree(large_ds_buf_1);
+
+ return;
+
+} /* lower_dim_size_comp_test__run_test() */
+
+/*-------------------------------------------------------------------------
+ * Function: lower_dim_size_comp_test()
+ *
+ * Purpose: Test to see if an error in the computation of the size
+ * of the lower dimensions in H5S_obtain_datatype() has
+ * been corrected.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 11/11/09
+ *
+ *-------------------------------------------------------------------------
+ */
+
+void
+lower_dim_size_comp_test(void)
+{
+    /* const char *fcnName = "lower_dim_size_comp_test()"; */
+    int chunk_edge_size = 0;
+    int use_collective_io;
+    int mpi_rank;
+
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file or dataset aren't supported with this connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    /* The test body fills uint32_t buffers for datasets created with
+     * H5T_NATIVE_UINT, so the two types must have the same size.
+     */
+    HDcompile_assert(sizeof(uint32_t) == sizeof(unsigned));
+
+    /* Exercise the test body under both independent (use_collective_io == 0)
+     * and collective (use_collective_io == 1) I/O, and for each mode with
+     * both a contiguous (chunk_edge_size == 0) and a chunked
+     * (chunk_edge_size == 5) dataset layout.
+     */
+    for (use_collective_io = 0; use_collective_io <= 1; use_collective_io++) {
+        chunk_edge_size = 0;
+        lower_dim_size_comp_test__run_test(chunk_edge_size, (hbool_t)use_collective_io, H5T_NATIVE_UINT);
+
+        chunk_edge_size = 5;
+        lower_dim_size_comp_test__run_test(chunk_edge_size, (hbool_t)use_collective_io, H5T_NATIVE_UINT);
+    } /* end for */
+
+    return;
+} /* lower_dim_size_comp_test() */
+
+/*-------------------------------------------------------------------------
+ * Function: link_chunk_collective_io_test()
+ *
+ * Purpose: Test to verify that an error in MPI type management in
+ * H5D_link_chunk_collective_io() has been corrected.
+ * In this bug, we used to free MPI types regardless of
+ * whether they were basic or derived.
+ *
+ * This test is based on a bug report kindly provided by
+ * Rob Latham of the MPICH team and ANL.
+ *
+ * The basic thrust of the test is to cause a process
+ * to participate in a collective I/O in which it:
+ *
+ * 1) Reads or writes exactly one chunk,
+ *
+ * 2) Has no in memory buffer for any other chunk.
+ *
+ *              The test differs from Rob Latham's bug report in
+ *              that it runs with an arbitrary number of processes,
+ * and uses a 1 dimensional dataset.
+ *
+ * Return: void
+ *
+ * Programmer: JRM -- 12/16/09
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define LINK_CHUNK_COLLECTIVE_IO_TEST_CHUNK_SIZE 16
+
+void
+link_chunk_collective_io_test(void)
+{
+    /* const char *fcnName = "link_chunk_collective_io_test()"; */
+    const char *filename;
+    hbool_t     mis_match = FALSE;
+    int         i;
+    int         mrc;
+    int         mpi_rank;
+    int         mpi_size;
+    MPI_Comm    mpi_comm  = MPI_COMM_WORLD;
+    MPI_Info    mpi_info  = MPI_INFO_NULL;
+    hsize_t     count[1]  = {1};
+    hsize_t     stride[1] = {2 * LINK_CHUNK_COLLECTIVE_IO_TEST_CHUNK_SIZE};
+    hsize_t     block[1]  = {LINK_CHUNK_COLLECTIVE_IO_TEST_CHUNK_SIZE};
+    hsize_t     start[1];
+    hsize_t     dims[1];
+    hsize_t     chunk_dims[1] = {LINK_CHUNK_COLLECTIVE_IO_TEST_CHUNK_SIZE};
+    herr_t      ret; /* Generic return value */
+    hid_t       file_id;
+    hid_t       acc_tpl;
+    hid_t       dset_id;
+    hid_t       file_ds_sid;
+    hid_t       write_mem_ds_sid;
+    hid_t       read_mem_ds_sid;
+    hid_t       ds_dcpl_id;
+    hid_t       xfer_plist;
+    double      diff;
+    double      expected_value;
+    double      local_data_written[LINK_CHUNK_COLLECTIVE_IO_TEST_CHUNK_SIZE];
+    double      local_data_read[LINK_CHUNK_COLLECTIVE_IO_TEST_CHUNK_SIZE];
+
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Make sure the connector supports the API functions being tested */
+    if (!(vol_cap_flags_g & H5VL_CAP_FLAG_FILE_BASIC) || !(vol_cap_flags_g & H5VL_CAP_FLAG_DATASET_BASIC)) {
+        if (MAINPROCESS) {
+            puts("SKIPPED");
+            printf("    API functions for basic file or dataset aren't supported with this connector\n");
+            fflush(stdout);
+        }
+
+        return;
+    }
+
+    HDassert(mpi_size > 0);
+
+    /* get the file name */
+    filename = (const char *)PARATESTFILE /* GetTestParameters() */;
+    HDassert(filename != NULL);
+
+    /* setup file access template */
+    acc_tpl = create_faccess_plist(mpi_comm, mpi_info, facc_type);
+    VRFY((acc_tpl >= 0), "create_faccess_plist() succeeded");
+
+    /* create the file collectively */
+    file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl);
+    VRFY((file_id >= 0), "H5Fcreate succeeded");
+
+    MESG("File opened.");
+
+    /* Release file-access template */
+    ret = H5Pclose(acc_tpl);
+    VRFY((ret >= 0), "H5Pclose(acc_tpl) succeeded");
+
+    /* setup dims: one chunk's worth of elements per MPI rank */
+    dims[0] = ((hsize_t)mpi_size) * ((hsize_t)(LINK_CHUNK_COLLECTIVE_IO_TEST_CHUNK_SIZE));
+
+    /* setup mem and file dataspaces */
+    /* NOTE(review): these three VRFY checks use "!= 0", but H5Screate_simple()
+     * reports failure with a negative hid_t -- ">= 0" would be the stricter
+     * check.  Left as-is here.
+     */
+    write_mem_ds_sid = H5Screate_simple(1, chunk_dims, NULL);
+    VRFY((write_mem_ds_sid != 0), "H5Screate_simple() write_mem_ds_sid succeeded");
+
+    read_mem_ds_sid = H5Screate_simple(1, chunk_dims, NULL);
+    VRFY((read_mem_ds_sid != 0), "H5Screate_simple() read_mem_ds_sid succeeded");
+
+    file_ds_sid = H5Screate_simple(1, dims, NULL);
+    VRFY((file_ds_sid != 0), "H5Screate_simple() file_ds_sid succeeded");
+
+    /* setup data set creation property list: chunked layout so that the
+     * link-chunk collective I/O code path is exercised
+     */
+    ds_dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+    VRFY((ds_dcpl_id != FAIL), "H5Pcreate() ds_dcpl_id succeeded");
+
+    ret = H5Pset_layout(ds_dcpl_id, H5D_CHUNKED);
+    VRFY((ret != FAIL), "H5Pset_layout() ds_dcpl_id succeeded");
+
+    ret = H5Pset_chunk(ds_dcpl_id, 1, chunk_dims);
+    VRFY((ret != FAIL), "H5Pset_chunk() small_ds_dcpl_id succeeded");
+
+    /* create the data set */
+    dset_id =
+        H5Dcreate2(file_id, "dataset", H5T_NATIVE_DOUBLE, file_ds_sid, H5P_DEFAULT, ds_dcpl_id, H5P_DEFAULT);
+    VRFY((dset_id >= 0), "H5Dcreate2() dataset succeeded");
+
+    /* close the dataset creation property list */
+    ret = H5Pclose(ds_dcpl_id);
+    VRFY((ret >= 0), "H5Pclose(ds_dcpl_id) succeeded");
+
+    /* setup local data: each rank writes an increasing sequence starting at
+     * rank * CHUNK_SIZE, so every file element gets a distinct value
+     */
+    expected_value = (double)(LINK_CHUNK_COLLECTIVE_IO_TEST_CHUNK_SIZE) * (double)(mpi_rank);
+    for (i = 0; i < LINK_CHUNK_COLLECTIVE_IO_TEST_CHUNK_SIZE; i++) {
+
+        local_data_written[i] = expected_value;
+        local_data_read[i]    = 0.0;
+        expected_value += 1.0;
+    }
+
+    /* select the file and mem spaces.  With count[0] == 1 the stride is
+     * effectively unused: each rank selects exactly one block of
+     * CHUNK_SIZE elements, i.e. exactly one chunk.
+     */
+    start[0] = (hsize_t)(mpi_rank * LINK_CHUNK_COLLECTIVE_IO_TEST_CHUNK_SIZE);
+    ret      = H5Sselect_hyperslab(file_ds_sid, H5S_SELECT_SET, start, stride, count, block);
+    VRFY((ret >= 0), "H5Sselect_hyperslab(file_ds_sid, set) succeeded");
+
+    ret = H5Sselect_all(write_mem_ds_sid);
+    VRFY((ret != FAIL), "H5Sselect_all(mem_ds_sid) succeeded");
+
+    /* Note that we use NO SELECTION on the read memory dataspace --
+     * presumably relying on the default selection of a freshly created
+     * simple dataspace covering the full extent (verify against the
+     * H5Screate_simple() documentation).
+     */
+
+    /* setup xfer property list: collective I/O for both directions */
+    xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+    VRFY((xfer_plist >= 0), "H5Pcreate(H5P_DATASET_XFER) succeeded");
+
+    ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+    VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+
+    /* write the data set */
+    ret = H5Dwrite(dset_id, H5T_NATIVE_DOUBLE, write_mem_ds_sid, file_ds_sid, xfer_plist, local_data_written);
+
+    VRFY((ret >= 0), "H5Dwrite() dataset initial write succeeded");
+
+    /* sync with the other processes before checking data */
+    mrc = MPI_Barrier(MPI_COMM_WORLD);
+    VRFY((mrc == MPI_SUCCESS), "Sync after dataset write");
+
+    /* read this processes slice of the dataset back in */
+    ret = H5Dread(dset_id, H5T_NATIVE_DOUBLE, read_mem_ds_sid, file_ds_sid, xfer_plist, local_data_read);
+    VRFY((ret >= 0), "H5Dread() dataset read succeeded");
+
+    /* close the xfer property list */
+    ret = H5Pclose(xfer_plist);
+    VRFY((ret >= 0), "H5Pclose(xfer_plist) succeeded");
+
+    /* verify the data: tolerance comparison since the values round-tripped
+     * through the file as doubles
+     */
+    mis_match = FALSE;
+    for (i = 0; i < LINK_CHUNK_COLLECTIVE_IO_TEST_CHUNK_SIZE; i++) {
+
+        diff = local_data_written[i] - local_data_read[i];
+        diff = fabs(diff);
+
+        if (diff >= 0.001) {
+
+            mis_match = TRUE;
+        }
+    }
+    VRFY((mis_match == FALSE), "dataset data good.");
+
+    /* Close dataspaces */
+    ret = H5Sclose(write_mem_ds_sid);
+    VRFY((ret != FAIL), "H5Sclose(write_mem_ds_sid) succeeded");
+
+    ret = H5Sclose(read_mem_ds_sid);
+    VRFY((ret != FAIL), "H5Sclose(read_mem_ds_sid) succeeded");
+
+    ret = H5Sclose(file_ds_sid);
+    VRFY((ret != FAIL), "H5Sclose(file_ds_sid) succeeded");
+
+    /* Close Dataset */
+    ret = H5Dclose(dset_id);
+    VRFY((ret != FAIL), "H5Dclose(dset_id) succeeded");
+
+    /* close the file collectively */
+    ret = H5Fclose(file_id);
+    VRFY((ret != FAIL), "file close succeeded");
+
+    return;
+
+} /* link_chunk_collective_io_test() */
diff --git a/testpar/API/testphdf5.c b/testpar/API/testphdf5.c
new file mode 100644
index 0000000..ec5dae2
--- /dev/null
+++ b/testpar/API/testphdf5.c
@@ -0,0 +1,1007 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * Main driver of the Parallel HDF5 tests
+ */
+
+#include "hdf5.h"
+#include "testphdf5.h"
+
+#ifndef PATH_MAX
+#define PATH_MAX 512
+#endif /* !PATH_MAX */
+
+/* global variables */
+int dim0;      /* dataset dimension 0; set in main() to ROW_FACTOR * mpi_size */
+int dim1;      /* dataset dimension 1; set in main() to COL_FACTOR * mpi_size */
+int chunkdim0; /* chunk dimension 0; defaults to (dim0 + 9) / 10 in parse_options() */
+int chunkdim1; /* chunk dimension 1; defaults to (dim1 + 9) / 10 in parse_options() */
+int nerrors = 0; /* errors count */
+int ndatasets = 300; /* number of datasets to create*/
+int ngroups = 512; /* number of groups to create in root
+ * group. */
+int facc_type = FACC_MPIO; /*Test file access type */
+int dxfer_coll_type = DXFER_COLLECTIVE_IO; /* data transfer mode; -i switches to independent I/O */
+
+H5E_auto2_t old_func; /* previous error handler */
+void *old_client_data; /* previous error handler arg.*/
+
+/* other option flags */
+
+/* FILENAME and filenames must have the same number of names.
+ * Use PARATESTFILE in general and use a separated filename only if the file
+ * created in one test is accessed by a different test.
+ * filenames[0] is reserved as the file name for PARATESTFILE.
+ */
+#define NFILENAME 2
+/* #define PARATESTFILE filenames[0] */
+const char *FILENAME[NFILENAME] = {"ParaTest.h5", NULL};
+char filenames[NFILENAME][PATH_MAX]; /* expanded test file names; filled in by parse_options() */
+hid_t fapl; /* file access property list */
+
+#ifdef USE_PAUSE
+/* pause the process for a moment to allow debugger to attach if desired. */
+/* Will pause more if greenlight file is not persent but will eventually */
+/* continue. */
+#include <sys/types.h>
+#include <sys/stat.h>
+
+void
+pause_proc(void)
+{
+
+    int pid;
+    h5_stat_t statbuf;
+    char greenlight[] = "go"; /* name of the "go-ahead" file polled for below */
+    int maxloop = 10;  /* maximum number of polling iterations before giving up */
+    int loops = 0;
+    int time_int = 10; /* seconds to sleep between polls */
+
+    /* mpi variables */
+    int mpi_size, mpi_rank;
+    int mpi_namelen;
+    char mpi_name[MPI_MAX_PROCESSOR_NAME];
+
+    pid = getpid();
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+    MPI_Get_processor_name(mpi_name, &mpi_namelen);
+
+    /* Only the main process polls for the greenlight file.  On the first
+     * iteration it prints its pid so a debugger can be attached; it then
+     * re-checks every time_int seconds, up to maxloop times, before
+     * continuing without the file.
+     */
+    if (MAINPROCESS)
+        while ((HDstat(greenlight, &statbuf) == -1) && loops < maxloop) {
+            if (!loops++) {
+                HDprintf("Proc %d (%*s, %d): to debug, attach %d\n", mpi_rank, mpi_namelen, mpi_name, pid,
+                         pid);
+            }
+            HDprintf("waiting(%ds) for file %s ...\n", time_int, greenlight);
+            HDfflush(stdout);
+            HDsleep(time_int);
+        }
+    /* All ranks wait here until the main process finishes (or times out) */
+    MPI_Barrier(MPI_COMM_WORLD);
+}
+
+/* Use the Profile feature of MPI to call the pause_proc() */
+int
+MPI_Init(int *argc, char ***argv)
+{
+    /* Perform the real initialization through the PMPI profiling entry
+     * point, then pause so a debugger can attach before any test runs.
+     */
+    int rc;
+
+    rc = PMPI_Init(argc, argv);
+    pause_proc();
+
+    return rc;
+}
+#endif /* USE_PAUSE */
+
+/*
+ * Show command usage
+ */
+static void
+usage(void)
+{
+ HDprintf(" [-r] [-w] [-m<n_datasets>] [-n<n_groups>] "
+ "[-o] [-f <prefix>] [-d <dim0> <dim1>]\n");
+ HDprintf("\t-m<n_datasets>"
+ "\tset number of datasets for the multiple dataset test\n");
+ HDprintf("\t-n<n_groups>"
+ "\tset number of groups for the multiple group test\n");
+#if 0
+ HDprintf("\t-f <prefix>\tfilename prefix\n");
+#endif
+ HDprintf("\t-2\t\tuse Split-file together with MPIO\n");
+ HDprintf("\t-d <factor0> <factor1>\tdataset dimensions factors. Defaults (%d,%d)\n", ROW_FACTOR,
+ COL_FACTOR);
+ HDprintf("\t-c <dim0> <dim1>\tdataset chunk dimensions. Defaults (dim0/10,dim1/10)\n");
+ HDprintf("\n");
+}
+
+/*
+ * parse the command line options
+ */
+/*
+ * Parse the command line options.
+ *
+ * Recognized options:
+ *   -m<n>           number of datasets for the multiple-dataset test
+ *   -n<n>           number of groups for the multiple-group test
+ *   -i              use independent I/O in place of collective I/O
+ *   -2              use the split-file driver together with MPIO
+ *   -d <f0> <f1>    dataset dimension factors (each scaled by mpi_size)
+ *   -c <d0> <d1>    dataset chunk dimensions
+ *   -h              print a help message
+ *
+ * Also validates the resulting dimension/chunk sizes, requires dims to be
+ * multiples of mpi_size, and fills in the global filenames[] array.
+ *
+ * Returns 0 on success; 1 on error or help request (nerrors is
+ * incremented for genuine errors, but not for -h).
+ */
+static int
+parse_options(int argc, char **argv)
+{
+    int mpi_size, mpi_rank; /* mpi variables */
+
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* setup default chunk-size. Make sure sizes are > 0 */
+
+    chunkdim0 = (dim0 + 9) / 10;
+    chunkdim1 = (dim1 + 9) / 10;
+
+    while (--argc) {
+        if (**(++argv) != '-') {
+            break;
+        }
+        else {
+            switch (*(*argv + 1)) {
+                case 'm':
+                    ndatasets = atoi((*argv + 1) + 1);
+                    if (ndatasets < 0) {
+                        nerrors++;
+                        return (1);
+                    }
+                    break;
+                case 'n':
+                    ngroups = atoi((*argv + 1) + 1);
+                    if (ngroups < 0) {
+                        nerrors++;
+                        return (1);
+                    }
+                    break;
+#if 0
+                case 'f': if (--argc < 1) {
+                    nerrors++;
+                    return(1);
+                }
+                if (**(++argv) == '-') {
+                    nerrors++;
+                    return(1);
+                }
+                paraprefix = *argv;
+                break;
+#endif
+                case 'i': /* Collective MPI-IO access with independent IO */
+                    dxfer_coll_type = DXFER_INDEPENDENT_IO;
+                    break;
+                case '2': /* Use the split-file driver with MPIO access */
+                    /* Can use $HDF5_METAPREFIX to define the */
+                    /* meta-file-prefix. */
+                    facc_type = FACC_MPIO | FACC_SPLIT;
+                    break;
+                case 'd': /* dimensizes */
+                    if (--argc < 2) {
+                        nerrors++;
+                        return (1);
+                    }
+                    /* dims are factors multiplied by the process count so
+                     * every rank gets an equal share */
+                    dim0 = atoi(*(++argv)) * mpi_size;
+                    argc--;
+                    dim1 = atoi(*(++argv)) * mpi_size;
+                    /* set default chunkdim sizes too */
+                    chunkdim0 = (dim0 + 9) / 10;
+                    chunkdim1 = (dim1 + 9) / 10;
+                    break;
+                case 'c': /* chunk dimensions */
+                    if (--argc < 2) {
+                        nerrors++;
+                        return (1);
+                    }
+                    chunkdim0 = atoi(*(++argv));
+                    argc--;
+                    chunkdim1 = atoi(*(++argv));
+                    break;
+                case 'h': /* print help message--return with nerrors set */
+                    return (1);
+                default:
+                    HDprintf("Illegal option(%s)\n", *argv);
+                    nerrors++;
+                    return (1);
+            }
+        }
+    } /*while*/
+
+    /* check validity of dimension and chunk sizes */
+    if (dim0 <= 0 || dim1 <= 0) {
+        HDprintf("Illegal dim sizes (%d, %d)\n", dim0, dim1);
+        nerrors++;
+        return (1);
+    }
+    if (chunkdim0 <= 0 || chunkdim1 <= 0) {
+        HDprintf("Illegal chunkdim sizes (%d, %d)\n", chunkdim0, chunkdim1);
+        nerrors++;
+        return (1);
+    }
+
+    /* Make sure datasets can be divided into equal portions by the processes */
+    if ((dim0 % mpi_size) || (dim1 % mpi_size)) {
+        if (MAINPROCESS)
+            HDprintf("dim0(%d) and dim1(%d) must be multiples of processes(%d)\n", dim0, dim1, mpi_size);
+        nerrors++;
+        return (1);
+    }
+
+    /* compose the test filenames */
+    {
+        int i, n;
+
+        n = sizeof(FILENAME) / sizeof(FILENAME[0]) - 1; /* exclude the NULL */
+
+        for (i = 0; i < n; i++)
+            /* snprintf (rather than strncpy) guarantees NUL-termination
+             * even when the source name is PATH_MAX bytes or longer.
+             */
+            snprintf(filenames[i], PATH_MAX, "%s", FILENAME[i]);
+#if 0 /* no support for VFDs right now */
+        if (h5_fixname(FILENAME[i], fapl, filenames[i], PATH_MAX) == NULL) {
+            HDprintf("h5_fixname failed\n");
+            nerrors++;
+            return (1);
+        }
+#endif
+        if (MAINPROCESS) {
+            HDprintf("Test filenames are:\n");
+            for (i = 0; i < n; i++)
+                HDprintf(" %s\n", filenames[i]);
+        }
+    }
+
+    return (0);
+}
+
+/*
+ * Create the appropriate File access property list for the requested
+ * access type.
+ *
+ * comm, info  - MPI communicator and info object used for MPI-IO access
+ * l_facc_type - FACC_DEFAULT, FACC_MPIO, or FACC_MPIO | FACC_SPLIT
+ *
+ * Returns the new property list id (>= 0); the caller owns it and must
+ * close it with H5Pclose().  For an unknown access type, the plain
+ * H5P_FILE_ACCESS list is returned unmodified.
+ */
+hid_t
+create_faccess_plist(MPI_Comm comm, MPI_Info info, int l_facc_type)
+{
+    hid_t ret_pl = -1;
+    herr_t ret; /* generic return value */
+    int mpi_rank; /* mpi variables */
+
+    /* need the rank for error checking macros */
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    ret_pl = H5Pcreate(H5P_FILE_ACCESS);
+    VRFY((ret_pl >= 0), "H5P_FILE_ACCESS");
+
+    if (l_facc_type == FACC_DEFAULT)
+        return (ret_pl);
+
+    if (l_facc_type == FACC_MPIO) {
+        /* set Parallel access with communicator, and request collective
+         * metadata reads and writes on the file */
+        ret = H5Pset_fapl_mpio(ret_pl, comm, info);
+        VRFY((ret >= 0), "");
+        ret = H5Pset_all_coll_metadata_ops(ret_pl, TRUE);
+        VRFY((ret >= 0), "");
+        ret = H5Pset_coll_metadata_write(ret_pl, TRUE);
+        VRFY((ret >= 0), "");
+        return (ret_pl);
+    }
+
+    if (l_facc_type == (FACC_MPIO | FACC_SPLIT)) {
+        hid_t mpio_pl;
+
+        mpio_pl = H5Pcreate(H5P_FILE_ACCESS);
+        VRFY((mpio_pl >= 0), "");
+        /* set Parallel access with communicator */
+        ret = H5Pset_fapl_mpio(mpio_pl, comm, info);
+        VRFY((ret >= 0), "");
+
+        /* setup file access template */
+        ret_pl = H5Pcreate(H5P_FILE_ACCESS);
+        VRFY((ret_pl >= 0), "");
+        /* set Parallel access with communicator; both the .meta and .raw
+         * halves of the split file use the same MPIO fapl */
+        ret = H5Pset_fapl_split(ret_pl, ".meta", mpio_pl, ".raw", mpio_pl);
+        VRFY((ret >= 0), "H5Pset_fapl_split succeeded");
+        /* NOTE(review): H5Pclose return value intentionally unchecked here */
+        H5Pclose(mpio_pl);
+        return (ret_pl);
+    }
+
+    /* unknown file access types */
+    return (ret_pl);
+}
+
+/*
+ * Main driver of the parallel HDF5 API tests.
+ *
+ * Each test is invoked directly; the usual testframe AddTest()/
+ * PerformTests() machinery is disabled (#if 0) because these tests run
+ * against arbitrary VOL connectors.  Error counts from all ranks are
+ * combined with MPI_Allreduce before reporting.
+ *
+ * Returns 0 if all tests passed on every rank, 1 otherwise (exit codes
+ * are limited to one byte, so the raw error count is not returned).
+ */
+int
+main(int argc, char **argv)
+{
+    int mpi_size, mpi_rank; /* mpi variables */
+    herr_t ret;
+
+#if 0
+    H5Ptest_param_t ndsets_params, ngroups_params;
+    H5Ptest_param_t collngroups_params;
+    H5Ptest_param_t io_mode_confusion_params;
+    H5Ptest_param_t rr_obj_flush_confusion_params;
+#endif
+
+#ifndef H5_HAVE_WIN32_API
+    /* Un-buffer the stdout and stderr */
+    HDsetbuf(stderr, NULL);
+    HDsetbuf(stdout, NULL);
+#endif
+
+    MPI_Init(&argc, &argv);
+    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
+    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
+
+    /* Default dataset dims scale with the process count so each rank
+     * gets an equal share (parse_options() enforces divisibility). */
+    dim0 = ROW_FACTOR * mpi_size;
+    dim1 = COL_FACTOR * mpi_size;
+
+    if (MAINPROCESS) {
+        HDprintf("===================================\n");
+        HDprintf("PHDF5 TESTS START\n");
+        HDprintf("===================================\n");
+    }
+
+    /* Attempt to turn off atexit post processing so that in case errors
+     * happen during the test and the process is aborted, it will not get
+     * hang in the atexit post processing in which it may try to make MPI
+     * calls. By then, MPI calls may not work.
+     */
+    if (H5dont_atexit() < 0) {
+        HDprintf("Failed to turn off atexit processing. Continue.\n");
+    };
+    H5open();
+    /* h5_show_hostname(); */
+
+#if 0
+    HDmemset(filenames, 0, sizeof(filenames));
+    for (int i = 0; i < NFILENAME; i++) {
+        if (NULL == (filenames[i] = HDmalloc(PATH_MAX))) {
+            HDprintf("couldn't allocate filename array\n");
+            MPI_Abort(MPI_COMM_WORLD, -1);
+        }
+    }
+#endif
+
+    /* Set up file access property list with parallel I/O access */
+    fapl = H5Pcreate(H5P_FILE_ACCESS);
+    VRFY((fapl >= 0), "H5Pcreate succeeded");
+
+    vol_cap_flags_g = H5VL_CAP_FLAG_NONE;
+
+    /* Get the capability flag of the VOL connector being used */
+    ret = H5Pget_vol_cap_flags(fapl, &vol_cap_flags_g);
+    VRFY((ret >= 0), "H5Pget_vol_cap_flags succeeded");
+
+    /* Initialize testing framework */
+    /* TestInit(argv[0], usage, parse_options); */
+
+    if (parse_options(argc, argv)) {
+        usage();
+        /* Clean up before exiting: returning from main() after a
+         * successful MPI_Init() without calling MPI_Finalize() violates
+         * the MPI standard and can hang or abort some implementations.
+         */
+        H5Pclose(fapl);
+        H5close();
+        MPI_Finalize();
+        return 1;
+    }
+
+    /* Tests are generally arranged from least to most complexity... */
+#if 0
+    AddTest("mpiodup", test_fapl_mpio_dup, NULL,
+            "fapl_mpio duplicate", NULL);
+#endif
+
+    if (MAINPROCESS) {
+        printf("fapl_mpio duplicate\n");
+        fflush(stdout);
+    }
+    test_fapl_mpio_dup();
+
+#if 0
+    AddTest("split", test_split_comm_access, NULL,
+            "dataset using split communicators", PARATESTFILE);
+    AddTest("props", test_file_properties, NULL,
+            "Coll Metadata file property settings", PARATESTFILE);
+#endif
+
+    if (MAINPROCESS) {
+        printf("dataset using split communicators\n");
+        fflush(stdout);
+    }
+    test_split_comm_access();
+
+    if (MAINPROCESS) {
+        printf("Coll Metadata file property settings\n");
+        fflush(stdout);
+    }
+    test_file_properties();
+
+#if 0
+    AddTest("idsetw", dataset_writeInd, NULL,
+            "dataset independent write", PARATESTFILE);
+    AddTest("idsetr", dataset_readInd, NULL,
+            "dataset independent read", PARATESTFILE);
+#endif
+
+    if (MAINPROCESS) {
+        printf("dataset independent write\n");
+        fflush(stdout);
+    }
+    dataset_writeInd();
+    if (MAINPROCESS) {
+        printf("dataset independent read\n");
+        fflush(stdout);
+    }
+    dataset_readInd();
+
+#if 0
+    AddTest("cdsetw", dataset_writeAll, NULL,
+            "dataset collective write", PARATESTFILE);
+    AddTest("cdsetr", dataset_readAll, NULL,
+            "dataset collective read", PARATESTFILE);
+#endif
+
+    if (MAINPROCESS) {
+        printf("dataset collective write\n");
+        fflush(stdout);
+    }
+    dataset_writeAll();
+    if (MAINPROCESS) {
+        printf("dataset collective read\n");
+        fflush(stdout);
+    }
+    dataset_readAll();
+
+#if 0
+    AddTest("eidsetw", extend_writeInd, NULL,
+            "extendible dataset independent write", PARATESTFILE);
+    AddTest("eidsetr", extend_readInd, NULL,
+            "extendible dataset independent read", PARATESTFILE);
+    AddTest("ecdsetw", extend_writeAll, NULL,
+            "extendible dataset collective write", PARATESTFILE);
+    AddTest("ecdsetr", extend_readAll, NULL,
+            "extendible dataset collective read", PARATESTFILE);
+    AddTest("eidsetw2", extend_writeInd2, NULL,
+            "extendible dataset independent write #2", PARATESTFILE);
+    AddTest("selnone", none_selection_chunk, NULL,
+            "chunked dataset with none-selection", PARATESTFILE);
+    AddTest("calloc", test_chunk_alloc, NULL,
+            "parallel extend Chunked allocation on serial file", PARATESTFILE);
+    AddTest("fltread", test_filter_read, NULL,
+            "parallel read of dataset written serially with filters", PARATESTFILE);
+#endif
+
+    if (MAINPROCESS) {
+        printf("extendible dataset independent write\n");
+        fflush(stdout);
+    }
+    extend_writeInd();
+    if (MAINPROCESS) {
+        printf("extendible dataset independent read\n");
+        fflush(stdout);
+    }
+    extend_readInd();
+    if (MAINPROCESS) {
+        printf("extendible dataset collective write\n");
+        fflush(stdout);
+    }
+    extend_writeAll();
+    if (MAINPROCESS) {
+        printf("extendible dataset collective read\n");
+        fflush(stdout);
+    }
+    extend_readAll();
+    if (MAINPROCESS) {
+        printf("extendible dataset independent write #2\n");
+        fflush(stdout);
+    }
+    extend_writeInd2();
+    if (MAINPROCESS) {
+        printf("chunked dataset with none-selection\n");
+        fflush(stdout);
+    }
+    none_selection_chunk();
+    if (MAINPROCESS) {
+        printf("parallel extend Chunked allocation on serial file\n");
+        fflush(stdout);
+    }
+    test_chunk_alloc();
+    if (MAINPROCESS) {
+        printf("parallel read of dataset written serially with filters\n");
+        fflush(stdout);
+    }
+    test_filter_read();
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+#if 0
+    AddTest("cmpdsetr", compress_readAll, NULL,
+            "compressed dataset collective read", PARATESTFILE);
+#endif
+
+    if (MAINPROCESS) {
+        printf("compressed dataset collective read\n");
+        fflush(stdout);
+    }
+    compress_readAll();
+#endif /* H5_HAVE_FILTER_DEFLATE */
+
+#if 0
+    AddTest("zerodsetr", zero_dim_dset, NULL,
+            "zero dim dset", PARATESTFILE);
+#endif
+
+    if (MAINPROCESS) {
+        printf("zero dim dset\n");
+        fflush(stdout);
+    }
+    zero_dim_dset();
+
+#if 0
+    ndsets_params.name = PARATESTFILE;
+    ndsets_params.count = ndatasets;
+    AddTest("ndsetw", multiple_dset_write, NULL,
+            "multiple datasets write", &ndsets_params);
+#endif
+
+    if (MAINPROCESS) {
+        printf("multiple datasets write\n");
+        fflush(stdout);
+    }
+    multiple_dset_write();
+
+#if 0
+    ngroups_params.name = PARATESTFILE;
+    ngroups_params.count = ngroups;
+    AddTest("ngrpw", multiple_group_write, NULL,
+            "multiple groups write", &ngroups_params);
+    AddTest("ngrpr", multiple_group_read, NULL,
+            "multiple groups read", &ngroups_params);
+#endif
+
+    if (MAINPROCESS) {
+        printf("multiple groups write\n");
+        fflush(stdout);
+    }
+    multiple_group_write();
+    if (MAINPROCESS) {
+        printf("multiple groups read\n");
+        fflush(stdout);
+    }
+    multiple_group_read();
+
+#if 0
+    AddTest("compact", compact_dataset, NULL,
+            "compact dataset test", PARATESTFILE);
+#endif
+
+    if (MAINPROCESS) {
+        printf("compact dataset test\n");
+        fflush(stdout);
+    }
+    compact_dataset();
+
+#if 0
+    collngroups_params.name = PARATESTFILE;
+    collngroups_params.count = ngroups;
+    /* combined cngrpw and ingrpr tests because ingrpr reads file created by cngrpw. */
+    AddTest("cngrpw-ingrpr", collective_group_write_independent_group_read, NULL,
+            "collective grp/dset write - independent grp/dset read",
+            &collngroups_params);
+#ifndef H5_HAVE_WIN32_API
+    AddTest("bigdset", big_dataset, NULL,
+            "big dataset test", PARATESTFILE);
+#else
+    HDprintf("big dataset test will be skipped on Windows (JIRA HDDFV-8064)\n");
+#endif
+#endif
+
+    if (MAINPROCESS) {
+        printf("collective grp/dset write - independent grp/dset read\n");
+        fflush(stdout);
+    }
+    collective_group_write_independent_group_read();
+    if (MAINPROCESS) {
+        printf("big dataset test\n");
+        fflush(stdout);
+    }
+    big_dataset();
+
+#if 0
+    AddTest("fill", dataset_fillvalue, NULL,
+            "dataset fill value", PARATESTFILE);
+#endif
+
+    if (MAINPROCESS) {
+        printf("dataset fill value\n");
+        fflush(stdout);
+    }
+    dataset_fillvalue();
+
+#if 0
+    AddTest("cchunk1",
+            coll_chunk1,NULL, "simple collective chunk io",PARATESTFILE);
+    AddTest("cchunk2",
+            coll_chunk2,NULL, "noncontiguous collective chunk io",PARATESTFILE);
+    AddTest("cchunk3",
+            coll_chunk3,NULL, "multi-chunk collective chunk io",PARATESTFILE);
+    AddTest("cchunk4",
+            coll_chunk4,NULL, "collective chunk io with partial non-selection ",PARATESTFILE);
+#endif
+
+    if (MAINPROCESS) {
+        printf("simple collective chunk io\n");
+        fflush(stdout);
+    }
+    coll_chunk1();
+    if (MAINPROCESS) {
+        printf("noncontiguous collective chunk io\n");
+        fflush(stdout);
+    }
+    coll_chunk2();
+    if (MAINPROCESS) {
+        printf("multi-chunk collective chunk io\n");
+        fflush(stdout);
+    }
+    coll_chunk3();
+    if (MAINPROCESS) {
+        printf("collective chunk io with partial non-selection\n");
+        fflush(stdout);
+    }
+    coll_chunk4();
+
+    if ((mpi_size < 3) && MAINPROCESS) {
+        HDprintf("Collective chunk IO optimization APIs ");
+        HDprintf("needs at least 3 processes to participate\n");
+        HDprintf("Collective chunk IO API tests will be skipped \n");
+    }
+
+#if 0
+    AddTest((mpi_size <3)? "-cchunk5":"cchunk5" ,
+            coll_chunk5,NULL,
+            "linked chunk collective IO without optimization",PARATESTFILE);
+    AddTest((mpi_size < 3)? "-cchunk6" : "cchunk6",
+            coll_chunk6,NULL,
+            "multi-chunk collective IO with direct request",PARATESTFILE);
+    AddTest((mpi_size < 3)? "-cchunk7" : "cchunk7",
+            coll_chunk7,NULL,
+            "linked chunk collective IO with optimization",PARATESTFILE);
+    AddTest((mpi_size < 3)? "-cchunk8" : "cchunk8",
+            coll_chunk8,NULL,
+            "linked chunk collective IO transferring to multi-chunk",PARATESTFILE);
+    AddTest((mpi_size < 3)? "-cchunk9" : "cchunk9",
+            coll_chunk9,NULL,
+            "multiple chunk collective IO with optimization",PARATESTFILE);
+    AddTest((mpi_size < 3)? "-cchunk10" : "cchunk10",
+            coll_chunk10,NULL,
+            "multiple chunk collective IO transferring to independent IO",PARATESTFILE);
+#endif
+
+    /* The coll_chunk5-10 tests require at least 3 ranks */
+    if (mpi_size >= 3) {
+        if (MAINPROCESS) {
+            printf("linked chunk collective IO without optimization\n");
+            fflush(stdout);
+        }
+        coll_chunk5();
+        if (MAINPROCESS) {
+            printf("multi-chunk collective IO with direct request\n");
+            fflush(stdout);
+        }
+        coll_chunk6();
+        if (MAINPROCESS) {
+            printf("linked chunk collective IO with optimization\n");
+            fflush(stdout);
+        }
+        coll_chunk7();
+        if (MAINPROCESS) {
+            printf("linked chunk collective IO transferring to multi-chunk\n");
+            fflush(stdout);
+        }
+        coll_chunk8();
+        if (MAINPROCESS) {
+            printf("multiple chunk collective IO with optimization\n");
+            fflush(stdout);
+        }
+        coll_chunk9();
+        if (MAINPROCESS) {
+            printf("multiple chunk collective IO transferring to independent IO\n");
+            fflush(stdout);
+        }
+        coll_chunk10();
+    }
+
+#if 0
+    /* irregular collective IO tests*/
+    AddTest("ccontw",
+            coll_irregular_cont_write,NULL,
+            "collective irregular contiguous write",PARATESTFILE);
+    AddTest("ccontr",
+            coll_irregular_cont_read,NULL,
+            "collective irregular contiguous read",PARATESTFILE);
+    AddTest("cschunkw",
+            coll_irregular_simple_chunk_write,NULL,
+            "collective irregular simple chunk write",PARATESTFILE);
+    AddTest("cschunkr",
+            coll_irregular_simple_chunk_read,NULL,
+            "collective irregular simple chunk read",PARATESTFILE);
+    AddTest("ccchunkw",
+            coll_irregular_complex_chunk_write,NULL,
+            "collective irregular complex chunk write",PARATESTFILE);
+    AddTest("ccchunkr",
+            coll_irregular_complex_chunk_read,NULL,
+            "collective irregular complex chunk read",PARATESTFILE);
+#endif
+
+    if (MAINPROCESS) {
+        printf("collective irregular contiguous write\n");
+        fflush(stdout);
+    }
+    coll_irregular_cont_write();
+    if (MAINPROCESS) {
+        printf("collective irregular contiguous read\n");
+        fflush(stdout);
+    }
+    coll_irregular_cont_read();
+    if (MAINPROCESS) {
+        printf("collective irregular simple chunk write\n");
+        fflush(stdout);
+    }
+    coll_irregular_simple_chunk_write();
+    if (MAINPROCESS) {
+        printf("collective irregular simple chunk read\n");
+        fflush(stdout);
+    }
+    coll_irregular_simple_chunk_read();
+    if (MAINPROCESS) {
+        printf("collective irregular complex chunk write\n");
+        fflush(stdout);
+    }
+    coll_irregular_complex_chunk_write();
+    if (MAINPROCESS) {
+        printf("collective irregular complex chunk read\n");
+        fflush(stdout);
+    }
+    coll_irregular_complex_chunk_read();
+
+#if 0
+    AddTest("null", null_dataset, NULL,
+            "null dataset test", PARATESTFILE);
+#endif
+
+    if (MAINPROCESS) {
+        printf("null dataset test\n");
+        fflush(stdout);
+    }
+    null_dataset();
+
+#if 0
+    io_mode_confusion_params.name  = PARATESTFILE;
+    io_mode_confusion_params.count = 0; /* value not used */
+
+    AddTest("I/Omodeconf", io_mode_confusion, NULL,
+            "I/O mode confusion test",
+            &io_mode_confusion_params);
+#endif
+
+    if (MAINPROCESS) {
+        printf("I/O mode confusion test\n");
+        fflush(stdout);
+    }
+    io_mode_confusion();
+
+    if ((mpi_size < 3) && MAINPROCESS) {
+        HDprintf("rr_obj_hdr_flush_confusion test needs at least 3 processes.\n");
+        HDprintf("rr_obj_hdr_flush_confusion test will be skipped \n");
+    }
+
+    if (mpi_size > 2) {
+#if 0
+        rr_obj_flush_confusion_params.name = PARATESTFILE;
+        rr_obj_flush_confusion_params.count = 0; /* value not used */
+        AddTest("rrobjflushconf", rr_obj_hdr_flush_confusion, NULL,
+                "round robin object header flush confusion test",
+                &rr_obj_flush_confusion_params);
+#endif
+
+        if (MAINPROCESS) {
+            printf("round robin object header flush confusion test\n");
+            fflush(stdout);
+        }
+        rr_obj_hdr_flush_confusion();
+    }
+
+#if 0
+    AddTest("alnbg1",
+            chunk_align_bug_1, NULL,
+            "Chunk allocation with alignment bug.",
+            PARATESTFILE);
+
+    AddTest("tldsc",
+            lower_dim_size_comp_test, NULL,
+            "test lower dim size comp in span tree to mpi derived type",
+            PARATESTFILE);
+
+    AddTest("lccio",
+            link_chunk_collective_io_test, NULL,
+            "test mpi derived type management",
+            PARATESTFILE);
+
+    AddTest("actualio", actual_io_mode_tests, NULL,
+            "test actual io mode proprerty",
+            PARATESTFILE);
+
+    AddTest("nocolcause", no_collective_cause_tests, NULL,
+            "test cause for broken collective io",
+            PARATESTFILE);
+
+    AddTest("edpl", test_plist_ed, NULL,
+            "encode/decode Property Lists", NULL);
+#endif
+
+    if (MAINPROCESS) {
+        printf("Chunk allocation with alignment bug\n");
+        fflush(stdout);
+    }
+    chunk_align_bug_1();
+    if (MAINPROCESS) {
+        printf("test lower dim size comp in span tree to mpi derived type\n");
+        fflush(stdout);
+    }
+    lower_dim_size_comp_test();
+    if (MAINPROCESS) {
+        printf("test mpi derived type management\n");
+        fflush(stdout);
+    }
+    link_chunk_collective_io_test();
+    if (MAINPROCESS) {
+        printf("test actual io mode property - SKIPPED currently due to native-specific testing\n");
+        fflush(stdout);
+    }
+    /* actual_io_mode_tests(); */
+    if (MAINPROCESS) {
+        printf("test cause for broken collective io - SKIPPED currently due to native-specific testing\n");
+        fflush(stdout);
+    }
+    /* no_collective_cause_tests(); */
+    if (MAINPROCESS) {
+        printf("encode/decode Property Lists\n");
+        fflush(stdout);
+    }
+    test_plist_ed();
+
+    if ((mpi_size < 2) && MAINPROCESS) {
+        HDprintf("File Image Ops daisy chain test needs at least 2 processes.\n");
+        HDprintf("File Image Ops daisy chain test will be skipped \n");
+    }
+
+#if 0
+    AddTest((mpi_size < 2)? "-fiodc" : "fiodc", file_image_daisy_chain_test, NULL,
+            "file image ops daisy chain", NULL);
+#endif
+
+    if (mpi_size >= 2) {
+        if (MAINPROCESS) {
+            printf("file image ops daisy chain - SKIPPED currently due to native-specific testing\n");
+            fflush(stdout);
+        }
+        /* file_image_daisy_chain_test(); */
+    }
+
+    if ((mpi_size < 2) && MAINPROCESS) {
+        HDprintf("Atomicity tests need at least 2 processes to participate\n");
+        HDprintf("8 is more recommended.. Atomicity tests will be skipped \n");
+    }
+    else if (facc_type != FACC_MPIO && MAINPROCESS) {
+        HDprintf("Atomicity tests will not work with a non MPIO VFD\n");
+    }
+    else if (mpi_size >= 2 && facc_type == FACC_MPIO) {
+#if 0
+        AddTest("atomicity", dataset_atomicity, NULL,
+                "dataset atomic updates", PARATESTFILE);
+#endif
+
+        if (MAINPROCESS) {
+            printf("dataset atomic updates - SKIPPED currently due to native-specific testing\n");
+            fflush(stdout);
+        }
+        /* dataset_atomicity(); */
+    }
+
+#if 0
+    AddTest("denseattr", test_dense_attr, NULL,
+            "Store Dense Attributes", PARATESTFILE);
+#endif
+
+    if (MAINPROCESS) {
+        printf("Store Dense Attributes\n");
+        fflush(stdout);
+    }
+    test_dense_attr();
+
+#if 0
+    AddTest("noselcollmdread", test_partial_no_selection_coll_md_read, NULL,
+            "Collective Metadata read with some ranks having no selection", PARATESTFILE);
+    AddTest("MC_coll_MD_read", test_multi_chunk_io_addrmap_issue, NULL,
+            "Collective MD read with multi chunk I/O (H5D__chunk_addrmap)", PARATESTFILE);
+    AddTest("LC_coll_MD_read", test_link_chunk_io_sort_chunk_issue, NULL,
+            "Collective MD read with link chunk I/O (H5D__sort_chunk)", PARATESTFILE);
+#endif
+
+    if (MAINPROCESS) {
+        printf("Collective Metadata read with some ranks having no selection\n");
+        fflush(stdout);
+    }
+    test_partial_no_selection_coll_md_read();
+    if (MAINPROCESS) {
+        printf("Collective MD read with multi chunk I/O\n");
+        fflush(stdout);
+    }
+    test_multi_chunk_io_addrmap_issue();
+    if (MAINPROCESS) {
+        printf("Collective MD read with link chunk I/O\n");
+        fflush(stdout);
+    }
+    test_link_chunk_io_sort_chunk_issue();
+
+    /* Display testing information */
+    /* TestInfo(argv[0]); */
+
+    /* setup file access property list */
+    H5Pset_fapl_mpio(fapl, MPI_COMM_WORLD, MPI_INFO_NULL);
+
+    /* Parse command line arguments */
+    /* TestParseCmdLine(argc, argv); */
+
+    if (dxfer_coll_type == DXFER_INDEPENDENT_IO && MAINPROCESS) {
+        HDprintf("===================================\n"
+                 " Using Independent I/O with file set view to replace collective I/O \n"
+                 "===================================\n");
+    }
+
+    /* Perform requested testing */
+    /* PerformTests(); */
+
+    /* make sure all processes are finished before final report, cleanup
+     * and exit.
+     */
+    MPI_Barrier(MPI_COMM_WORLD);
+
+    /* Display test summary, if requested */
+    /* if (MAINPROCESS && GetTestSummary())
+        TestSummary(); */
+
+    /* Clean up test files */
+    /* h5_clean_files(FILENAME, fapl); */
+    H5Fdelete(FILENAME[0], fapl);
+    H5Pclose(fapl);
+
+    /* nerrors += GetTestNumErrs(); */
+
+    /* Gather errors from all processes; use MAX so one failing rank
+     * makes the whole run report failure.
+     */
+    {
+        int temp;
+        MPI_Allreduce(&nerrors, &temp, 1, MPI_INT, MPI_MAX, MPI_COMM_WORLD);
+        nerrors = temp;
+    }
+
+    if (MAINPROCESS) { /* only process 0 reports */
+        HDprintf("===================================\n");
+        if (nerrors)
+            HDprintf("***PHDF5 tests detected %d errors***\n", nerrors);
+        else
+            HDprintf("PHDF5 tests finished successfully\n");
+        HDprintf("===================================\n");
+    }
+
+#if 0
+    for (int i = 0; i < NFILENAME; i++) {
+        HDfree(filenames[i]);
+        filenames[i] = NULL;
+    }
+#endif
+
+    /* close HDF5 library */
+    H5close();
+
+    /* Release test infrastructure */
+    /* TestShutdown(); */
+
+    /* MPI_Finalize must be called AFTER H5close which may use MPI calls */
+    MPI_Finalize();
+
+    /* cannot just return (nerrors) because exit code is limited to 1byte */
+    return (nerrors != 0);
+}
diff --git a/testpar/API/testphdf5.h b/testpar/API/testphdf5.h
new file mode 100644
index 0000000..27d53e2
--- /dev/null
+++ b/testpar/API/testphdf5.h
@@ -0,0 +1,343 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/* common definitions used by all parallel hdf5 test programs. */
+
+#ifndef PHDF5TEST_H
+#define PHDF5TEST_H
+
+#include "H5private.h"
+#include "testpar.h"
+#include "H5_api_tests_disabled.h"
+
+/*
+ * Define parameters for various tests since we do not have access to
+ * passing parameters to tests via the testphdf5 test framework.
+ */
+#define PARATESTFILE "ParaTest.h5"
+#define NDATASETS 300
+#define NGROUPS 256
+
+/* Disable express testing by default */
+#define EXPRESS_MODE 0
+
+enum H5TEST_COLL_CHUNK_API {
+ API_NONE = 0,
+ API_LINK_HARD,
+ API_MULTI_HARD,
+ API_LINK_TRUE,
+ API_LINK_FALSE,
+ API_MULTI_COLL,
+ API_MULTI_IND
+};
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+/* Constants definitions */
+#define DIM0 600 /* Default dataset sizes. */
+#define DIM1         1200 /* Values are from monitor pixel sizes */
+#define ROW_FACTOR 8 /* Nominal row factor for dataset size */
+#define COL_FACTOR 16 /* Nominal column factor for dataset size */
+#define RANK 2
+#define DATASETNAME1 "Data1"
+#define DATASETNAME2 "Data2"
+#define DATASETNAME3 "Data3"
+#define DATASETNAME4 "Data4"
+#define DATASETNAME5 "Data5"
+#define DATASETNAME6 "Data6"
+#define DATASETNAME7 "Data7"
+#define DATASETNAME8 "Data8"
+#define DATASETNAME9 "Data9"
+
+/* point selection order */
+#define IN_ORDER 1
+#define OUT_OF_ORDER 2
+
+/* Hyperslab layout styles */
+#define BYROW 1 /* divide into slabs of rows */
+#define BYCOL 2 /* divide into blocks of columns */
+#define ZROW 3 /* same as BYCOL except process 0 gets 0 rows */
+#define ZCOL 4 /* same as BYCOL except process 0 gets 0 columns */
+
+/* File_Access_type bits */
+#define FACC_DEFAULT 0x0 /* default */
+#define FACC_MPIO 0x1 /* MPIO */
+#define FACC_SPLIT 0x2 /* Split File */
+
+#define DXFER_COLLECTIVE_IO 0x1 /* Collective IO*/
+#define DXFER_INDEPENDENT_IO 0x2 /* Independent IO collectively */
+/*Constants for collective chunk definitions */
+#define SPACE_DIM1 24
+#define SPACE_DIM2 4
+#define BYROW_CONT 1
+#define BYROW_DISCONT 2
+#define BYROW_SELECTNONE 3
+#define BYROW_SELECTUNBALANCE 4
+#define BYROW_SELECTINCHUNK 5
+
+#define DIMO_NUM_CHUNK 4
+#define DIM1_NUM_CHUNK 2
+#define LINK_TRUE_NUM_CHUNK 2
+#define LINK_FALSE_NUM_CHUNK 6
+#define MULTI_TRUE_PERCENT 50
+#define LINK_TRUE_CHUNK_NAME "h5_link_chunk_true"
+#define LINK_FALSE_CHUNK_NAME "h5_link_chunk_false"
+#define LINK_HARD_CHUNK_NAME "h5_link_chunk_hard"
+#define MULTI_HARD_CHUNK_NAME "h5_multi_chunk_hard"
+#define MULTI_COLL_CHUNK_NAME "h5_multi_chunk_coll"
+#define MULTI_INDP_CHUNK_NAME "h5_multi_chunk_indp"
+
+#define DSET_COLLECTIVE_CHUNK_NAME "coll_chunk_name"
+
+/*Constants for MPI derived data type generated from span tree */
+
+#define MSPACE1_RANK 1 /* Rank of the first dataset in memory */
+#define MSPACE1_DIM 27000 /* Dataset size in memory */
+#define FSPACE_RANK 2 /* Dataset rank as it is stored in the file */
+#define FSPACE_DIM1 9 /* Dimension sizes of the dataset as it is stored in the file */
+#define FSPACE_DIM2 3600
+/* We will read dataset back from the file to the dataset in memory with these dataspace parameters. */
+#define MSPACE_RANK 2
+#define MSPACE_DIM1 9
+#define MSPACE_DIM2 3600
+#define FHCOUNT0 1 /* Count of the first dimension of the first hyperslab selection*/
+#define FHCOUNT1 768 /* Count of the second dimension of the first hyperslab selection*/
+#define FHSTRIDE0 4 /* Stride of the first dimension of the first hyperslab selection*/
+#define FHSTRIDE1 3 /* Stride of the second dimension of the first hyperslab selection*/
+#define FHBLOCK0 3 /* Block of the first dimension of the first hyperslab selection*/
+#define FHBLOCK1 2 /* Block of the second dimension of the first hyperslab selection*/
+#define FHSTART0 0 /* start of the first dimension of the first hyperslab selection*/
+#define FHSTART1 1 /* start of the second dimension of the first hyperslab selection*/
+
+#define SHCOUNT0 1 /* Count of the first dimension of the first hyperslab selection*/
+#define SHCOUNT1 1 /* Count of the second dimension of the first hyperslab selection*/
+#define SHSTRIDE0 1 /* Stride of the first dimension of the first hyperslab selection*/
+#define SHSTRIDE1 1 /* Stride of the second dimension of the first hyperslab selection*/
+#define SHBLOCK0 3 /* Block of the first dimension of the first hyperslab selection*/
+#define SHBLOCK1 768 /* Block of the second dimension of the first hyperslab selection*/
+#define SHSTART0 4 /* start of the first dimension of the first hyperslab selection*/
+#define SHSTART1 0 /* start of the second dimension of the first hyperslab selection*/
+
+#define MHCOUNT0 6912 /* Count of the first dimension of the first hyperslab selection*/
+#define MHSTRIDE0 1 /* Stride of the first dimension of the first hyperslab selection*/
+#define MHBLOCK0 1 /* Block of the first dimension of the first hyperslab selection*/
+#define MHSTART0 1 /* start of the first dimension of the first hyperslab selection*/
+
+#define RFFHCOUNT0 3 /* Count of the first dimension of the first hyperslab selection*/
+#define RFFHCOUNT1 768 /* Count of the second dimension of the first hyperslab selection*/
+#define RFFHSTRIDE0 1 /* Stride of the first dimension of the first hyperslab selection*/
+#define RFFHSTRIDE1 1 /* Stride of the second dimension of the first hyperslab selection*/
+#define RFFHBLOCK0 1 /* Block of the first dimension of the first hyperslab selection*/
+#define RFFHBLOCK1 1 /* Block of the second dimension of the first hyperslab selection*/
+#define RFFHSTART0 1 /* start of the first dimension of the first hyperslab selection*/
+#define RFFHSTART1 2 /* start of the second dimension of the first hyperslab selection*/
+
+#define RFSHCOUNT0 3 /* Count of the first dimension of the first hyperslab selection*/
+#define RFSHCOUNT1 1536 /* Count of the second dimension of the first hyperslab selection*/
+#define RFSHSTRIDE0 1 /* Stride of the first dimension of the first hyperslab selection*/
+#define RFSHSTRIDE1 1 /* Stride of the second dimension of the first hyperslab selection*/
+#define RFSHBLOCK0 1 /* Block of the first dimension of the first hyperslab selection*/
+#define RFSHBLOCK1 1 /* Block of the second dimension of the first hyperslab selection*/
+#define RFSHSTART0 2 /* start of the first dimension of the first hyperslab selection*/
+#define RFSHSTART1 4 /* start of the second dimension of the first hyperslab selection*/
+
+#define RMFHCOUNT0 3 /* Count of the first dimension of the first hyperslab selection*/
+#define RMFHCOUNT1 768 /* Count of the second dimension of the first hyperslab selection*/
+#define RMFHSTRIDE0 1 /* Stride of the first dimension of the first hyperslab selection*/
+#define RMFHSTRIDE1 1 /* Stride of the second dimension of the first hyperslab selection*/
+#define RMFHBLOCK0 1 /* Block of the first dimension of the first hyperslab selection*/
+#define RMFHBLOCK1 1 /* Block of the second dimension of the first hyperslab selection*/
+#define RMFHSTART0 0 /* start of the first dimension of the first hyperslab selection*/
+#define RMFHSTART1 0 /* start of the second dimension of the first hyperslab selection*/
+
+#define RMSHCOUNT0 3 /* Count of the first dimension of the first hyperslab selection*/
+#define RMSHCOUNT1 1536 /* Count of the second dimension of the first hyperslab selection*/
+#define RMSHSTRIDE0 1 /* Stride of the first dimension of the first hyperslab selection*/
+#define RMSHSTRIDE1 1 /* Stride of the second dimension of the first hyperslab selection*/
+#define RMSHBLOCK0 1 /* Block of the first dimension of the first hyperslab selection*/
+#define RMSHBLOCK1 1 /* Block of the second dimension of the first hyperslab selection*/
+#define RMSHSTART0 1 /* start of the first dimension of the first hyperslab selection*/
+#define RMSHSTART1 2 /* start of the second dimension of the first hyperslab selection*/
+
+#define NPOINTS \
+ 4 /* Number of points that will be selected \
+ and overwritten */
+
+/* Definitions of the selection mode for the test_actual_io_function. */
+#define TEST_ACTUAL_IO_NO_COLLECTIVE 0
+#define TEST_ACTUAL_IO_RESET 1
+#define TEST_ACTUAL_IO_MULTI_CHUNK_IND 2
+#define TEST_ACTUAL_IO_MULTI_CHUNK_COL 3
+#define TEST_ACTUAL_IO_MULTI_CHUNK_MIX 4
+#define TEST_ACTUAL_IO_MULTI_CHUNK_MIX_DISAGREE 5
+#define TEST_ACTUAL_IO_DIRECT_MULTI_CHUNK_IND 6
+#define TEST_ACTUAL_IO_DIRECT_MULTI_CHUNK_COL 7
+#define TEST_ACTUAL_IO_LINK_CHUNK 8
+#define TEST_ACTUAL_IO_CONTIGUOUS 9
+
+/* Definitions of the selection mode for the no_collective_cause_tests function. */
+#define TEST_COLLECTIVE 0x001
+#define TEST_SET_INDEPENDENT 0x002
+#define TEST_DATATYPE_CONVERSION 0x004
+#define TEST_DATA_TRANSFORMS 0x008
+#define TEST_NOT_SIMPLE_OR_SCALAR_DATASPACES 0x010
+#define TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_COMPACT 0x020
+#define TEST_NOT_CONTIGUOUS_OR_CHUNKED_DATASET_EXTERNAL 0x040
+
+/* Don't erase these lines, they are put here for debugging purposes */
+/*
+#define MSPACE1_RANK 1
+#define MSPACE1_DIM 50
+#define MSPACE2_RANK 1
+#define MSPACE2_DIM 4
+#define FSPACE_RANK 2
+#define FSPACE_DIM1 8
+#define FSPACE_DIM2 12
+#define MSPACE_RANK 2
+#define MSPACE_DIM1 8
+#define MSPACE_DIM2 9
+#define NPOINTS 4
+*/ /* end of debugging macro */
+
+#ifdef H5_HAVE_INSTRUMENTED_LIBRARY
+/* Collective chunk instrumentation properties */
+#define H5D_XFER_COLL_CHUNK_LINK_HARD_NAME "coll_chunk_link_hard"
+#define H5D_XFER_COLL_CHUNK_MULTI_HARD_NAME "coll_chunk_multi_hard"
+#define H5D_XFER_COLL_CHUNK_LINK_NUM_TRUE_NAME "coll_chunk_link_true"
+#define H5D_XFER_COLL_CHUNK_LINK_NUM_FALSE_NAME "coll_chunk_link_false"
+#define H5D_XFER_COLL_CHUNK_MULTI_RATIO_COLL_NAME "coll_chunk_multi_coll"
+#define H5D_XFER_COLL_CHUNK_MULTI_RATIO_IND_NAME "coll_chunk_multi_ind"
+
+/* Definitions for all collective chunk instrumentation properties */
+#define H5D_XFER_COLL_CHUNK_SIZE sizeof(unsigned)
+#define H5D_XFER_COLL_CHUNK_DEF 1
+
+/* General collective I/O instrumentation properties */
+#define H5D_XFER_COLL_RANK0_BCAST_NAME "coll_rank0_bcast"
+
+/* Definitions for general collective I/O instrumentation properties */
+#define H5D_XFER_COLL_RANK0_BCAST_SIZE sizeof(hbool_t)
+#define H5D_XFER_COLL_RANK0_BCAST_DEF FALSE
+#endif /* H5_HAVE_INSTRUMENTED_LIBRARY */
+
+/* type definitions */
+typedef struct H5Ptest_param_t /* holds extra test parameters */
+{
+ char *name;
+ int count;
+} H5Ptest_param_t;
+
+/* Dataset data type. Ints can be easily octal dumped. */
+typedef int DATATYPE;
+
+/* Shape Same Tests Definitions */
+typedef enum {
+ IND_CONTIG, /* Independent IO on contiguous datasets */
+ COL_CONTIG, /* Collective IO on contiguous datasets */
+ IND_CHUNKED, /* Independent IO on chunked datasets */
+ COL_CHUNKED /* Collective IO on chunked datasets */
+} ShapeSameTestMethods;
+
+/* Shared global variables */
+extern int dim0, dim1; /*Dataset dimensions */
+extern int chunkdim0, chunkdim1; /*Chunk dimensions */
+extern int nerrors; /*errors count */
+extern H5E_auto2_t old_func; /* previous error handler */
+extern void *old_client_data; /*previous error handler arg.*/
+extern int facc_type; /*Test file access type */
+extern int dxfer_coll_type;
+
+/* Test program prototypes */
+void test_plist_ed(void);
+#if 0
+void external_links(void);
+#endif
+void zero_dim_dset(void);
+void test_file_properties(void);
+void test_delete(void);
+void multiple_dset_write(void);
+void multiple_group_write(void);
+void multiple_group_read(void);
+void collective_group_write_independent_group_read(void);
+void collective_group_write(void);
+void independent_group_read(void);
+void test_fapl_mpio_dup(void);
+void test_split_comm_access(void);
+void test_page_buffer_access(void);
+void dataset_atomicity(void);
+void dataset_writeInd(void);
+void dataset_writeAll(void);
+void extend_writeInd(void);
+void extend_writeInd2(void);
+void extend_writeAll(void);
+void dataset_readInd(void);
+void dataset_readAll(void);
+void extend_readInd(void);
+void extend_readAll(void);
+void none_selection_chunk(void);
+void actual_io_mode_tests(void);
+void no_collective_cause_tests(void);
+void test_chunk_alloc(void);
+void test_filter_read(void);
+void compact_dataset(void);
+void null_dataset(void);
+void big_dataset(void);
+void dataset_fillvalue(void);
+void coll_chunk1(void);
+void coll_chunk2(void);
+void coll_chunk3(void);
+void coll_chunk4(void);
+void coll_chunk5(void);
+void coll_chunk6(void);
+void coll_chunk7(void);
+void coll_chunk8(void);
+void coll_chunk9(void);
+void coll_chunk10(void);
+void coll_irregular_cont_read(void);
+void coll_irregular_cont_write(void);
+void coll_irregular_simple_chunk_read(void);
+void coll_irregular_simple_chunk_write(void);
+void coll_irregular_complex_chunk_read(void);
+void coll_irregular_complex_chunk_write(void);
+void io_mode_confusion(void);
+void rr_obj_hdr_flush_confusion(void);
+void rr_obj_hdr_flush_confusion_reader(MPI_Comm comm);
+void rr_obj_hdr_flush_confusion_writer(MPI_Comm comm);
+void chunk_align_bug_1(void);
+void lower_dim_size_comp_test(void);
+void link_chunk_collective_io_test(void);
+void contig_hyperslab_dr_pio_test(ShapeSameTestMethods sstest_type);
+void checker_board_hyperslab_dr_pio_test(ShapeSameTestMethods sstest_type);
+void file_image_daisy_chain_test(void);
+#ifdef H5_HAVE_FILTER_DEFLATE
+void compress_readAll(void);
+#endif /* H5_HAVE_FILTER_DEFLATE */
+void test_dense_attr(void);
+void test_partial_no_selection_coll_md_read(void);
+void test_multi_chunk_io_addrmap_issue(void);
+void test_link_chunk_io_sort_chunk_issue(void);
+void test_collective_global_heap_write(void);
+
+/* commonly used prototypes */
+hid_t create_faccess_plist(MPI_Comm comm, MPI_Info info, int l_facc_type);
+MPI_Offset h5_mpi_get_file_size(const char *filename, MPI_Comm comm, MPI_Info info);
+int dataset_vrfy(hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[], DATATYPE *dataset,
+ DATATYPE *original);
+void point_set(hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[], size_t num_points,
+ hsize_t coords[], int order);
+#endif /* PHDF5TEST_H */
diff --git a/testpar/CMakeLists.txt b/testpar/CMakeLists.txt
index 3a44fca..6bb5fa6 100644
--- a/testpar/CMakeLists.txt
+++ b/testpar/CMakeLists.txt
@@ -111,3 +111,7 @@ endforeach ()
if (HDF5_TEST_PARALLEL)
include (CMakeTests.cmake)
endif ()
+
+if (HDF5_TEST_API)
+ add_subdirectory (API)
+endif ()
diff --git a/testpar/t_subfiling_vfd.c b/testpar/t_subfiling_vfd.c
index f827aa5..0841923 100644
--- a/testpar/t_subfiling_vfd.c
+++ b/testpar/t_subfiling_vfd.c
@@ -1888,7 +1888,7 @@ test_subfiling_h5fuse(void)
if (pid == 0) {
char *tmp_filename;
- char *args[6];
+ char *args[7];
tmp_filename = HDmalloc(PATH_MAX);
VRFY(tmp_filename, "HDmalloc succeeded");
@@ -1900,9 +1900,10 @@ test_subfiling_h5fuse(void)
args[0] = HDstrdup("env");
args[1] = HDstrdup("sh");
args[2] = HDstrdup("h5fuse.sh");
- args[3] = HDstrdup("-q -f");
- args[4] = tmp_filename;
- args[5] = NULL;
+ args[3] = HDstrdup("-q");
+ args[4] = HDstrdup("-f");
+ args[5] = tmp_filename;
+ args[6] = NULL;
/* Call h5fuse script from MPI rank 0 */
HDexecvp("env", args);