Diffstat (limited to 'hl/examples')
-rw-r--r--  hl/examples/CMakeLists.txt    |  25
-rw-r--r--  hl/examples/CMakeTests.cmake  |  67
-rw-r--r--  hl/examples/Makefile.am       |   6
-rw-r--r--  hl/examples/Makefile.in       |  32
-rw-r--r--  hl/examples/ex_ds1.c          | 174
-rw-r--r--  hl/examples/ex_image1.c       |  87
-rw-r--r--  hl/examples/ex_image2.c       | 272
-rw-r--r--  hl/examples/ex_lite1.c        |  29
-rw-r--r--  hl/examples/ex_lite2.c        |  52
-rw-r--r--  hl/examples/ex_lite3.c        |  72
-rw-r--r--  hl/examples/ex_table_01.c     | 191
-rw-r--r--  hl/examples/ex_table_02.c     | 182
-rw-r--r--  hl/examples/ex_table_03.c     | 189
-rw-r--r--  hl/examples/ex_table_04.c     | 248
-rw-r--r--  hl/examples/ex_table_05.c     | 244
-rw-r--r--  hl/examples/ex_table_06.c     | 125
-rw-r--r--  hl/examples/ex_table_07.c     | 157
-rw-r--r--  hl/examples/ex_table_08.c     | 207
-rw-r--r--  hl/examples/ex_table_09.c     | 210
-rw-r--r--  hl/examples/ex_table_10.c     | 196
-rw-r--r--  hl/examples/ex_table_11.c     | 166
-rw-r--r--  hl/examples/ex_table_12.c     | 150
-rw-r--r--  hl/examples/pal_rgb.h         | 524
-rw-r--r--  hl/examples/ptExampleFL.c     |  90
-rwxr-xr-x  hl/examples/run-hl-ex.sh      |   2
-rw-r--r--  hl/examples/run-hlc-ex.sh.in  |  29
26 files changed, 1751 insertions, 1975 deletions
diff --git a/hl/examples/CMakeLists.txt b/hl/examples/CMakeLists.txt
index 79dfee1..93ae9e8 100644
--- a/hl/examples/CMakeLists.txt
+++ b/hl/examples/CMakeLists.txt
@@ -1,5 +1,5 @@
-cmake_minimum_required (VERSION 3.10)
-PROJECT (HDF5_HL_EXAMPLES )
+cmake_minimum_required (VERSION 3.12)
+project (HDF5_HL_EXAMPLES C)
#-----------------------------------------------------------------------------
# Define Sources
@@ -28,13 +28,24 @@ set (examples
foreach (example ${examples})
add_executable (hl_ex_${example} ${HDF5_HL_EXAMPLES_SOURCE_DIR}/${example}.c)
- TARGET_C_PROPERTIES (hl_ex_${example} STATIC " " " ")
- target_link_libraries (hl_ex_${example} ${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET})
+ target_include_directories (hl_ex_${example} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
+ if (NOT BUILD_SHARED_LIBS)
+ TARGET_C_PROPERTIES (hl_ex_${example} STATIC)
+ target_link_libraries (hl_ex_${example} PRIVATE ${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET})
+ else ()
+ TARGET_C_PROPERTIES (hl_ex_${example} SHARED)
+ target_link_libraries (hl_ex_${example} PRIVATE ${HDF5_HL_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
+ endif ()
set_target_properties (hl_ex_${example} PROPERTIES FOLDER examples/hl)
-endforeach ()
-if (BUILD_TESTING)
+ #-----------------------------------------------------------------------------
+ # Add Target to clang-format
+ #-----------------------------------------------------------------------------
+ if (HDF5_ENABLE_FORMATTERS)
+ clang_format (HDF5_HL_EXAMPLES_${example}_FORMAT hl_ex_${example})
+ endif ()
+endforeach ()
+if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL)
include (CMakeTests.cmake)
-
endif ()
diff --git a/hl/examples/CMakeTests.cmake b/hl/examples/CMakeTests.cmake
index 166fa7c..b997096 100644
--- a/hl/examples/CMakeTests.cmake
+++ b/hl/examples/CMakeTests.cmake
@@ -5,7 +5,7 @@
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
+# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
@@ -26,41 +26,39 @@ foreach (h5_file ${HDF5_TEST_FILES})
endforeach ()
add_custom_target(hl_ex_ex_ds1_files ALL COMMENT "Copying files needed by hl_ex_ex_ds1 tests" DEPENDS ${hl_ex_ex_ds1_files_list})
- # Remove any output file left over from previous test run
- add_test (
- NAME HL_ex-clear-objects
- COMMAND ${CMAKE_COMMAND}
- -E remove
- ex_lite1.h5
- ex_lite2.h5
- ex_lite3.h5
- packet_table_FLexample.h5
- ex_image1.h5
- ex_image2.h5
- ex_table_01.h5
- ex_table_02.h5
- ex_table_03.h5
- ex_table_04.h5
- ex_table_05.h5
- ex_table_06.h5
- ex_table_07.h5
- ex_table_08.h5
- ex_table_09.h5
- ex_table_10.h5
- ex_table_11.h5
- ex_table_12.h5
- ex_ds1.h5
- )
- if (NOT "${last_test}" STREQUAL "")
- set_tests_properties (HL_ex-clear-objects PROPERTIES DEPENDS ${last_test})
- endif ()
- set (last_test "HL_ex-clear-objects")
+# Remove any output file left over from previous test run
+add_test (
+ NAME HL_ex-clear-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ex_lite1.h5
+ ex_lite2.h5
+ ex_lite3.h5
+ packet_table_FLexample.h5
+ ex_image1.h5
+ ex_image2.h5
+ ex_table_01.h5
+ ex_table_02.h5
+ ex_table_03.h5
+ ex_table_04.h5
+ ex_table_05.h5
+ ex_table_06.h5
+ ex_table_07.h5
+ ex_table_08.h5
+ ex_table_09.h5
+ ex_table_10.h5
+ ex_table_11.h5
+ ex_table_12.h5
+ ex_ds1.h5
+)
+set_tests_properties (HL_ex-clear-objects PROPERTIES FIXTURES_SETUP clear_HL_ex)
foreach (example ${examples})
if (HDF5_ENABLE_USING_MEMCHECKER)
- add_test (NAME HL_ex_${example} COMMAND $<TARGET_FILE:hl_ex_${example}>)
+ add_test (NAME HL_ex_${example} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:hl_ex_${example}>)
else ()
add_test (NAME HL_ex_${example} COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}"
-D "TEST_PROGRAM=$<TARGET_FILE:hl_ex_${example}>"
-D "TEST_ARGS:STRING="
-D "TEST_EXPECT=0"
@@ -71,8 +69,11 @@ foreach (example ${examples})
-P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake"
)
endif ()
- if (NOT "${last_test}" STREQUAL "")
- set_tests_properties (HL_ex_${example} PROPERTIES DEPENDS ${last_test})
+ if (last_test)
+ set_tests_properties (HL_ex_${example} PROPERTIES
+ DEPENDS ${last_test}
+ FIXTURES_REQUIRED clear_HL_ex
+ )
endif ()
set (last_test "HL_ex_${example}")
endforeach ()
diff --git a/hl/examples/Makefile.am b/hl/examples/Makefile.am
index 29e1a48..0f617bb 100644
--- a/hl/examples/Makefile.am
+++ b/hl/examples/Makefile.am
@@ -6,7 +6,7 @@
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
+# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
@@ -25,8 +25,8 @@ endif
# Example directory
# Note: no '/' after DESTDIR. Explanation in commence.am
-EXAMPLEDIR=${DESTDIR}$(exec_prefix)/share/hdf5_examples/hl/c
-EXAMPLETOPDIR=${DESTDIR}$(exec_prefix)/share/hdf5_examples/hl
+EXAMPLEDIR=$(examplesdir)/hl/c
+EXAMPLETOPDIR=$(examplesdir)/hl
INSTALL_SCRIPT_FILES = run-hlc-ex.sh
INSTALL_TOP_SCRIPT_FILES = run-hl-ex.sh
diff --git a/hl/examples/Makefile.in b/hl/examples/Makefile.in
index 8e10146..cb2d0fc 100644
--- a/hl/examples/Makefile.in
+++ b/hl/examples/Makefile.in
@@ -22,7 +22,7 @@
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
+# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
@@ -353,9 +353,9 @@ AMTAR = @AMTAR@
# AM_CFLAGS is an automake construct which should be used by Makefiles
# instead of CFLAGS, as CFLAGS is reserved solely for the user to define.
# This applies to FCFLAGS, CXXFLAGS, CPPFLAGS, and LDFLAGS as well.
-AM_CFLAGS = @AM_CFLAGS@ @H5_CFLAGS@
+AM_CFLAGS = @AM_CFLAGS@ @H5_CFLAGS@ @H5_ECFLAGS@
AM_CPPFLAGS = @AM_CPPFLAGS@ @H5_CPPFLAGS@
-AM_CXXFLAGS = @AM_CXXFLAGS@ @H5_CXXFLAGS@
+AM_CXXFLAGS = @AM_CXXFLAGS@ @H5_CXXFLAGS@ @H5_ECXXFLAGS@
AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
AM_FCFLAGS = @AM_FCFLAGS@ @H5_FCFLAGS@
AM_LDFLAGS = @AM_LDFLAGS@ @H5_LDFLAGS@
@@ -370,6 +370,7 @@ CC = @CC@
CCDEPMODE = @CCDEPMODE@
CC_VERSION = @CC_VERSION@
CFLAGS = @CFLAGS@
+CLANG_SANITIZE_CHECKS = @CLANG_SANITIZE_CHECKS@
CLEARFILEBUF = @CLEARFILEBUF@
CODESTACK = @CODESTACK@
CONFIG_DATE = @CONFIG_DATE@
@@ -388,6 +389,7 @@ DEFAULT_API_VERSION = @DEFAULT_API_VERSION@
DEFS = @DEFS@
DEPDIR = @DEPDIR@
DEPRECATED_SYMBOLS = @DEPRECATED_SYMBOLS@
+DEV_WARNINGS = @DEV_WARNINGS@
DIRECT_VFD = @DIRECT_VFD@
DLLTOOL = @DLLTOOL@
DSYMUTIL = @DSYMUTIL@
@@ -415,6 +417,8 @@ GREP = @GREP@
H5_CFLAGS = @H5_CFLAGS@
H5_CPPFLAGS = @H5_CPPFLAGS@
H5_CXXFLAGS = @H5_CXXFLAGS@
+H5_ECFLAGS = @H5_ECFLAGS@
+H5_ECXXFLAGS = @H5_ECXXFLAGS@
H5_FCFLAGS = @H5_FCFLAGS@
H5_FORTRAN_SHARED = @H5_FORTRAN_SHARED@
H5_LDFLAGS = @H5_LDFLAGS@
@@ -422,9 +426,12 @@ H5_VERSION = @H5_VERSION@
HADDR_T = @HADDR_T@
HAVE_DMALLOC = @HAVE_DMALLOC@
HAVE_FORTRAN_2003 = @HAVE_FORTRAN_2003@
+HAVE_LIBHDFS = @HAVE_LIBHDFS@
HAVE_PTHREAD = @HAVE_PTHREAD@
HDF5_HL = @HDF5_HL@
HDF5_INTERFACES = @HDF5_INTERFACES@
+HDF5_TESTS = @HDF5_TESTS@
+HDF5_TOOLS = @HDF5_TOOLS@
HDF_CXX = @HDF_CXX@
HDF_FORTRAN = @HDF_FORTRAN@
HDF_FORTRAN2003 = @HDF_FORTRAN2003@
@@ -440,6 +447,7 @@ INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
INSTRUMENT = @INSTRUMENT@
INSTRUMENT_LIBRARY = @INSTRUMENT_LIBRARY@
+JNI_LDFLAGS = @JNI_LDFLAGS@
LD = @LD@
LDFLAGS = @LDFLAGS@
LIBOBJS = @LIBOBJS@
@@ -461,6 +469,7 @@ NMEDIT = @NMEDIT@
OBJDUMP = @OBJDUMP@
OBJECT_NAMELEN_DEFAULT_F = @OBJECT_NAMELEN_DEFAULT_F@
OBJEXT = @OBJEXT@
+OPTIMIZATION = @OPTIMIZATION@
OTOOL = @OTOOL@
OTOOL64 = @OTOOL64@
PACKAGE = @PACKAGE@
@@ -473,8 +482,10 @@ PACKAGE_VERSION = @PACKAGE_VERSION@
PARALLEL = @PARALLEL@
PATH_SEPARATOR = @PATH_SEPARATOR@
PERL = @PERL@
+PROFILING = @PROFILING@
RANLIB = @RANLIB@
ROOT = @ROOT@
+ROS3_VFD = @ROS3_VFD@
RUNPARALLEL = @RUNPARALLEL@
RUNSERIAL = @RUNSERIAL@
R_INTEGER = @R_INTEGER@
@@ -488,6 +499,7 @@ STATIC_EXEC = @STATIC_EXEC@
STATIC_SHARED = @STATIC_SHARED@
STRICT_FORMAT_CHECKS = @STRICT_FORMAT_CHECKS@
STRIP = @STRIP@
+SYMBOLS = @SYMBOLS@
TESTPARALLEL = @TESTPARALLEL@
THREADSAFE = @THREADSAFE@
TIME = @TIME@
@@ -529,6 +541,7 @@ docdir = $(exec_prefix)/doc
dvidir = @dvidir@
enable_shared = @enable_shared@
enable_static = @enable_static@
+examplesdir = @examplesdir@
exec_prefix = @exec_prefix@
fortran_linux_linker_option = @fortran_linux_linker_option@
host = @host@
@@ -616,8 +629,8 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog *.clog2 *.h5
# Example directory
# Note: no '/' after DESTDIR. Explanation in commence.am
-EXAMPLEDIR = ${DESTDIR}$(exec_prefix)/share/hdf5_examples/hl/c
-EXAMPLETOPDIR = ${DESTDIR}$(exec_prefix)/share/hdf5_examples/hl
+EXAMPLEDIR = $(examplesdir)/hl/c
+EXAMPLETOPDIR = $(examplesdir)/hl
INSTALL_SCRIPT_FILES = run-hlc-ex.sh
INSTALL_TOP_SCRIPT_FILES = run-hl-ex.sh
@@ -653,11 +666,11 @@ EXTRA_PROG = $(EXAMPLE_PROG) $(EXAMPLE_PROG_PARA)
MOSTLYCLEANFILES = *.raw *.meta *.o
CLEANFILES = $(EXAMPLE_PROG) $(EXAMPLE_PROG_PARA)
-# Automake needs to be taught how to build lib, dyn, progs, and tests targets.
+# Automake needs to be taught how to build lib, progs and tests targets.
# These will be filled in automatically for the most part (e.g.,
# lib_LIBRARIES are built for lib target), but EXTRA_LIB, EXTRA_PROG, and
# EXTRA_TEST variables are supplied to allow the user to force targets to
-# be built at certain times.
+# be built at certain times.
LIB = $(lib_LIBRARIES) $(lib_LTLIBRARIES) $(noinst_LIBRARIES) \
$(noinst_LTLIBRARIES) $(check_LIBRARIES) $(check_LTLIBRARIES) $(EXTRA_LIB)
@@ -1136,6 +1149,7 @@ installcheck-local:
(cd $(EXAMPLEDIR); \
/bin/sh ./$(TEST_EXAMPLES_SCRIPT);) \
fi
+
# lib/progs/tests targets recurse into subdirectories. build-* targets
# build files in this directory.
build-lib: $(LIB)
@@ -1171,7 +1185,7 @@ all-local: $(EXTRA_LIB) $(EXTRA_PROG) $(chk_TESTS)
# make install-doc doesn't do anything outside of doc directory, but
# Makefiles should recognize it.
# UPDATE: docs no longer reside in this build tree, so this target
-# is depreciated.
+# is deprecated.
install-doc uninstall-doc:
@echo "Nothing to be done."
@@ -1332,7 +1346,7 @@ build-check-p: $(LIB) $(PROGS) $(chk_TESTS)
echo "**** Hint ****"; \
echo "Parallel test files reside in the current directory" \
"by default."; \
- echo "Set HDF5_PARAPREFIX to use another directory. E.g.,"; \
+ echo "Set HDF5_PARAPREFIX to use another directory. e.g.,"; \
echo " HDF5_PARAPREFIX=/PFS/user/me"; \
echo " export HDF5_PARAPREFIX"; \
echo " make check"; \
diff --git a/hl/examples/ex_ds1.c b/hl/examples/ex_ds1.c
index 1e0c592..73578f1 100644
--- a/hl/examples/ex_ds1.c
+++ b/hl/examples/ex_ds1.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -14,99 +14,93 @@
#include "hdf5.h"
#include "hdf5_hl.h"
+#define RANK 2
+#define DIM_DATA 12
+#define DIM1_SIZE 3
+#define DIM2_SIZE 4
+#define DIM0 0
+#define DIM1 1
-#define RANK 2
-#define DIM_DATA 12
-#define DIM1_SIZE 3
-#define DIM2_SIZE 4
-#define DIM0 0
-#define DIM1 1
+#define DSET_NAME "Mydata"
+#define DS_1_NAME "Yaxis"
+#define DS_2_NAME "Xaxis"
-#define DSET_NAME "Mydata"
-#define DS_1_NAME "Yaxis"
-#define DS_2_NAME "Xaxis"
-
-int main(void)
+int
+main(void)
{
- hid_t fid; /* file ID */
- hid_t did; /* dataset ID */
- hid_t dsid; /* DS dataset ID */
- int rank = RANK; /* rank of data dataset */
- int rankds = 1; /* rank of DS dataset */
- hsize_t dims[RANK] = {DIM1_SIZE,DIM2_SIZE}; /* size of data dataset */
- int buf[DIM_DATA] = {1,2,3,4,5,6,7,8,9,10,11,12}; /* data of data dataset */
- hsize_t s1_dim[1] = {DIM1_SIZE}; /* size of DS 1 dataset */
- hsize_t s2_dim[1] = {DIM2_SIZE}; /* size of DS 2 dataset */
- float s1_wbuf[DIM1_SIZE] = {10,20,30}; /* data of DS 1 dataset */
- int s2_wbuf[DIM2_SIZE] = {10,20,50,100}; /* data of DS 2 dataset */
-
-
- /* create a file using default properties */
- if ((fid=H5Fcreate("ex_ds1.h5",H5F_ACC_TRUNC,H5P_DEFAULT,H5P_DEFAULT))<0)
- goto out;
-
- /* make a dataset */
- if (H5LTmake_dataset_int(fid,DSET_NAME,rank,dims,buf)<0)
- goto out;
-
- /* make a DS dataset for the first dimension */
- if (H5LTmake_dataset_float(fid,DS_1_NAME,rankds,s1_dim,s1_wbuf)<0)
- goto out;
-
- /* make a DS dataset for the second dimension */
- if (H5LTmake_dataset_int(fid,DS_2_NAME,rankds,s2_dim,s2_wbuf)<0)
- goto out;
-
-
-/*-------------------------------------------------------------------------
- * attach the DS_1_NAME dimension scale to DSET_NAME at dimension 0
- *-------------------------------------------------------------------------
- */
-
- /* get the dataset id for DSET_NAME */
- if ((did = H5Dopen2(fid,DSET_NAME, H5P_DEFAULT))<0)
- goto out;
-
- /* get the DS dataset id */
- if ((dsid = H5Dopen2(fid,DS_1_NAME, H5P_DEFAULT))<0)
- goto out;
-
- /* attach the DS_1_NAME dimension scale to DSET_NAME at dimension index 0 */
- if (H5DSattach_scale(did,dsid,DIM0)<0)
- goto out;
-
- /* close DS id */
- if (H5Dclose(dsid)<0)
- goto out;
-
-/*-------------------------------------------------------------------------
- * attach the DS_2_NAME dimension scale to DSET_NAME
- *-------------------------------------------------------------------------
- */
-
- /* get the DS dataset id */
- if ((dsid = H5Dopen2(fid,DS_2_NAME, H5P_DEFAULT))<0)
- goto out;
-
- /* attach the DS_2_NAME dimension scale to DSET_NAME as the 2nd dimension (index 1) */
- if (H5DSattach_scale(did,dsid,DIM1)<0)
- goto out;
-
- /* close DS id */
- if (H5Dclose(dsid)<0)
- goto out;
-
- /* close file */
- H5Fclose(fid);
-
- return 0;
+ hid_t fid; /* file ID */
+ hid_t did; /* dataset ID */
+ hid_t dsid; /* DS dataset ID */
+ int rank = RANK; /* rank of data dataset */
+ int rankds = 1; /* rank of DS dataset */
+ hsize_t dims[RANK] = {DIM1_SIZE, DIM2_SIZE}; /* size of data dataset */
+ int buf[DIM_DATA] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; /* data of data dataset */
+ hsize_t s1_dim[1] = {DIM1_SIZE}; /* size of DS 1 dataset */
+ hsize_t s2_dim[1] = {DIM2_SIZE}; /* size of DS 2 dataset */
+ float s1_wbuf[DIM1_SIZE] = {10, 20, 30}; /* data of DS 1 dataset */
+ int s2_wbuf[DIM2_SIZE] = {10, 20, 50, 100}; /* data of DS 2 dataset */
+
+ /* create a file using default properties */
+ if ((fid = H5Fcreate("ex_ds1.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ goto out;
+
+ /* make a dataset */
+ if (H5LTmake_dataset_int(fid, DSET_NAME, rank, dims, buf) < 0)
+ goto out;
+
+ /* make a DS dataset for the first dimension */
+ if (H5LTmake_dataset_float(fid, DS_1_NAME, rankds, s1_dim, s1_wbuf) < 0)
+ goto out;
+
+ /* make a DS dataset for the second dimension */
+ if (H5LTmake_dataset_int(fid, DS_2_NAME, rankds, s2_dim, s2_wbuf) < 0)
+ goto out;
+
+ /*-------------------------------------------------------------------------
+ * attach the DS_1_NAME dimension scale to DSET_NAME at dimension 0
+ *-------------------------------------------------------------------------
+ */
+
+ /* get the dataset id for DSET_NAME */
+ if ((did = H5Dopen2(fid, DSET_NAME, H5P_DEFAULT)) < 0)
+ goto out;
+
+ /* get the DS dataset id */
+ if ((dsid = H5Dopen2(fid, DS_1_NAME, H5P_DEFAULT)) < 0)
+ goto out;
+
+ /* attach the DS_1_NAME dimension scale to DSET_NAME at dimension index 0 */
+ if (H5DSattach_scale(did, dsid, DIM0) < 0)
+ goto out;
+
+ /* close DS id */
+ if (H5Dclose(dsid) < 0)
+ goto out;
+
+ /*-------------------------------------------------------------------------
+ * attach the DS_2_NAME dimension scale to DSET_NAME
+ *-------------------------------------------------------------------------
+ */
+
+ /* get the DS dataset id */
+ if ((dsid = H5Dopen2(fid, DS_2_NAME, H5P_DEFAULT)) < 0)
+ goto out;
+
+ /* attach the DS_2_NAME dimension scale to DSET_NAME as the 2nd dimension (index 1) */
+ if (H5DSattach_scale(did, dsid, DIM1) < 0)
+ goto out;
+
+ /* close DS id */
+ if (H5Dclose(dsid) < 0)
+ goto out;
+
+ /* close file */
+ H5Fclose(fid);
+
+ return 0;
out:
- printf("Error on return function...Exiting\n");
- return 1;
-
+ printf("Error on return function...Exiting\n");
+ return 1;
}
-
-
-
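(Note, not part of the patch: the reformatted ex_ds1.c above attaches two dimension scales with H5DSattach_scale. A minimal companion sketch that verifies the attachment afterwards, assuming ex_ds1.h5 was produced by the example in the current directory; error checking is omitted for brevity.)

#include "hdf5.h"
#include "hdf5_hl.h"
#include <stdio.h>

int
main(void)
{
    hid_t fid  = H5Fopen("ex_ds1.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    hid_t did  = H5Dopen2(fid, "Mydata", H5P_DEFAULT);
    hid_t dsid = H5Dopen2(fid, "Yaxis", H5P_DEFAULT);

    /* H5DSis_attached returns a positive value if dsid is attached to
     * dimension 0 of did, 0 if it is not, and a negative value on error */
    if (H5DSis_attached(did, dsid, 0) > 0)
        printf("Yaxis is attached to dimension 0 of Mydata\n");

    H5Dclose(dsid);
    H5Dclose(did);
    H5Fclose(fid);
    return 0;
}

(Build it the same way as the examples themselves, linking against both the HDF5 and HDF5 high-level libraries.)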
diff --git a/hl/examples/ex_image1.c b/hl/examples/ex_image1.c
index 56a175d..ead1715 100644
--- a/hl/examples/ex_image1.c
+++ b/hl/examples/ex_image1.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -14,56 +14,55 @@
#include "hdf5.h"
#include "hdf5_hl.h"
-#define WIDTH 400
-#define HEIGHT 200
-#define PAL_ENTRIES 9
-unsigned char buf [ WIDTH*HEIGHT ];
+#define WIDTH 400
+#define HEIGHT 200
+#define PAL_ENTRIES 9
+unsigned char buf[WIDTH * HEIGHT];
-int main( void )
+int
+main(void)
{
- hid_t file_id;
- hsize_t pal_dims[] = {PAL_ENTRIES,3};
- size_t i, j;
- int n, space;
- unsigned char pal[PAL_ENTRIES*3] = { /* create a palette with 9 colors */
- 0,0,168, /* dark blue */
- 0,0,252, /* blue */
- 0,168,252, /* ocean blue */
- 84,252,252, /* light blue */
- 168,252,168, /* light green */
- 0,252,168, /* green */
- 252,252,84, /* yellow */
- 252,168,0, /* orange */
- 252,0,0}; /* red */
+ hid_t file_id;
+ hsize_t pal_dims[] = {PAL_ENTRIES, 3};
+ size_t i, j;
+ int n, space;
+ unsigned char pal[PAL_ENTRIES * 3] = { /* create a palette with 9 colors */
+ 0, 0, 168, /* dark blue */
+ 0, 0, 252, /* blue */
+ 0, 168, 252, /* ocean blue */
+ 84, 252, 252, /* light blue */
+ 168, 252, 168, /* light green */
+ 0, 252, 168, /* green */
+ 252, 252, 84, /* yellow */
+ 252, 168, 0, /* orange */
+ 252, 0, 0}; /* red */
- /* create an image of 9 values divided evenly by the array */
- space = WIDTH*HEIGHT / PAL_ENTRIES;
- for (i=0, j=0, n=0; i < WIDTH*HEIGHT; i++, j++ )
- {
- buf[i] = n;
- if ( j > space )
- {
- n++;
- j=0;
- }
- if (n>PAL_ENTRIES-1) n=0;
- }
+ /* create an image of 9 values divided evenly by the array */
+ space = WIDTH * HEIGHT / PAL_ENTRIES;
+ for (i = 0, j = 0, n = 0; i < WIDTH * HEIGHT; i++, j++) {
+ buf[i] = n;
+ if (j > space) {
+ n++;
+ j = 0;
+ }
+ if (n > PAL_ENTRIES - 1)
+ n = 0;
+ }
- /* create a new HDF5 file using default properties. */
- file_id = H5Fcreate( "ex_image1.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
+ /* create a new HDF5 file using default properties. */
+ file_id = H5Fcreate("ex_image1.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
- /* make the image */
- H5IMmake_image_8bit( file_id, "image1", (hsize_t)WIDTH, (hsize_t)HEIGHT, buf );
+ /* make the image */
+ H5IMmake_image_8bit(file_id, "image1", (hsize_t)WIDTH, (hsize_t)HEIGHT, buf);
- /* make a palette */
- H5IMmake_palette( file_id, "pallete", pal_dims, pal );
+ /* make a palette */
+ H5IMmake_palette(file_id, "pallete", pal_dims, pal);
- /* attach the palette to the image */
- H5IMlink_palette( file_id, "image1", "pallete" );
+ /* attach the palette to the image */
+ H5IMlink_palette(file_id, "image1", "pallete");
- /* close the file. */
- H5Fclose( file_id );
-
- return 0;
+ /* close the file. */
+ H5Fclose(file_id);
+ return 0;
}
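(Note, not part of the patch: ex_image1.c above writes an 8-bit image plus a palette. A small sketch that reads the image metadata back with H5IMget_image_info, assuming ex_image1.h5 already exists; the interlace buffer is only there to satisfy the call for this indexed image.)

#include "hdf5.h"
#include "hdf5_hl.h"
#include <stdio.h>

int
main(void)
{
    hid_t    fid = H5Fopen("ex_image1.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    hsize_t  width, height, planes;
    hssize_t npals;
    char     interlace[32];

    /* query dimensions, number of planes and number of attached palettes of "image1" */
    H5IMget_image_info(fid, "image1", &width, &height, &planes, interlace, &npals);
    printf("image1: %llu x %llu, %llu plane(s), %lld palette(s)\n",
           (unsigned long long)width, (unsigned long long)height,
           (unsigned long long)planes, (long long)npals);

    H5Fclose(fid);
    return 0;
}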
diff --git a/hl/examples/ex_image2.c b/hl/examples/ex_image2.c
index 5abf723..be9fc4b 100644
--- a/hl/examples/ex_image2.c
+++ b/hl/examples/ex_image2.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -16,90 +16,89 @@
#include <stdlib.h>
#include <string.h>
-#define DATA_FILE1 "image8.txt"
-#define DATA_FILE2 "image24pixel.txt"
-#define IMAGE1_NAME "image8bit"
-#define IMAGE2_NAME "image24bitpixel"
-#define PAL_NAME "palette"
-#define PAL_ENTRIES 256
+#define DATA_FILE1 "image8.txt"
+#define DATA_FILE2 "image24pixel.txt"
+#define IMAGE1_NAME "image8bit"
+#define IMAGE2_NAME "image24bitpixel"
+#define PAL_NAME "palette"
+#define PAL_ENTRIES 256
-static int read_data(const char* file_name, hsize_t *width, hsize_t *height );
-unsigned char *gbuf = NULL; /* global buffer for image data */
+static int read_data(const char *file_name, hsize_t *width, hsize_t *height);
+unsigned char *gbuf = NULL; /* global buffer for image data */
-int main( void )
+int
+main(void)
{
- hid_t file_id; /* HDF5 file identifier */
- hsize_t width; /* width of image */
- hsize_t height; /* height of image */
- unsigned char pal[ PAL_ENTRIES * 3 ]; /* palette array */
- hsize_t pal_dims[2] = {PAL_ENTRIES,3}; /* palette dimensions */
- herr_t i, n;
-
- /* create a new HDF5 file using default properties. */
- file_id = H5Fcreate( "ex_image2.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
-
- /* read first data file */
- if (read_data(DATA_FILE1,&width,&height)<0)
- goto out;
-
- /* make the image */
- H5IMmake_image_8bit( file_id, IMAGE1_NAME, width, height, gbuf );
- if (gbuf) {
- free(gbuf);
- gbuf = NULL;
- }
-
-/*-------------------------------------------------------------------------
- * define a palette, blue to red tones
- *-------------------------------------------------------------------------
- */
- for ( i=0, n=0; i<PAL_ENTRIES*3; i+=3, n++)
- {
- pal[i] =n; /* red */
- pal[i+1]=0; /* green */
- pal[i+2]=255-n; /* blue */
- }
-
- /* make a palette */
- H5IMmake_palette( file_id, PAL_NAME, pal_dims, pal );
-
- /* attach the palette to the image */
- H5IMlink_palette( file_id, IMAGE1_NAME, PAL_NAME );
-
-/*-------------------------------------------------------------------------
- * True color image example with pixel interlace
- *-------------------------------------------------------------------------
- */
-
- /* read second data file */
- if (read_data(DATA_FILE2,&width,&height)<0)
- goto out;
-
- /* make dataset */
- H5IMmake_image_24bit( file_id, IMAGE2_NAME, width, height, "INTERLACE_PIXEL", gbuf );
-
- /* close the file. */
- H5Fclose( file_id );
-
- if(gbuf) {
- free(gbuf);
- gbuf = NULL;
- }
-
- return 0;
+ hid_t file_id; /* HDF5 file identifier */
+ hsize_t width; /* width of image */
+ hsize_t height; /* height of image */
+ unsigned char pal[PAL_ENTRIES * 3]; /* palette array */
+ hsize_t pal_dims[2] = {PAL_ENTRIES, 3}; /* palette dimensions */
+ herr_t i, n;
+
+ /* create a new HDF5 file using default properties. */
+ file_id = H5Fcreate("ex_image2.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* read first data file */
+ if (read_data(DATA_FILE1, &width, &height) < 0)
+ goto out;
+
+ /* make the image */
+ H5IMmake_image_8bit(file_id, IMAGE1_NAME, width, height, gbuf);
+ if (gbuf) {
+ free(gbuf);
+ gbuf = NULL;
+ }
+
+ /*-------------------------------------------------------------------------
+ * define a palette, blue to red tones
+ *-------------------------------------------------------------------------
+ */
+ for (i = 0, n = 0; i < PAL_ENTRIES * 3; i += 3, n++) {
+ pal[i] = n; /* red */
+ pal[i + 1] = 0; /* green */
+ pal[i + 2] = 255 - n; /* blue */
+ }
+
+ /* make a palette */
+ H5IMmake_palette(file_id, PAL_NAME, pal_dims, pal);
+
+ /* attach the palette to the image */
+ H5IMlink_palette(file_id, IMAGE1_NAME, PAL_NAME);
+
+ /*-------------------------------------------------------------------------
+ * True color image example with pixel interlace
+ *-------------------------------------------------------------------------
+ */
+
+ /* read second data file */
+ if (read_data(DATA_FILE2, &width, &height) < 0)
+ goto out;
+
+ /* make dataset */
+ H5IMmake_image_24bit(file_id, IMAGE2_NAME, width, height, "INTERLACE_PIXEL", gbuf);
+
+ /* close the file. */
+ H5Fclose(file_id);
+
+ if (gbuf) {
+ free(gbuf);
+ gbuf = NULL;
+ }
+
+ return 0;
out:
- printf("Error on return function...Exiting\n");
+ printf("Error on return function...Exiting\n");
- if(gbuf) {
- free(gbuf);
- gbuf = NULL;
- }
+ if (gbuf) {
+ free(gbuf);
+ gbuf = NULL;
+ }
- return 1;
+ return 1;
}
-
/*-------------------------------------------------------------------------
* read_data
* utility function to read ASCII image data
@@ -117,68 +116,63 @@ out:
*-------------------------------------------------------------------------
*/
-static int read_data( const char* fname, /*IN*/
- hsize_t *width, /*OUT*/
- hsize_t *height /*OUT*/ )
+static int
+read_data(const char *fname, /*IN*/
+ hsize_t * width, /*OUT*/
+ hsize_t * height /*OUT*/)
{
- int i, n;
- int color_planes;
- char str[20];
- FILE *f;
- int w, h;
- char *srcdir = getenv("srcdir"); /* the source directory */
- char data_file[512]=""; /* buffer to hold name of existing data file */
-
-/*-------------------------------------------------------------------------
- * compose the name of the file to open, using "srcdir", if appropriate
- *-------------------------------------------------------------------------
- */
- strcpy(data_file, "");
- if (srcdir)
- {
- strcpy(data_file, srcdir);
- strcat(data_file, "/");
- }
- strcat(data_file,fname);
-
-/*-------------------------------------------------------------------------
- * read
- *-------------------------------------------------------------------------
- */
-
- f = fopen(data_file, "r");
- if ( f == NULL )
- {
- printf( "Could not open file %s. Try set $srcdir \n", data_file );
- return -1;
- }
-
- fscanf( f, "%s", str );
- fscanf( f, "%d", &color_planes );
- fscanf( f, "%s", str );
- fscanf( f, "%d", &h);
- fscanf( f, "%s", str );
- fscanf( f, "%d", &w);
-
- *width = (hsize_t)w;
- *height = (hsize_t)h;
-
- if ( gbuf )
- {
- free( gbuf );
- gbuf=NULL;
- }
-
- gbuf = (unsigned char*) malloc (w * h * color_planes * sizeof( unsigned char ));
-
- for (i = 0; i < h * w * color_planes ; i++)
- {
- fscanf( f, "%d",&n );
- gbuf[i] = (unsigned char)n;
- }
- fclose(f);
-
- return 1;
-
+ int i, n;
+ int color_planes;
+ char str[20];
+ FILE *f;
+ int w, h;
+ char *srcdir = getenv("srcdir"); /* the source directory */
+ char data_file[512] = ""; /* buffer to hold name of existing data file */
+
+ /*-------------------------------------------------------------------------
+ * compose the name of the file to open, using "srcdir", if appropriate
+ *-------------------------------------------------------------------------
+ */
+ strcpy(data_file, "");
+ if (srcdir) {
+ strcpy(data_file, srcdir);
+ strcat(data_file, "/");
+ }
+ strcat(data_file, fname);
+
+ /*-------------------------------------------------------------------------
+ * read
+ *-------------------------------------------------------------------------
+ */
+
+ f = fopen(data_file, "r");
+ if (f == NULL) {
+ printf("Could not open file %s. Try set $srcdir \n", data_file);
+ return -1;
+ }
+
+ fscanf(f, "%s", str);
+ fscanf(f, "%d", &color_planes);
+ fscanf(f, "%s", str);
+ fscanf(f, "%d", &h);
+ fscanf(f, "%s", str);
+ fscanf(f, "%d", &w);
+
+ *width = (hsize_t)w;
+ *height = (hsize_t)h;
+
+ if (gbuf) {
+ free(gbuf);
+ gbuf = NULL;
+ }
+
+ gbuf = (unsigned char *)malloc(w * h * color_planes * sizeof(unsigned char));
+
+ for (i = 0; i < h * w * color_planes; i++) {
+ fscanf(f, "%d", &n);
+ gbuf[i] = (unsigned char)n;
+ }
+ fclose(f);
+
+ return 1;
}
-
diff --git a/hl/examples/ex_lite1.c b/hl/examples/ex_lite1.c
index 89f60dc..b28be70 100644
--- a/hl/examples/ex_lite1.c
+++ b/hl/examples/ex_lite1.c
@@ -6,34 +6,31 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
#include "hdf5.h"
#include "hdf5_hl.h"
#define RANK 2
-
-int main( void )
+int
+main(void)
{
- hid_t file_id;
- hsize_t dims[RANK]={2,3};
- int data[6]={1,2,3,4,5,6};
+ hid_t file_id;
+ hsize_t dims[RANK] = {2, 3};
+ int data[6] = {1, 2, 3, 4, 5, 6};
- /* create a HDF5 file */
- file_id = H5Fcreate ("ex_lite1.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ /* create a HDF5 file */
+ file_id = H5Fcreate("ex_lite1.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
- /* create and write an integer type dataset named "dset" */
- H5LTmake_dataset(file_id,"/dset",RANK,dims,H5T_NATIVE_INT,data);
+ /* create and write an integer type dataset named "dset" */
+ H5LTmake_dataset(file_id, "/dset", RANK, dims, H5T_NATIVE_INT, data);
- /* close file */
- H5Fclose (file_id);
+ /* close file */
+ H5Fclose(file_id);
- return 0;
+ return 0;
}
-
-
diff --git a/hl/examples/ex_lite2.c b/hl/examples/ex_lite2.c
index 261fc73..a0c2a65 100644
--- a/hl/examples/ex_lite2.c
+++ b/hl/examples/ex_lite2.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -14,38 +14,34 @@
#include "hdf5.h"
#include "hdf5_hl.h"
-int main( void )
+int
+main(void)
{
- hid_t file_id;
- int data[6];
- hsize_t dims[2];
- size_t i, j, nrow, n_values;
+ hid_t file_id;
+ int data[6];
+ hsize_t dims[2];
+ size_t i, j, nrow, n_values;
- /* open file from ex_lite1.c */
- file_id = H5Fopen ("ex_lite1.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
+ /* open file from ex_lite1.c */
+ file_id = H5Fopen("ex_lite1.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
- /* read dataset */
- H5LTread_dataset_int(file_id,"/dset",data);
+ /* read dataset */
+ H5LTread_dataset_int(file_id, "/dset", data);
- /* get the dimensions of the dataset */
- H5LTget_dataset_info(file_id,"/dset",dims,NULL,NULL);
+ /* get the dimensions of the dataset */
+ H5LTget_dataset_info(file_id, "/dset", dims, NULL, NULL);
- /* print it by rows */
- n_values = (size_t)(dims[0] * dims[1]);
- nrow = (size_t)dims[1];
- for (i=0; i<n_values/nrow; i++ )
- {
- for (j=0; j<nrow; j++)
- printf (" %d", data[i*nrow + j]);
- printf ("\n");
- }
-
- /* close file */
- H5Fclose (file_id);
-
- return 0;
+ /* print it by rows */
+ n_values = (size_t)(dims[0] * dims[1]);
+ nrow = (size_t)dims[1];
+ for (i = 0; i < n_values / nrow; i++) {
+ for (j = 0; j < nrow; j++)
+ printf(" %d", data[i * nrow + j]);
+ printf("\n");
+ }
+ /* close file */
+ H5Fclose(file_id);
+ return 0;
}
-
-
diff --git a/hl/examples/ex_lite3.c b/hl/examples/ex_lite3.c
index 420cbcb..43291b7 100644
--- a/hl/examples/ex_lite3.c
+++ b/hl/examples/ex_lite3.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -17,51 +17,51 @@
#define ATTR_SIZE 5
-int main( void )
+int
+main(void)
{
- hid_t file_id;
- hid_t dset_id;
- hid_t space_id;
- hsize_t dims[1] = { ATTR_SIZE };
- int data[ATTR_SIZE] = {1,2,3,4,5};
- int i;
+ hid_t file_id;
+ hid_t dset_id;
+ hid_t space_id;
+ hsize_t dims[1] = {ATTR_SIZE};
+ int data[ATTR_SIZE] = {1, 2, 3, 4, 5};
+ int i;
- /* create a file */
- file_id = H5Fcreate("ex_lite3.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ /* create a file */
+ file_id = H5Fcreate("ex_lite3.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
- /* create a data space */
- space_id = H5Screate_simple(1, dims, NULL);
+ /* create a data space */
+ space_id = H5Screate_simple(1, dims, NULL);
- /* create a dataset named "dset" */
- dset_id = H5Dcreate2(file_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ /* create a dataset named "dset" */
+ dset_id = H5Dcreate2(file_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
- /* close */
- H5Dclose(dset_id);
- H5Sclose(space_id);
+ /* close */
+ H5Dclose(dset_id);
+ H5Sclose(space_id);
-/*-------------------------------------------------------------------------
- * example of H5LTset_attribute_int
- *-------------------------------------------------------------------------
- */
+ /*-------------------------------------------------------------------------
+ * example of H5LTset_attribute_int
+ *-------------------------------------------------------------------------
+ */
- /* create and write the attribute "attr1" on the dataset "dset" */
- H5LTset_attribute_int(file_id, "dset", "attr1", data, ATTR_SIZE);
+ /* create and write the attribute "attr1" on the dataset "dset" */
+ H5LTset_attribute_int(file_id, "dset", "attr1", data, ATTR_SIZE);
-/*-------------------------------------------------------------------------
- * example of H5LTget_attribute_int
- *-------------------------------------------------------------------------
- */
+ /*-------------------------------------------------------------------------
+ * example of H5LTget_attribute_int
+ *-------------------------------------------------------------------------
+ */
- /* get the attribute "attr1" from the dataset "dset" */
- H5LTget_attribute_int(file_id, "dset", "attr1", data);
+ /* get the attribute "attr1" from the dataset "dset" */
+ H5LTget_attribute_int(file_id, "dset", "attr1", data);
- for(i = 0; i < ATTR_SIZE; i++ )
- printf(" %d", data[i]);
- printf("\n");
+ for (i = 0; i < ATTR_SIZE; i++)
+ printf(" %d", data[i]);
+ printf("\n");
- /* close file */
- H5Fclose(file_id);
+ /* close file */
+ H5Fclose(file_id);
- return 0;
+ return 0;
}
-
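(Note, not part of the patch: ex_lite3.c above writes and reads an integer attribute. As a companion, a sketch that queries the attribute's layout with H5LTget_attribute_info, assuming ex_lite3.h5 exists and that "attr1" is one-dimensional, as the example creates it.)

#include "hdf5.h"
#include "hdf5_hl.h"
#include <stdio.h>

int
main(void)
{
    hid_t       fid = H5Fopen("ex_lite3.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    hsize_t     dims[1];
    H5T_class_t type_class;
    size_t      type_size;

    /* dimensions, datatype class and element size of "attr1" on "dset" */
    H5LTget_attribute_info(fid, "dset", "attr1", dims, &type_class, &type_size);
    printf("attr1: %llu element(s), class %d, %zu bytes per element\n",
           (unsigned long long)dims[0], (int)type_class, type_size);

    H5Fclose(fid);
    return 0;
}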
diff --git a/hl/examples/ex_table_01.c b/hl/examples/ex_table_01.c
index f1d0266..8635acf 100644
--- a/hl/examples/ex_table_01.c
+++ b/hl/examples/ex_table_01.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -24,112 +24,91 @@
*-------------------------------------------------------------------------
*/
-#define NFIELDS (hsize_t) 5
-#define NRECORDS (hsize_t) 8
+#define NFIELDS (hsize_t)5
+#define NRECORDS (hsize_t)8
#define TABLE_NAME "table"
-
-int main( void )
+int
+main(void)
{
- typedef struct Particle
- {
- char name[16];
- int lati;
- int longi;
- float pressure;
- double temperature;
- } Particle;
-
- Particle dst_buf[NRECORDS];
-
- /* Calculate the size and the offsets of our struct members in memory */
- size_t dst_size = sizeof( Particle );
- size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
- HOFFSET( Particle, lati ),
- HOFFSET( Particle, longi ),
- HOFFSET( Particle, pressure ),
- HOFFSET( Particle, temperature )};
-
- size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
- sizeof( dst_buf[0].lati),
- sizeof( dst_buf[0].longi),
- sizeof( dst_buf[0].pressure),
- sizeof( dst_buf[0].temperature)};
-
-
- /* Define an array of Particles */
- Particle p_data[NRECORDS] = {
- {"zero",0,0, 0.0f, 0.0},
- {"one",10,10, 1.0f, 10.0},
- {"two", 20,20, 2.0f, 20.0},
- {"three",30,30, 3.0f, 30.0},
- {"four", 40,40, 4.0f, 40.0},
- {"five", 50,50, 5.0f, 50.0},
- {"six", 60,60, 6.0f, 60.0},
- {"seven",70,70, 7.0f, 70.0}
- };
-
- /* Define field information */
- const char *field_names[NFIELDS] =
- { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
- hid_t field_type[NFIELDS];
- hid_t string_type;
- hid_t file_id;
- hsize_t chunk_size = 10;
- int *fill_data = NULL;
- int compress = 0;
- int i;
-
- /* Initialize field_type */
- string_type = H5Tcopy( H5T_C_S1 );
- H5Tset_size( string_type, 16 );
- field_type[0] = string_type;
- field_type[1] = H5T_NATIVE_INT;
- field_type[2] = H5T_NATIVE_INT;
- field_type[3] = H5T_NATIVE_FLOAT;
- field_type[4] = H5T_NATIVE_DOUBLE;
-
- /* Create a new file using default properties. */
- file_id = H5Fcreate( "ex_table_01.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
-
-/*-------------------------------------------------------------------------
- * H5TBmake_table
- *-------------------------------------------------------------------------
- */
-
- H5TBmake_table( "Table Title", file_id, TABLE_NAME,NFIELDS,NRECORDS,
- dst_size,field_names, dst_offset, field_type,
- chunk_size, fill_data, compress, p_data );
-
-/*-------------------------------------------------------------------------
- * H5TBread_table
- *-------------------------------------------------------------------------
- */
-
- H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
-
- /* print it by rows */
- for (i=0; i<NRECORDS; i++) {
- printf ("%-5s %-5d %-5d %-5f %-5f",
- dst_buf[i].name,
- dst_buf[i].lati,
- dst_buf[i].longi,
- dst_buf[i].pressure,
- dst_buf[i].temperature);
- printf ("\n");
- }
-
-/*-------------------------------------------------------------------------
- * end
- *-------------------------------------------------------------------------
- */
-
- /* close type */
- H5Tclose( string_type );
-
- /* close the file */
- H5Fclose( file_id );
-
- return 0;
+ typedef struct Particle {
+ char name[16];
+ int lati;
+ int longi;
+ float pressure;
+ double temperature;
+ } Particle;
+
+ Particle dst_buf[NRECORDS];
+
+ /* Calculate the size and the offsets of our struct members in memory */
+ size_t dst_size = sizeof(Particle);
+ size_t dst_offset[NFIELDS] = {HOFFSET(Particle, name), HOFFSET(Particle, lati), HOFFSET(Particle, longi),
+ HOFFSET(Particle, pressure), HOFFSET(Particle, temperature)};
+
+ size_t dst_sizes[NFIELDS] = {sizeof(dst_buf[0].name), sizeof(dst_buf[0].lati), sizeof(dst_buf[0].longi),
+ sizeof(dst_buf[0].pressure), sizeof(dst_buf[0].temperature)};
+
+ /* Define an array of Particles */
+ Particle p_data[NRECORDS] = {{"zero", 0, 0, 0.0f, 0.0}, {"one", 10, 10, 1.0f, 10.0},
+ {"two", 20, 20, 2.0f, 20.0}, {"three", 30, 30, 3.0f, 30.0},
+ {"four", 40, 40, 4.0f, 40.0}, {"five", 50, 50, 5.0f, 50.0},
+ {"six", 60, 60, 6.0f, 60.0}, {"seven", 70, 70, 7.0f, 70.0}};
+
+ /* Define field information */
+ const char *field_names[NFIELDS] = {"Name", "Latitude", "Longitude", "Pressure", "Temperature"};
+ hid_t field_type[NFIELDS];
+ hid_t string_type;
+ hid_t file_id;
+ hsize_t chunk_size = 10;
+ int * fill_data = NULL;
+ int compress = 0;
+ int i;
+
+ /* Initialize field_type */
+ string_type = H5Tcopy(H5T_C_S1);
+ H5Tset_size(string_type, 16);
+ field_type[0] = string_type;
+ field_type[1] = H5T_NATIVE_INT;
+ field_type[2] = H5T_NATIVE_INT;
+ field_type[3] = H5T_NATIVE_FLOAT;
+ field_type[4] = H5T_NATIVE_DOUBLE;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate("ex_table_01.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /*-------------------------------------------------------------------------
+ * H5TBmake_table
+ *-------------------------------------------------------------------------
+ */
+
+ H5TBmake_table("Table Title", file_id, TABLE_NAME, NFIELDS, NRECORDS, dst_size, field_names, dst_offset,
+ field_type, chunk_size, fill_data, compress, p_data);
+
+ /*-------------------------------------------------------------------------
+ * H5TBread_table
+ *-------------------------------------------------------------------------
+ */
+
+ H5TBread_table(file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf);
+
+ /* print it by rows */
+ for (i = 0; i < NRECORDS; i++) {
+ printf("%-5s %-5d %-5d %-5f %-5f", dst_buf[i].name, dst_buf[i].lati, dst_buf[i].longi,
+ dst_buf[i].pressure, dst_buf[i].temperature);
+ printf("\n");
+ }
+
+ /*-------------------------------------------------------------------------
+ * end
+ *-------------------------------------------------------------------------
+ */
+
+ /* close type */
+ H5Tclose(string_type);
+
+ /* close the file */
+ H5Fclose(file_id);
+
+ return 0;
}
-
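(Note, not part of the patch: ex_table_01.c above creates a table and reads it back whole. A minimal sketch that instead queries the table's shape with H5TBget_table_info, assuming ex_table_01.h5 exists in the current directory.)

#include "hdf5.h"
#include "hdf5_hl.h"
#include <stdio.h>

int
main(void)
{
    hid_t   fid = H5Fopen("ex_table_01.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    hsize_t nfields, nrecords;

    /* number of fields (columns) and records (rows) in "table" */
    H5TBget_table_info(fid, "table", &nfields, &nrecords);
    printf("table: %llu field(s), %llu record(s)\n",
           (unsigned long long)nfields, (unsigned long long)nrecords);

    H5Fclose(fid);
    return 0;
}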
diff --git a/hl/examples/ex_table_02.c b/hl/examples/ex_table_02.c
index 923f810..9c476b3 100644
--- a/hl/examples/ex_table_02.c
+++ b/hl/examples/ex_table_02.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -23,107 +23,85 @@
*-------------------------------------------------------------------------
*/
-#define NFIELDS (hsize_t) 5
-#define NRECORDS (hsize_t) 8
-#define NRECORDS_ADD (hsize_t) 2
-#define TABLE_NAME "table"
+#define NFIELDS (hsize_t)5
+#define NRECORDS (hsize_t)8
+#define NRECORDS_ADD (hsize_t)2
+#define TABLE_NAME "table"
-int main( void )
+int
+main(void)
{
- typedef struct Particle
- {
- char name[16];
- int lati;
- int longi;
- float pressure;
- double temperature;
- } Particle;
-
- Particle dst_buf[NRECORDS+NRECORDS_ADD];
-
-/* Define an array of Particles */
- Particle p_data[NRECORDS] = {
- {"zero",0,0, 0.0f, 0.0},
- {"one",10,10, 1.0f, 10.0},
- {"two", 20,20, 2.0f, 20.0},
- {"three",30,30, 3.0f, 30.0},
- {"four", 40,40, 4.0f, 40.0},
- {"five", 50,50, 5.0f, 50.0},
- {"six", 60,60, 6.0f, 60.0},
- {"seven",70,70, 7.0f, 70.0}
- };
-
- /* Calculate the size and the offsets of our struct members in memory */
- size_t dst_size = sizeof( Particle );
- size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
- HOFFSET( Particle, lati ),
- HOFFSET( Particle, longi ),
- HOFFSET( Particle, pressure ),
- HOFFSET( Particle, temperature )};
-
- size_t dst_sizes[NFIELDS] = { sizeof( p_data[0].name),
- sizeof( p_data[0].lati),
- sizeof( p_data[0].longi),
- sizeof( p_data[0].pressure),
- sizeof( p_data[0].temperature)};
-
- /* Define field information */
- const char *field_names[NFIELDS] =
- { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
- hid_t field_type[NFIELDS];
- hid_t string_type;
- hid_t file_id;
- hsize_t chunk_size = 10;
- int *fill_data = NULL;
- int compress = 0;
- int i;
-
- /* Append particles */
- Particle particle_in[ NRECORDS_ADD ] =
- {{ "eight",80,80, 8.0f, 80.0},
- {"nine",90,90, 9.0f, 90.0} };
-
- /* Initialize the field field_type */
- string_type = H5Tcopy( H5T_C_S1 );
- H5Tset_size( string_type, 16 );
- field_type[0] = string_type;
- field_type[1] = H5T_NATIVE_INT;
- field_type[2] = H5T_NATIVE_INT;
- field_type[3] = H5T_NATIVE_FLOAT;
- field_type[4] = H5T_NATIVE_DOUBLE;
-
- /* Create a new file using default properties. */
- file_id = H5Fcreate( "ex_table_02.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
-
- /* make a table */
- H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
- dst_size, field_names, dst_offset, field_type,
- chunk_size, fill_data, compress, p_data );
-
- /* append two records */
- H5TBappend_records(file_id, TABLE_NAME,NRECORDS_ADD, dst_size, dst_offset, dst_sizes,
- &particle_in );
-
- /* read the table */
- H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
-
- /* print it by rows */
- for (i=0; i<NRECORDS+NRECORDS_ADD; i++) {
- printf ("%-5s %-5d %-5d %-5f %-5f",
- dst_buf[i].name,
- dst_buf[i].lati,
- dst_buf[i].longi,
- dst_buf[i].pressure,
- dst_buf[i].temperature);
- printf ("\n");
- }
-
- /* close type */
- H5Tclose( string_type );
-
- /* close the file */
- H5Fclose( file_id );
-
- return 0;
+ typedef struct Particle {
+ char name[16];
+ int lati;
+ int longi;
+ float pressure;
+ double temperature;
+ } Particle;
+
+ Particle dst_buf[NRECORDS + NRECORDS_ADD];
+
+ /* Define an array of Particles */
+ Particle p_data[NRECORDS] = {{"zero", 0, 0, 0.0f, 0.0}, {"one", 10, 10, 1.0f, 10.0},
+ {"two", 20, 20, 2.0f, 20.0}, {"three", 30, 30, 3.0f, 30.0},
+ {"four", 40, 40, 4.0f, 40.0}, {"five", 50, 50, 5.0f, 50.0},
+ {"six", 60, 60, 6.0f, 60.0}, {"seven", 70, 70, 7.0f, 70.0}};
+
+ /* Calculate the size and the offsets of our struct members in memory */
+ size_t dst_size = sizeof(Particle);
+ size_t dst_offset[NFIELDS] = {HOFFSET(Particle, name), HOFFSET(Particle, lati), HOFFSET(Particle, longi),
+ HOFFSET(Particle, pressure), HOFFSET(Particle, temperature)};
+
+ size_t dst_sizes[NFIELDS] = {sizeof(p_data[0].name), sizeof(p_data[0].lati), sizeof(p_data[0].longi),
+ sizeof(p_data[0].pressure), sizeof(p_data[0].temperature)};
+
+ /* Define field information */
+ const char *field_names[NFIELDS] = {"Name", "Latitude", "Longitude", "Pressure", "Temperature"};
+ hid_t field_type[NFIELDS];
+ hid_t string_type;
+ hid_t file_id;
+ hsize_t chunk_size = 10;
+ int * fill_data = NULL;
+ int compress = 0;
+ int i;
+
+ /* Append particles */
+ Particle particle_in[NRECORDS_ADD] = {{"eight", 80, 80, 8.0f, 80.0}, {"nine", 90, 90, 9.0f, 90.0}};
+
+ /* Initialize the field field_type */
+ string_type = H5Tcopy(H5T_C_S1);
+ H5Tset_size(string_type, 16);
+ field_type[0] = string_type;
+ field_type[1] = H5T_NATIVE_INT;
+ field_type[2] = H5T_NATIVE_INT;
+ field_type[3] = H5T_NATIVE_FLOAT;
+ field_type[4] = H5T_NATIVE_DOUBLE;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate("ex_table_02.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* make a table */
+ H5TBmake_table("Table Title", file_id, TABLE_NAME, NFIELDS, NRECORDS, dst_size, field_names, dst_offset,
+ field_type, chunk_size, fill_data, compress, p_data);
+
+ /* append two records */
+ H5TBappend_records(file_id, TABLE_NAME, NRECORDS_ADD, dst_size, dst_offset, dst_sizes, &particle_in);
+
+ /* read the table */
+ H5TBread_table(file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf);
+
+ /* print it by rows */
+ for (i = 0; i < NRECORDS + NRECORDS_ADD; i++) {
+ printf("%-5s %-5d %-5d %-5f %-5f", dst_buf[i].name, dst_buf[i].lati, dst_buf[i].longi,
+ dst_buf[i].pressure, dst_buf[i].temperature);
+ printf("\n");
+ }
+
+ /* close type */
+ H5Tclose(string_type);
+
+ /* close the file */
+ H5Fclose(file_id);
+
+ return 0;
}
-
diff --git a/hl/examples/ex_table_03.c b/hl/examples/ex_table_03.c
index 76a9eae..31cf970 100644
--- a/hl/examples/ex_table_03.c
+++ b/hl/examples/ex_table_03.c
@@ -6,12 +6,11 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
#include "hdf5.h"
#include "hdf5_hl.h"
#include <stdlib.h>
@@ -24,113 +23,85 @@
*-------------------------------------------------------------------------
*/
-#define NFIELDS (hsize_t) 5
-#define NRECORDS (hsize_t) 8
-#define NRECORDS_WRITE (hsize_t) 2
-#define TABLE_NAME "table"
+#define NFIELDS (hsize_t)5
+#define NRECORDS (hsize_t)8
+#define NRECORDS_WRITE (hsize_t)2
+#define TABLE_NAME "table"
-int main( void )
+int
+main(void)
{
- typedef struct Particle
- {
- char name[16];
- int lati;
- int longi;
- float pressure;
- double temperature;
- } Particle;
-
- Particle dst_buf[NRECORDS];
-
- /* Calculate the size and the offsets of our struct members in memory */
- size_t dst_size = sizeof( Particle );
- size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
- HOFFSET( Particle, lati ),
- HOFFSET( Particle, longi ),
- HOFFSET( Particle, pressure ),
- HOFFSET( Particle, temperature )};
-
- Particle p = {"zero",0,0, 0.0f, 0.0};
- size_t dst_sizes[NFIELDS] = { sizeof( p.name),
- sizeof( p.lati),
- sizeof( p.longi),
- sizeof( p.pressure),
- sizeof( p.temperature)};
-
- /* Define field information */
- const char *field_names[NFIELDS] =
- { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
- /* Fill value particle */
- Particle fill_data[1] =
- { {"no data",-1,-1, -99.0f, -99.0} };
- hid_t field_type[NFIELDS];
- hid_t string_type;
- hid_t file_id;
- hsize_t chunk_size = 10;
- hsize_t start; /* Record to start reading/writing */
- hsize_t nrecords; /* Number of records to read/write */
- int i;
-
- /* Define 2 new particles to write */
- Particle particle_in[NRECORDS_WRITE] =
- { {"zero",0,0, 0.0f, 0.0},
- {"one",10,10, 1.0f, 10.0} };
-
- /* Initialize the field field_type */
- string_type = H5Tcopy( H5T_C_S1 );
- H5Tset_size( string_type, 16 );
- field_type[0] = string_type;
- field_type[1] = H5T_NATIVE_INT;
- field_type[2] = H5T_NATIVE_INT;
- field_type[3] = H5T_NATIVE_FLOAT;
- field_type[4] = H5T_NATIVE_DOUBLE;
-
-/* Create a new file using default properties. */
- file_id = H5Fcreate( "ex_table_03.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
-
- /* Make the table */
- H5TBmake_table( "Table Title",
- file_id,
- TABLE_NAME,
- NFIELDS,
- NRECORDS,
- dst_size,
- field_names,
- dst_offset,
- field_type,
- chunk_size,
- fill_data,
- 0, /* no compression */
- NULL ); /* no data written */
-
-
- /* Overwrite 2 records starting at record 0 */
- start = 0;
- nrecords = NRECORDS_WRITE;
- H5TBwrite_records( file_id, TABLE_NAME, start, nrecords, dst_size, dst_offset,
- dst_sizes, particle_in);
-
- /* read the table */
- H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
-
- /* print it by rows */
- for (i=0; i<NRECORDS; i++) {
- printf ("%-5s %-5d %-5d %-5f %-5f",
- dst_buf[i].name,
- dst_buf[i].lati,
- dst_buf[i].longi,
- dst_buf[i].pressure,
- dst_buf[i].temperature);
- printf ("\n");
- }
-
- /* close type */
- H5Tclose( string_type );
-
- /* close the file */
- H5Fclose( file_id );
-
- return 0;
-
+ typedef struct Particle {
+ char name[16];
+ int lati;
+ int longi;
+ float pressure;
+ double temperature;
+ } Particle;
+
+ Particle dst_buf[NRECORDS];
+
+ /* Calculate the size and the offsets of our struct members in memory */
+ size_t dst_size = sizeof(Particle);
+ size_t dst_offset[NFIELDS] = {HOFFSET(Particle, name), HOFFSET(Particle, lati), HOFFSET(Particle, longi),
+ HOFFSET(Particle, pressure), HOFFSET(Particle, temperature)};
+
+ Particle p = {"zero", 0, 0, 0.0f, 0.0};
+ size_t dst_sizes[NFIELDS] = {sizeof(p.name), sizeof(p.lati), sizeof(p.longi), sizeof(p.pressure),
+ sizeof(p.temperature)};
+
+ /* Define field information */
+ const char *field_names[NFIELDS] = {"Name", "Latitude", "Longitude", "Pressure", "Temperature"};
+ /* Fill value particle */
+ Particle fill_data[1] = {{"no data", -1, -1, -99.0f, -99.0}};
+ hid_t field_type[NFIELDS];
+ hid_t string_type;
+ hid_t file_id;
+ hsize_t chunk_size = 10;
+ hsize_t start; /* Record to start reading/writing */
+ hsize_t nrecords; /* Number of records to read/write */
+ int i;
+
+ /* Define 2 new particles to write */
+ Particle particle_in[NRECORDS_WRITE] = {{"zero", 0, 0, 0.0f, 0.0}, {"one", 10, 10, 1.0f, 10.0}};
+
+ /* Initialize the field field_type */
+ string_type = H5Tcopy(H5T_C_S1);
+ H5Tset_size(string_type, 16);
+ field_type[0] = string_type;
+ field_type[1] = H5T_NATIVE_INT;
+ field_type[2] = H5T_NATIVE_INT;
+ field_type[3] = H5T_NATIVE_FLOAT;
+ field_type[4] = H5T_NATIVE_DOUBLE;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate("ex_table_03.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Make the table */
+ H5TBmake_table("Table Title", file_id, TABLE_NAME, NFIELDS, NRECORDS, dst_size, field_names, dst_offset,
+ field_type, chunk_size, fill_data, 0, /* no compression */
+ NULL); /* no data written */
+
+ /* Overwrite 2 records starting at record 0 */
+ start = 0;
+ nrecords = NRECORDS_WRITE;
+ H5TBwrite_records(file_id, TABLE_NAME, start, nrecords, dst_size, dst_offset, dst_sizes, particle_in);
+
+ /* read the table */
+ H5TBread_table(file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf);
+
+ /* print it by rows */
+ for (i = 0; i < NRECORDS; i++) {
+ printf("%-5s %-5d %-5d %-5f %-5f", dst_buf[i].name, dst_buf[i].lati, dst_buf[i].longi,
+ dst_buf[i].pressure, dst_buf[i].temperature);
+ printf("\n");
+ }
+
+ /* close type */
+ H5Tclose(string_type);
+
+ /* close the file */
+ H5Fclose(file_id);
+
+ return 0;
}
-
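(Note, not part of the patch: ex_table_03.c above overwrites the first two records with H5TBwrite_records. A companion sketch that reads back just those two records with H5TBread_records, assuming ex_table_03.h5 exists and reusing the same Particle layout.)

#include "hdf5.h"
#include "hdf5_hl.h"
#include <stdio.h>

typedef struct Particle {
    char   name[16];
    int    lati;
    int    longi;
    float  pressure;
    double temperature;
} Particle;

int
main(void)
{
    hid_t    fid = H5Fopen("ex_table_03.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    Particle buf[2];
    size_t   offsets[5] = {HOFFSET(Particle, name), HOFFSET(Particle, lati),
                           HOFFSET(Particle, longi), HOFFSET(Particle, pressure),
                           HOFFSET(Particle, temperature)};
    size_t   sizes[5]   = {sizeof(buf[0].name), sizeof(buf[0].lati), sizeof(buf[0].longi),
                           sizeof(buf[0].pressure), sizeof(buf[0].temperature)};

    /* read 2 records starting at record 0 */
    H5TBread_records(fid, "table", 0, 2, sizeof(Particle), offsets, sizes, buf);
    printf("first two records: %s, %s\n", buf[0].name, buf[1].name);

    H5Fclose(fid);
    return 0;
}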
diff --git a/hl/examples/ex_table_04.c b/hl/examples/ex_table_04.c
index 203114c..863fe15 100644
--- a/hl/examples/ex_table_04.c
+++ b/hl/examples/ex_table_04.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -23,141 +23,117 @@
*-------------------------------------------------------------------------
*/
-#define NFIELDS (hsize_t) 5
-#define NRECORDS (hsize_t) 8
-#define NRECORDS_ADD (hsize_t) 3
-#define TABLE_NAME "table"
+#define NFIELDS (hsize_t)5
+#define NRECORDS (hsize_t)8
+#define NRECORDS_ADD (hsize_t)3
+#define TABLE_NAME "table"
-int main( void )
+int
+main(void)
{
- typedef struct Particle
- {
- char name[16];
- int lati;
- int longi;
- float pressure;
- double temperature;
- } Particle;
-
- /* Define a subset of Particle, with latitude and longitude fields */
- typedef struct Position
- {
- int lati;
- int longi;
- } Position;
-
- /* Define a subset of Particle, with name and pressure fields */
- typedef struct NamePressure
- {
- char name[16];
- float pressure;
- } NamePressure;
-
- Particle dst_buf[NRECORDS];
- /* Calculate the size and the offsets of our struct members in memory */
- size_t dst_size = sizeof( Particle );
- size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
- HOFFSET( Particle, lati ),
- HOFFSET( Particle, longi ),
- HOFFSET( Particle, pressure ),
- HOFFSET( Particle, temperature )};
- size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
- sizeof( dst_buf[0].lati),
- sizeof( dst_buf[0].longi),
- sizeof( dst_buf[0].pressure),
- sizeof( dst_buf[0].temperature)};
- size_t field_offset_pos[2] = { HOFFSET( Position, lati ),
- HOFFSET( Position, longi )};
- const char *field_names[NFIELDS] = /* Define field information */
- { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
- hid_t field_type[NFIELDS];
- hid_t string_type;
- hid_t file_id;
- hsize_t chunk_size = 10;
- Particle fill_data[1] =
- { {"no data",-1,-1, -99.0f, -99.0} }; /* Fill value particle */
- hsize_t start; /* Record to start reading/writing */
- hsize_t nrecords; /* Number of records to read/write */
- int compress = 0;
- int i;
- Particle *p_data = NULL; /* Initially no data */
- float pressure_in [NRECORDS_ADD] = /* Define new values for the field "Pressure" */
- { 0.0f,1.0f,2.0f};
- Position position_in[NRECORDS_ADD] = {/* Define new values for "Latitude,Longitude" */
- {0,0},
- {10,10},
- {20,20}};
- NamePressure namepre_in[NRECORDS_ADD] =/* Define new values for "Name,Pressure" */
- { {"zero",0.0f},
- {"one", 1.0f},
- {"two", 2.0f},
- };
- size_t field_sizes_pos[2]=
- {
- sizeof(position_in[0].longi),
- sizeof(position_in[0].lati)
- };
- size_t field_sizes_pre[1]=
- {
- sizeof(namepre_in[0].pressure)
- };
-
- /* Initialize the field field_type */
- string_type = H5Tcopy( H5T_C_S1 );
- H5Tset_size( string_type, 16 );
- field_type[0] = string_type;
- field_type[1] = H5T_NATIVE_INT;
- field_type[2] = H5T_NATIVE_INT;
- field_type[3] = H5T_NATIVE_FLOAT;
- field_type[4] = H5T_NATIVE_DOUBLE;
-
- /* Create a new file using default properties. */
- file_id = H5Fcreate( "ex_table_04.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
-
- /* Make the table */
- H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
- dst_size,field_names, dst_offset, field_type,
- chunk_size, fill_data, compress, p_data );
-
- /* Write the pressure field starting at record 2 */
- start = 2;
- nrecords = NRECORDS_ADD;
- H5TBwrite_fields_name( file_id, TABLE_NAME, "Pressure", start, nrecords,
- sizeof( float ), 0, field_sizes_pre, pressure_in );
-
- /* Write the new longitude and latitude information starting at record 2 */
- start = 2;
- nrecords = NRECORDS_ADD;
- H5TBwrite_fields_name( file_id, TABLE_NAME, "Latitude,Longitude", start, nrecords,
- sizeof( Position ), field_offset_pos, field_sizes_pos, position_in );
-
- /* read the table */
- H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
-
- /* print it by rows */
- for (i=0; i<NRECORDS; i++) {
- printf ("%-5s %-5d %-5d %-5f %-5f",
- dst_buf[i].name,
- dst_buf[i].lati,
- dst_buf[i].longi,
- dst_buf[i].pressure,
- dst_buf[i].temperature);
- printf ("\n");
- }
-
-/*-------------------------------------------------------------------------
- * end
- *-------------------------------------------------------------------------
- */
-
- /* close type */
- H5Tclose( string_type );
-
- /* close the file */
- H5Fclose( file_id );
-
- return 0;
-
-
+ typedef struct Particle {
+ char name[16];
+ int lati;
+ int longi;
+ float pressure;
+ double temperature;
+ } Particle;
+
+ /* Define a subset of Particle, with latitude and longitude fields */
+ typedef struct Position {
+ int lati;
+ int longi;
+ } Position;
+
+ /* Define a subset of Particle, with name and pressure fields */
+ typedef struct NamePressure {
+ char name[16];
+ float pressure;
+ } NamePressure;
+
+ Particle dst_buf[NRECORDS];
+ /* Calculate the size and the offsets of our struct members in memory */
+ size_t dst_size = sizeof(Particle);
+ size_t dst_offset[NFIELDS] = {HOFFSET(Particle, name), HOFFSET(Particle, lati), HOFFSET(Particle, longi),
+ HOFFSET(Particle, pressure), HOFFSET(Particle, temperature)};
+ size_t dst_sizes[NFIELDS] = {sizeof(dst_buf[0].name), sizeof(dst_buf[0].lati), sizeof(dst_buf[0].longi),
+ sizeof(dst_buf[0].pressure), sizeof(dst_buf[0].temperature)};
+ size_t field_offset_pos[2] = {HOFFSET(Position, lati), HOFFSET(Position, longi)};
+ const char *field_names[NFIELDS] = /* Define field information */
+ {"Name", "Latitude", "Longitude", "Pressure", "Temperature"};
+ hid_t field_type[NFIELDS];
+ hid_t string_type;
+ hid_t file_id;
+ hsize_t chunk_size = 10;
+ Particle fill_data[1] = {{"no data", -1, -1, -99.0f, -99.0}}; /* Fill value particle */
+ hsize_t start; /* Record to start reading/writing */
+ hsize_t nrecords; /* Number of records to read/write */
+ int compress = 0;
+ int i;
+ Particle *p_data = NULL; /* Initially no data */
+ float pressure_in[NRECORDS_ADD] = /* Define new values for the field "Pressure" */
+ {0.0f, 1.0f, 2.0f};
+ Position position_in[NRECORDS_ADD] = {/* Define new values for "Latitude,Longitude" */
+ {0, 0},
+ {10, 10},
+ {20, 20}};
+ NamePressure namepre_in[NRECORDS_ADD] = /* Define new values for "Name,Pressure" */
+ {
+ {"zero", 0.0f},
+ {"one", 1.0f},
+ {"two", 2.0f},
+ };
+ size_t field_sizes_pos[2] = {sizeof(position_in[0].longi), sizeof(position_in[0].lati)};
+ size_t field_sizes_pre[1] = {sizeof(namepre_in[0].pressure)};
+
+ /* Initialize the field field_type */
+ string_type = H5Tcopy(H5T_C_S1);
+ H5Tset_size(string_type, 16);
+ field_type[0] = string_type;
+ field_type[1] = H5T_NATIVE_INT;
+ field_type[2] = H5T_NATIVE_INT;
+ field_type[3] = H5T_NATIVE_FLOAT;
+ field_type[4] = H5T_NATIVE_DOUBLE;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate("ex_table_04.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Make the table */
+ H5TBmake_table("Table Title", file_id, TABLE_NAME, NFIELDS, NRECORDS, dst_size, field_names, dst_offset,
+ field_type, chunk_size, fill_data, compress, p_data);
+
+ /* Write the pressure field starting at record 2 */
+ start = 2;
+ nrecords = NRECORDS_ADD;
+ H5TBwrite_fields_name(file_id, TABLE_NAME, "Pressure", start, nrecords, sizeof(float), 0, field_sizes_pre,
+ pressure_in);
+
+ /* Write the new longitude and latitude information starting at record 2 */
+ start = 2;
+ nrecords = NRECORDS_ADD;
+ H5TBwrite_fields_name(file_id, TABLE_NAME, "Latitude,Longitude", start, nrecords, sizeof(Position),
+ field_offset_pos, field_sizes_pos, position_in);
+
+ /* read the table */
+ H5TBread_table(file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf);
+
+ /* print it by rows */
+ for (i = 0; i < NRECORDS; i++) {
+ printf("%-5s %-5d %-5d %-5f %-5f", dst_buf[i].name, dst_buf[i].lati, dst_buf[i].longi,
+ dst_buf[i].pressure, dst_buf[i].temperature);
+ printf("\n");
+ }
+
+ /*-------------------------------------------------------------------------
+ * end
+ *-------------------------------------------------------------------------
+ */
+
+ /* close type */
+ H5Tclose(string_type);
+
+ /* close the file */
+ H5Fclose(file_id);
+
+ return 0;
}
-
diff --git a/hl/examples/ex_table_05.c b/hl/examples/ex_table_05.c
index b43d635..337bfb6 100644
--- a/hl/examples/ex_table_05.c
+++ b/hl/examples/ex_table_05.c
@@ -6,12 +6,11 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
#include "hdf5.h"
#include "hdf5_hl.h"
#include <stdlib.h>
@@ -24,142 +23,111 @@
*-------------------------------------------------------------------------
*/
-#define NFIELDS (hsize_t) 5
-#define NRECORDS (hsize_t) 8
-#define NRECORDS_ADD (hsize_t) 3
-#define TABLE_NAME "table"
-
-
+#define NFIELDS (hsize_t)5
+#define NRECORDS (hsize_t)8
+#define NRECORDS_ADD (hsize_t)3
+#define TABLE_NAME "table"
-int main( void )
+int
+main(void)
{
- typedef struct Particle
- {
- char name[16];
- int lati;
- int longi;
- float pressure;
- double temperature;
- } Particle;
-
- /* Define a subset of Particle, with latitude and longitude fields */
- typedef struct Position
- {
- int lati;
- int longi;
- } Position;
-
- /* Calculate the type_size and the offsets of our struct members */
- Particle dst_buf[NRECORDS];
- size_t dst_size = sizeof( Particle );
- size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
- HOFFSET( Particle, lati ),
- HOFFSET( Particle, longi ),
- HOFFSET( Particle, pressure ),
- HOFFSET( Particle, temperature )};
- size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
- sizeof( dst_buf[0].lati),
- sizeof( dst_buf[0].longi),
- sizeof( dst_buf[0].pressure),
- sizeof( dst_buf[0].temperature)};
-
- size_t field_offset_pos[2] = { HOFFSET( Position, lati ),
- HOFFSET( Position, longi )};
-
- /* Initially no data */
- Particle *p_data = NULL;
-
- /* Define field information */
- const char *field_names[NFIELDS] =
- { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
- hid_t field_type[NFIELDS];
- hid_t string_type;
- hid_t file_id;
- hsize_t chunk_size = 10;
- Particle fill_data[1] =
- { {"no data",-1,-1, -99.0f, -99.0} }; /* Fill value particle */
- int compress = 0;
- hsize_t nfields;
- hsize_t start; /* Record to start reading/writing */
- hsize_t nrecords; /* Number of records to read/write */
- int i;
-
- /* Define new values for the field "Pressure" */
- float pressure_in [NRECORDS_ADD] =
- { 0.0f,1.0f,2.0f};
- int field_index_pre[1] = { 3 };
- int field_index_pos[2] = { 1,2 };
-
- /* Define new values for the fields "Latitude,Longitude" */
- Position position_in[NRECORDS_ADD] = { {0,0},
- {10,10},
- {20,20} };
-
- size_t field_sizes_pos[2]=
- {
- sizeof(position_in[0].longi),
- sizeof(position_in[0].lati)
- };
-
- size_t field_sizes_pre[1]=
- {
- sizeof(float)
- };
-
- /* Initialize the field field_type */
- string_type = H5Tcopy( H5T_C_S1 );
- H5Tset_size( string_type, 16 );
- field_type[0] = string_type;
- field_type[1] = H5T_NATIVE_INT;
- field_type[2] = H5T_NATIVE_INT;
- field_type[3] = H5T_NATIVE_FLOAT;
- field_type[4] = H5T_NATIVE_DOUBLE;
-
- /* Create a new file using default properties. */
- file_id = H5Fcreate( "ex_table_05.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
-
- /* Make the table */
- H5TBmake_table( "Table Title", file_id, TABLE_NAME,NFIELDS,NRECORDS,
- dst_size,field_names, dst_offset, field_type,
- chunk_size, fill_data, compress, p_data );
-
- /* Write the pressure field starting at record 2 */
- nfields = 1;
- start = 2;
- nrecords = NRECORDS_ADD;
- H5TBwrite_fields_index( file_id, TABLE_NAME, nfields, field_index_pre, start, nrecords,
- sizeof( float ), 0, field_sizes_pre, pressure_in );
-
-
- /* Write the new longitude and latitude information starting at record 2 */
- nfields = 2;
- start = 2;
- nrecords = NRECORDS_ADD;
- H5TBwrite_fields_index( file_id, TABLE_NAME, nfields, field_index_pos, start, nrecords,
- sizeof( Position ), field_offset_pos, field_sizes_pos, position_in );
-
-
- /* read the table */
- H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
-
- /* print it by rows */
- for (i=0; i<NRECORDS; i++) {
- printf ("%-5s %-5d %-5d %-5f %-5f",
- dst_buf[i].name,
- dst_buf[i].lati,
- dst_buf[i].longi,
- dst_buf[i].pressure,
- dst_buf[i].temperature);
- printf ("\n");
- }
-
- /* close type */
- H5Tclose( string_type );
-
- /* close the file */
- H5Fclose( file_id );
-
- return 0;
-
+ typedef struct Particle {
+ char name[16];
+ int lati;
+ int longi;
+ float pressure;
+ double temperature;
+ } Particle;
+
+ /* Define a subset of Particle, with latitude and longitude fields */
+ typedef struct Position {
+ int lati;
+ int longi;
+ } Position;
+
+ /* Calculate the type_size and the offsets of our struct members */
+ Particle dst_buf[NRECORDS];
+ size_t dst_size = sizeof(Particle);
+ size_t dst_offset[NFIELDS] = {HOFFSET(Particle, name), HOFFSET(Particle, lati), HOFFSET(Particle, longi),
+ HOFFSET(Particle, pressure), HOFFSET(Particle, temperature)};
+ size_t dst_sizes[NFIELDS] = {sizeof(dst_buf[0].name), sizeof(dst_buf[0].lati), sizeof(dst_buf[0].longi),
+ sizeof(dst_buf[0].pressure), sizeof(dst_buf[0].temperature)};
+
+ size_t field_offset_pos[2] = {HOFFSET(Position, lati), HOFFSET(Position, longi)};
+
+ /* Initially no data */
+ Particle *p_data = NULL;
+
+ /* Define field information */
+ const char *field_names[NFIELDS] = {"Name", "Latitude", "Longitude", "Pressure", "Temperature"};
+ hid_t field_type[NFIELDS];
+ hid_t string_type;
+ hid_t file_id;
+ hsize_t chunk_size = 10;
+ Particle fill_data[1] = {{"no data", -1, -1, -99.0f, -99.0}}; /* Fill value particle */
+ int compress = 0;
+ hsize_t nfields;
+ hsize_t start; /* Record to start reading/writing */
+ hsize_t nrecords; /* Number of records to read/write */
+ int i;
+
+ /* Define new values for the field "Pressure" */
+ float pressure_in[NRECORDS_ADD] = {0.0f, 1.0f, 2.0f};
+ int field_index_pre[1] = {3};
+ int field_index_pos[2] = {1, 2};
+
+ /* Define new values for the fields "Latitude,Longitude" */
+ Position position_in[NRECORDS_ADD] = {{0, 0}, {10, 10}, {20, 20}};
+
+ size_t field_sizes_pos[2] = {sizeof(position_in[0].longi), sizeof(position_in[0].lati)};
+
+ size_t field_sizes_pre[1] = {sizeof(float)};
+
+ /* Initialize the field field_type */
+ string_type = H5Tcopy(H5T_C_S1);
+ H5Tset_size(string_type, 16);
+ field_type[0] = string_type;
+ field_type[1] = H5T_NATIVE_INT;
+ field_type[2] = H5T_NATIVE_INT;
+ field_type[3] = H5T_NATIVE_FLOAT;
+ field_type[4] = H5T_NATIVE_DOUBLE;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate("ex_table_05.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Make the table */
+ H5TBmake_table("Table Title", file_id, TABLE_NAME, NFIELDS, NRECORDS, dst_size, field_names, dst_offset,
+ field_type, chunk_size, fill_data, compress, p_data);
+
+ /* Write the pressure field starting at record 2 */
+ nfields = 1;
+ start = 2;
+ nrecords = NRECORDS_ADD;
+ H5TBwrite_fields_index(file_id, TABLE_NAME, nfields, field_index_pre, start, nrecords, sizeof(float), 0,
+ field_sizes_pre, pressure_in);
+
+ /* Write the new longitude and latitude information starting at record 2 */
+ nfields = 2;
+ start = 2;
+ nrecords = NRECORDS_ADD;
+ H5TBwrite_fields_index(file_id, TABLE_NAME, nfields, field_index_pos, start, nrecords, sizeof(Position),
+ field_offset_pos, field_sizes_pos, position_in);
+
+ /* read the table */
+ H5TBread_table(file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf);
+
+ /* print it by rows */
+ for (i = 0; i < NRECORDS; i++) {
+ printf("%-5s %-5d %-5d %-5f %-5f", dst_buf[i].name, dst_buf[i].lati, dst_buf[i].longi,
+ dst_buf[i].pressure, dst_buf[i].temperature);
+ printf("\n");
+ }
+
+ /* close type */
+ H5Tclose(string_type);
+
+ /* close the file */
+ H5Fclose(file_id);
+
+ return 0;
}
-
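
ex_table_04.c and ex_table_05.c update individual columns by field name and by field index, respectively. When the names or indices are not known in advance they can be queried with H5TBget_field_info; the sketch below is not part of the examples and simply reuses the 5-field table written above.

    #include "hdf5.h"
    #include "hdf5_hl.h"
    #include <stdio.h>

    #define NFIELDS 5 /* matches the tables created in these examples */

    /* Hypothetical helper: print the field layout of the table in
     * ex_table_05.h5 before picking indices for H5TBwrite_fields_index. */
    int
    main(void)
    {
        char    name_buf[NFIELDS][255];
        char   *field_names[NFIELDS];
        size_t  field_sizes[NFIELDS];
        size_t  field_offsets[NFIELDS];
        size_t  type_size;
        hid_t   file_id;
        int     i;

        for (i = 0; i < NFIELDS; i++)
            field_names[i] = name_buf[i];

        file_id = H5Fopen("ex_table_05.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
        if (file_id < 0)
            return 1;
        if (H5TBget_field_info(file_id, "table", field_names, field_sizes, field_offsets, &type_size) < 0)
            fprintf(stderr, "H5TBget_field_info failed\n");
        else
            for (i = 0; i < NFIELDS; i++)
                printf("%d: %-12s size %u offset %u\n", i, field_names[i], (unsigned)field_sizes[i],
                       (unsigned)field_offsets[i]);

        H5Fclose(file_id);
        return 0;
    }
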
diff --git a/hl/examples/ex_table_06.c b/hl/examples/ex_table_06.c
index 0397e83..f6b67c8 100644
--- a/hl/examples/ex_table_06.c
+++ b/hl/examples/ex_table_06.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -23,73 +23,64 @@
*-------------------------------------------------------------------------
*/
-#define NFIELDS (hsize_t) 5
-#define NRECORDS (hsize_t) 8
+#define NFIELDS (hsize_t)5
+#define NRECORDS (hsize_t)8
#define TABLE_NAME "table"
-int main( void )
+int
+main(void)
{
- typedef struct Particle
- {
- char name[16];
- int lati;
- int longi;
- float pressure;
- double temperature;
- } Particle;
-
- /* Calculate the size and the offsets of our struct members in memory */
- size_t dst_size = sizeof( Particle );
- size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
- HOFFSET( Particle, lati ),
- HOFFSET( Particle, longi ),
- HOFFSET( Particle, pressure ),
- HOFFSET( Particle, temperature )};
-
- /* Define field information */
- const char *field_names[NFIELDS] =
- { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
- hid_t field_type[NFIELDS];
- hid_t string_type;
- hid_t file_id;
- hsize_t chunk_size = 10;
- Particle fill_data[1] =
- { {"no data",-1,-1, -99.0f, -99.0} }; /* Fill value particle */
- int compress = 0;
- hsize_t nfields_out;
- hsize_t nrecords_out;
-
- /* Initialize field_type */
- string_type = H5Tcopy( H5T_C_S1 );
- H5Tset_size( string_type, 16 );
- field_type[0] = string_type;
- field_type[1] = H5T_NATIVE_INT;
- field_type[2] = H5T_NATIVE_INT;
- field_type[3] = H5T_NATIVE_FLOAT;
- field_type[4] = H5T_NATIVE_DOUBLE;
-
- /* Create a new file using default properties. */
- file_id = H5Fcreate( "ex_table_06.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
-
- /* Make a table */
- H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,dst_size,
- field_names, dst_offset, field_type,
- chunk_size, fill_data, compress, NULL);
-
- /* Get table info */
- H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );
-
- /* print */
- printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
-
- /* close type */
- H5Tclose( string_type );
-
- /* close the file */
- H5Fclose( file_id );
-
- return 0;
-
-
+ typedef struct Particle {
+ char name[16];
+ int lati;
+ int longi;
+ float pressure;
+ double temperature;
+ } Particle;
+
+ /* Calculate the size and the offsets of our struct members in memory */
+ size_t dst_size = sizeof(Particle);
+ size_t dst_offset[NFIELDS] = {HOFFSET(Particle, name), HOFFSET(Particle, lati), HOFFSET(Particle, longi),
+ HOFFSET(Particle, pressure), HOFFSET(Particle, temperature)};
+
+ /* Define field information */
+ const char *field_names[NFIELDS] = {"Name", "Latitude", "Longitude", "Pressure", "Temperature"};
+ hid_t field_type[NFIELDS];
+ hid_t string_type;
+ hid_t file_id;
+ hsize_t chunk_size = 10;
+ Particle fill_data[1] = {{"no data", -1, -1, -99.0f, -99.0}}; /* Fill value particle */
+ int compress = 0;
+ hsize_t nfields_out;
+ hsize_t nrecords_out;
+
+ /* Initialize field_type */
+ string_type = H5Tcopy(H5T_C_S1);
+ H5Tset_size(string_type, 16);
+ field_type[0] = string_type;
+ field_type[1] = H5T_NATIVE_INT;
+ field_type[2] = H5T_NATIVE_INT;
+ field_type[3] = H5T_NATIVE_FLOAT;
+ field_type[4] = H5T_NATIVE_DOUBLE;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate("ex_table_06.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Make a table */
+ H5TBmake_table("Table Title", file_id, TABLE_NAME, NFIELDS, NRECORDS, dst_size, field_names, dst_offset,
+ field_type, chunk_size, fill_data, compress, NULL);
+
+ /* Get table info */
+ H5TBget_table_info(file_id, TABLE_NAME, &nfields_out, &nrecords_out);
+
+ /* print */
+ printf("Table has %d fields and %d records\n", (int)nfields_out, (int)nrecords_out);
+
+ /* close type */
+ H5Tclose(string_type);
+
+ /* close the file */
+ H5Fclose(file_id);
+
+ return 0;
}
-
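
ex_table_06.c passes fill_data to H5TBmake_table but never writes any records, so records that were never written are expected to read back as the fill value. The short check below is a sketch with assumptions (file name from the example, fill value applied by the library), not part of the example itself.

    #include "hdf5.h"
    #include "hdf5_hl.h"
    #include <stdio.h>

    /* Hypothetical check for ex_table_06.c: the "Name" field of the first
     * (never written) record is expected to hold the fill value "no data". */
    int
    main(void)
    {
        char   name[16];
        size_t offset[1] = {0};
        size_t sizes[1]  = {sizeof(name)};
        hid_t  file_id   = H5Fopen("ex_table_06.h5", H5F_ACC_RDONLY, H5P_DEFAULT);

        if (file_id < 0)
            return 1;
        if (H5TBread_fields_name(file_id, "table", "Name", 0, 1, sizeof(name), offset, sizes, name) < 0)
            fprintf(stderr, "H5TBread_fields_name failed\n");
        else
            printf("first record Name = '%s'\n", name);

        H5Fclose(file_id);
        return 0;
    }
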
diff --git a/hl/examples/ex_table_07.c b/hl/examples/ex_table_07.c
index d9ea444..ab36613 100644
--- a/hl/examples/ex_table_07.c
+++ b/hl/examples/ex_table_07.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -23,90 +23,77 @@
*-------------------------------------------------------------------------
*/
-#define NFIELDS (hsize_t) 5
-#define NRECORDS (hsize_t) 8
-#define TABLE_NAME "table"
+#define NFIELDS (hsize_t)5
+#define NRECORDS (hsize_t)8
+#define TABLE_NAME "table"
-int main( void )
+int
+main(void)
{
- typedef struct Particle
- {
- char name[16];
- int lati;
- int longi;
- float pressure;
- double temperature;
- } Particle;
-
- /* Calculate the size and the offsets of our struct members in memory */
- size_t dst_size = sizeof( Particle );
- size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
- HOFFSET( Particle, lati ),
- HOFFSET( Particle, longi ),
- HOFFSET( Particle, pressure ),
- HOFFSET( Particle, temperature )};
-
- /* Define an array of Particles */
- Particle p_data[NRECORDS] = {
- {"zero",0,0, 0.0f, 0.0},
- {"one",10,10, 1.0f, 10.0},
- {"two", 20,20, 2.0f, 20.0},
- {"three",30,30, 3.0f, 30.0},
- {"four", 40,40, 4.0f, 40.0},
- {"five", 50,50, 5.0f, 50.0},
- {"six", 60,60, 6.0f, 60.0},
- {"seven",70,70, 7.0f, 70.0}
- };
-
- const char *field_names[NFIELDS] = /* Define field information */
- { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
- hid_t field_type[NFIELDS];
- hid_t string_type;
- hid_t file_id;
- hsize_t chunk_size = 10;
- int compress = 0;
- Particle fill_data[1] =
- { {"no data",-1,-1, -99.0f, -99.0} };
- hsize_t start; /* Record to start reading */
- hsize_t nrecords; /* Number of records to insert/delete */
- hsize_t nfields_out;
- hsize_t nrecords_out;
-
- /* Initialize the field field_type */
- string_type = H5Tcopy( H5T_C_S1 );
- H5Tset_size( string_type, 16 );
- field_type[0] = string_type;
- field_type[1] = H5T_NATIVE_INT;
- field_type[2] = H5T_NATIVE_INT;
- field_type[3] = H5T_NATIVE_FLOAT;
- field_type[4] = H5T_NATIVE_DOUBLE;
-
- /* Create a new file using default properties. */
- file_id = H5Fcreate( "ex_table_07.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
-
- /* Make the table */
- H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
- dst_size,field_names, dst_offset, field_type,
- chunk_size, fill_data, compress, p_data );
-
- /* Delete records */
- start = 3;
- nrecords = 3;
- H5TBdelete_record( file_id, TABLE_NAME, start, nrecords );
-
- /* Get table info */
- H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );
-
- /* print */
- printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
-
- /* close type */
- H5Tclose( string_type );
-
- /* close the file */
- H5Fclose( file_id );
-
- return 0;
-
+ typedef struct Particle {
+ char name[16];
+ int lati;
+ int longi;
+ float pressure;
+ double temperature;
+ } Particle;
+
+ /* Calculate the size and the offsets of our struct members in memory */
+ size_t dst_size = sizeof(Particle);
+ size_t dst_offset[NFIELDS] = {HOFFSET(Particle, name), HOFFSET(Particle, lati), HOFFSET(Particle, longi),
+ HOFFSET(Particle, pressure), HOFFSET(Particle, temperature)};
+
+ /* Define an array of Particles */
+ Particle p_data[NRECORDS] = {{"zero", 0, 0, 0.0f, 0.0}, {"one", 10, 10, 1.0f, 10.0},
+ {"two", 20, 20, 2.0f, 20.0}, {"three", 30, 30, 3.0f, 30.0},
+ {"four", 40, 40, 4.0f, 40.0}, {"five", 50, 50, 5.0f, 50.0},
+ {"six", 60, 60, 6.0f, 60.0}, {"seven", 70, 70, 7.0f, 70.0}};
+
+ const char *field_names[NFIELDS] = /* Define field information */
+ {"Name", "Latitude", "Longitude", "Pressure", "Temperature"};
+ hid_t field_type[NFIELDS];
+ hid_t string_type;
+ hid_t file_id;
+ hsize_t chunk_size = 10;
+ int compress = 0;
+ Particle fill_data[1] = {{"no data", -1, -1, -99.0f, -99.0}};
+ hsize_t start; /* Record to start reading */
+ hsize_t nrecords; /* Number of records to insert/delete */
+ hsize_t nfields_out;
+ hsize_t nrecords_out;
+
+ /* Initialize the field field_type */
+ string_type = H5Tcopy(H5T_C_S1);
+ H5Tset_size(string_type, 16);
+ field_type[0] = string_type;
+ field_type[1] = H5T_NATIVE_INT;
+ field_type[2] = H5T_NATIVE_INT;
+ field_type[3] = H5T_NATIVE_FLOAT;
+ field_type[4] = H5T_NATIVE_DOUBLE;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate("ex_table_07.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Make the table */
+ H5TBmake_table("Table Title", file_id, TABLE_NAME, NFIELDS, NRECORDS, dst_size, field_names, dst_offset,
+ field_type, chunk_size, fill_data, compress, p_data);
+
+ /* Delete records */
+ start = 3;
+ nrecords = 3;
+ H5TBdelete_record(file_id, TABLE_NAME, start, nrecords);
+
+ /* Get table info */
+ H5TBget_table_info(file_id, TABLE_NAME, &nfields_out, &nrecords_out);
+
+ /* print */
+ printf("Table has %d fields and %d records\n", (int)nfields_out, (int)nrecords_out);
+
+ /* close type */
+ H5Tclose(string_type);
+
+ /* close the file */
+ H5Fclose(file_id);
+
+ return 0;
}
-
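
All of these examples discard the herr_t status that the H5TB calls return, which keeps them short but hides failures. A minimal checking pattern might look like the sketch below; the wrapper and the reuse of ex_table_07.h5 are illustrative assumptions, not part of the examples.

    #include "hdf5.h"
    #include "hdf5_hl.h"
    #include <stdio.h>
    #include <stdlib.h>

    /* Hypothetical wrapper: abort with a message if an H5TB call fails. */
    static void
    check(herr_t status, const char *what)
    {
        if (status < 0) {
            fprintf(stderr, "%s failed\n", what);
            exit(EXIT_FAILURE);
        }
    }

    int
    main(void)
    {
        hsize_t nfields_out;
        hsize_t nrecords_out;
        hid_t   file_id = H5Fopen("ex_table_07.h5", H5F_ACC_RDWR, H5P_DEFAULT);

        if (file_id < 0)
            return 1;
        check(H5TBdelete_record(file_id, "table", 0, 1), "H5TBdelete_record");
        check(H5TBget_table_info(file_id, "table", &nfields_out, &nrecords_out), "H5TBget_table_info");
        printf("%d records remain\n", (int)nrecords_out);
        check(H5Fclose(file_id), "H5Fclose");
        return 0;
    }
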
diff --git a/hl/examples/ex_table_08.c b/hl/examples/ex_table_08.c
index a45520d..5d447dd 100644
--- a/hl/examples/ex_table_08.c
+++ b/hl/examples/ex_table_08.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -22,121 +22,96 @@
*-------------------------------------------------------------------------
*/
-#define NFIELDS (hsize_t) 5
-#define NRECORDS (hsize_t) 8
-#define NRECORDS_INS (hsize_t) 2
-#define TABLE_NAME "table"
+#define NFIELDS (hsize_t)5
+#define NRECORDS (hsize_t)8
+#define NRECORDS_INS (hsize_t)2
+#define TABLE_NAME "table"
-int main( void )
+int
+main(void)
{
- typedef struct Particle
- {
- char name[16];
- int lati;
- int longi;
- float pressure;
- double temperature;
- } Particle;
-
- Particle dst_buf[ NRECORDS + NRECORDS_INS ];
-
- /* Define an array of Particles */
- Particle p_data[NRECORDS] = {
- {"zero",0,0, 0.0f, 0.0},
- {"one",10,10, 1.0f, 10.0},
- {"two", 20,20, 2.0f, 20.0},
- {"three",30,30, 3.0f, 30.0},
- {"four", 40,40, 4.0f, 40.0},
- {"five", 50,50, 5.0f, 50.0},
- {"six", 60,60, 6.0f, 60.0},
- {"seven",70,70, 7.0f, 70.0}
- };
-
- /* Calculate the size and the offsets of our struct members in memory */
- size_t dst_size = sizeof( Particle );
- size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
- HOFFSET( Particle, lati ),
- HOFFSET( Particle, longi ),
- HOFFSET( Particle, pressure ),
- HOFFSET( Particle, temperature )};
- size_t dst_sizes[NFIELDS] = { sizeof( p_data[0].name),
- sizeof( p_data[0].lati),
- sizeof( p_data[0].longi),
- sizeof( p_data[0].pressure),
- sizeof( p_data[0].temperature)};
-
- /* Define an array of Particles to insert */
- Particle p_data_insert[NRECORDS_INS] =
- { {"new",30,30, 3.0f, 30.0},
- {"new",40,40, 4.0f, 40.0}
- };
-
- /* Define field information */
- const char *field_names[NFIELDS] =
- { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
- hid_t field_type[NFIELDS];
- hid_t string_type;
- hid_t file_id;
- hsize_t chunk_size = 10;
- int compress = 0;
- int *fill_data = NULL;
- hsize_t start; /* Record to start reading */
- hsize_t nrecords; /* Number of records to insert/delete */
- hsize_t nfields_out;
- hsize_t nrecords_out;
- int i;
-
- /* Initialize the field field_type */
- string_type = H5Tcopy( H5T_C_S1 );
- H5Tset_size( string_type, 16 );
- field_type[0] = string_type;
- field_type[1] = H5T_NATIVE_INT;
- field_type[2] = H5T_NATIVE_INT;
- field_type[3] = H5T_NATIVE_FLOAT;
- field_type[4] = H5T_NATIVE_DOUBLE;
-
- /* Create a new file using default properties. */
- file_id = H5Fcreate( "ex_table_08.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
-
- /* Make the table */
- H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
- dst_size,field_names, dst_offset, field_type,
- chunk_size, fill_data, compress, p_data );
-
- /* Insert records */
- start = 3;
- nrecords = NRECORDS_INS;
- H5TBinsert_record( file_id, TABLE_NAME, start, nrecords, dst_size, dst_offset,
- dst_sizes, p_data_insert );
-
- /* read the table */
- H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
-
- /* get table info */
- H5TBget_table_info(file_id,TABLE_NAME, &nfields_out, &nrecords_out );
-
- /* print */
- printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
-
- /* print it by rows */
- for (i=0; i<nrecords_out; i++) {
- printf ("%-5s %-5d %-5d %-5f %-5f",
- dst_buf[i].name,
- dst_buf[i].lati,
- dst_buf[i].longi,
- dst_buf[i].pressure,
- dst_buf[i].temperature);
- printf ("\n");
- }
-
-
- /* close type */
- H5Tclose( string_type );
-
- /* close the file */
- H5Fclose( file_id );
-
- return 0;
-
+ typedef struct Particle {
+ char name[16];
+ int lati;
+ int longi;
+ float pressure;
+ double temperature;
+ } Particle;
+
+ Particle dst_buf[NRECORDS + NRECORDS_INS];
+
+ /* Define an array of Particles */
+ Particle p_data[NRECORDS] = {{"zero", 0, 0, 0.0f, 0.0}, {"one", 10, 10, 1.0f, 10.0},
+ {"two", 20, 20, 2.0f, 20.0}, {"three", 30, 30, 3.0f, 30.0},
+ {"four", 40, 40, 4.0f, 40.0}, {"five", 50, 50, 5.0f, 50.0},
+ {"six", 60, 60, 6.0f, 60.0}, {"seven", 70, 70, 7.0f, 70.0}};
+
+ /* Calculate the size and the offsets of our struct members in memory */
+ size_t dst_size = sizeof(Particle);
+ size_t dst_offset[NFIELDS] = {HOFFSET(Particle, name), HOFFSET(Particle, lati), HOFFSET(Particle, longi),
+ HOFFSET(Particle, pressure), HOFFSET(Particle, temperature)};
+ size_t dst_sizes[NFIELDS] = {sizeof(p_data[0].name), sizeof(p_data[0].lati), sizeof(p_data[0].longi),
+ sizeof(p_data[0].pressure), sizeof(p_data[0].temperature)};
+
+ /* Define an array of Particles to insert */
+ Particle p_data_insert[NRECORDS_INS] = {{"new", 30, 30, 3.0f, 30.0}, {"new", 40, 40, 4.0f, 40.0}};
+
+ /* Define field information */
+ const char *field_names[NFIELDS] = {"Name", "Latitude", "Longitude", "Pressure", "Temperature"};
+ hid_t field_type[NFIELDS];
+ hid_t string_type;
+ hid_t file_id;
+ hsize_t chunk_size = 10;
+ int compress = 0;
+ int * fill_data = NULL;
+ hsize_t start; /* Record to start reading */
+ hsize_t nrecords; /* Number of records to insert/delete */
+ hsize_t nfields_out;
+ hsize_t nrecords_out;
+ int i;
+
+ /* Initialize the field field_type */
+ string_type = H5Tcopy(H5T_C_S1);
+ H5Tset_size(string_type, 16);
+ field_type[0] = string_type;
+ field_type[1] = H5T_NATIVE_INT;
+ field_type[2] = H5T_NATIVE_INT;
+ field_type[3] = H5T_NATIVE_FLOAT;
+ field_type[4] = H5T_NATIVE_DOUBLE;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate("ex_table_08.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Make the table */
+ H5TBmake_table("Table Title", file_id, TABLE_NAME, NFIELDS, NRECORDS, dst_size, field_names, dst_offset,
+ field_type, chunk_size, fill_data, compress, p_data);
+
+ /* Insert records */
+ start = 3;
+ nrecords = NRECORDS_INS;
+ H5TBinsert_record(file_id, TABLE_NAME, start, nrecords, dst_size, dst_offset, dst_sizes, p_data_insert);
+
+ /* read the table */
+ H5TBread_table(file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf);
+
+ /* get table info */
+ H5TBget_table_info(file_id, TABLE_NAME, &nfields_out, &nrecords_out);
+
+ /* print */
+ printf("Table has %d fields and %d records\n", (int)nfields_out, (int)nrecords_out);
+
+ /* print it by rows */
+ for (i = 0; i < nrecords_out; i++) {
+ printf("%-5s %-5d %-5d %-5f %-5f", dst_buf[i].name, dst_buf[i].lati, dst_buf[i].longi,
+ dst_buf[i].pressure, dst_buf[i].temperature);
+ printf("\n");
+ }
+
+ /* close type */
+ H5Tclose(string_type);
+
+ /* close the file */
+ H5Fclose(file_id);
+
+ return 0;
}
-
diff --git a/hl/examples/ex_table_09.c b/hl/examples/ex_table_09.c
index a9f5f11..381925f 100644
--- a/hl/examples/ex_table_09.c
+++ b/hl/examples/ex_table_09.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -22,121 +22,99 @@
*-------------------------------------------------------------------------
*/
-#define NFIELDS (hsize_t) 5
-#define NRECORDS (hsize_t) 8
-#define NRECORDS_INS (hsize_t) 2
-#define TABLE1_NAME "table1"
-#define TABLE2_NAME "table2"
+#define NFIELDS (hsize_t)5
+#define NRECORDS (hsize_t)8
+#define NRECORDS_INS (hsize_t)2
+#define TABLE1_NAME "table1"
+#define TABLE2_NAME "table2"
-int main( void )
+int
+main(void)
{
- typedef struct Particle
- {
- char name[16];
- int lati;
- int longi;
- float pressure;
- double temperature;
- } Particle;
-
- Particle dst_buf[ NRECORDS + NRECORDS_INS ];
-
- /* Calculate the size and the offsets of our struct members in memory */
- size_t dst_size = sizeof( Particle );
- size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
- HOFFSET( Particle, lati ),
- HOFFSET( Particle, longi ),
- HOFFSET( Particle, pressure ),
- HOFFSET( Particle, temperature )};
- size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
- sizeof( dst_buf[0].lati),
- sizeof( dst_buf[0].longi),
- sizeof( dst_buf[0].pressure),
- sizeof( dst_buf[0].temperature)};
-
- /* Define an array of Particles */
- Particle p_data[NRECORDS] = {
- {"zero",0,0, 0.0f, 0.0},
- {"one",10,10, 1.0f, 10.0},
- {"two", 20,20, 2.0f, 20.0},
- {"three",30,30, 3.0f, 30.0},
- {"four", 40,40, 4.0f, 40.0},
- {"five", 50,50, 5.0f, 50.0},
- {"six", 60,60, 6.0f, 60.0},
- {"seven",70,70, 7.0f, 70.0}
- };
-
- /* Define field information */
- const char *field_names[NFIELDS] =
- { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
- hid_t field_type[NFIELDS];
- hid_t string_type;
- hid_t file_id;
- hsize_t chunk_size = 10;
- int compress = 0;
- Particle fill_data[1] =
- { {"no data",-1,-1, -99.0f, -99.0} }; /* Fill value particle */
- hsize_t start1; /* Record to start reading from 1st table */
- hsize_t nrecords; /* Number of records to insert */
- hsize_t start2; /* Record to start writing in 2nd table */
- int i;
- hsize_t nfields_out;
- hsize_t nrecords_out;
-
- /* Initialize the field field_type */
- string_type = H5Tcopy( H5T_C_S1 );
- H5Tset_size( string_type, 16 );
- field_type[0] = string_type;
- field_type[1] = H5T_NATIVE_INT;
- field_type[2] = H5T_NATIVE_INT;
- field_type[3] = H5T_NATIVE_FLOAT;
- field_type[4] = H5T_NATIVE_DOUBLE;
-
- /* Create a new file using default properties. */
- file_id = H5Fcreate( "ex_table_09.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
-
- /* Make 2 tables: TABLE2_NAME is empty */
- H5TBmake_table( "Table Title",file_id,TABLE1_NAME,NFIELDS,NRECORDS,
- dst_size,field_names, dst_offset, field_type,
- chunk_size, fill_data, compress, p_data );
-
- H5TBmake_table( "Table Title",file_id,TABLE2_NAME,NFIELDS,NRECORDS,
- dst_size,field_names, dst_offset, field_type,
- chunk_size, fill_data, compress, NULL );
-
-
- /* Add 2 records from TABLE1_NAME to TABLE2_NAME */
- start1 = 3;
- nrecords = NRECORDS_INS;
- start2 = 6;
- H5TBadd_records_from( file_id, TABLE1_NAME, start1, nrecords, TABLE2_NAME, start2 );
-
- /* read TABLE2_NAME: it should have 2 more records now */
- H5TBread_table( file_id, TABLE2_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
-
- /* Get table info */
- H5TBget_table_info (file_id,TABLE2_NAME, &nfields_out, &nrecords_out );
-
- /* print */
- printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
-
- /* print it by rows */
- for (i=0; i<nrecords_out; i++) {
- printf ("%-5s %-5d %-5d %-5f %-5f",
- dst_buf[i].name,
- dst_buf[i].lati,
- dst_buf[i].longi,
- dst_buf[i].pressure,
- dst_buf[i].temperature);
- printf ("\n");
- }
-
- /* close type */
- H5Tclose( string_type );
-
- /* close the file */
- H5Fclose( file_id );
-
- return 0;
+ typedef struct Particle {
+ char name[16];
+ int lati;
+ int longi;
+ float pressure;
+ double temperature;
+ } Particle;
+
+ Particle dst_buf[NRECORDS + NRECORDS_INS];
+
+ /* Calculate the size and the offsets of our struct members in memory */
+ size_t dst_size = sizeof(Particle);
+ size_t dst_offset[NFIELDS] = {HOFFSET(Particle, name), HOFFSET(Particle, lati), HOFFSET(Particle, longi),
+ HOFFSET(Particle, pressure), HOFFSET(Particle, temperature)};
+ size_t dst_sizes[NFIELDS] = {sizeof(dst_buf[0].name), sizeof(dst_buf[0].lati), sizeof(dst_buf[0].longi),
+ sizeof(dst_buf[0].pressure), sizeof(dst_buf[0].temperature)};
+
+ /* Define an array of Particles */
+ Particle p_data[NRECORDS] = {{"zero", 0, 0, 0.0f, 0.0}, {"one", 10, 10, 1.0f, 10.0},
+ {"two", 20, 20, 2.0f, 20.0}, {"three", 30, 30, 3.0f, 30.0},
+ {"four", 40, 40, 4.0f, 40.0}, {"five", 50, 50, 5.0f, 50.0},
+ {"six", 60, 60, 6.0f, 60.0}, {"seven", 70, 70, 7.0f, 70.0}};
+
+ /* Define field information */
+ const char *field_names[NFIELDS] = {"Name", "Latitude", "Longitude", "Pressure", "Temperature"};
+ hid_t field_type[NFIELDS];
+ hid_t string_type;
+ hid_t file_id;
+ hsize_t chunk_size = 10;
+ int compress = 0;
+ Particle fill_data[1] = {{"no data", -1, -1, -99.0f, -99.0}}; /* Fill value particle */
+ hsize_t start1; /* Record to start reading from 1st table */
+ hsize_t nrecords; /* Number of records to insert */
+ hsize_t start2; /* Record to start writing in 2nd table */
+ int i;
+ hsize_t nfields_out;
+ hsize_t nrecords_out;
+
+ /* Initialize the field field_type */
+ string_type = H5Tcopy(H5T_C_S1);
+ H5Tset_size(string_type, 16);
+ field_type[0] = string_type;
+ field_type[1] = H5T_NATIVE_INT;
+ field_type[2] = H5T_NATIVE_INT;
+ field_type[3] = H5T_NATIVE_FLOAT;
+ field_type[4] = H5T_NATIVE_DOUBLE;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate("ex_table_09.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Make 2 tables: TABLE2_NAME is empty */
+ H5TBmake_table("Table Title", file_id, TABLE1_NAME, NFIELDS, NRECORDS, dst_size, field_names, dst_offset,
+ field_type, chunk_size, fill_data, compress, p_data);
+
+ H5TBmake_table("Table Title", file_id, TABLE2_NAME, NFIELDS, NRECORDS, dst_size, field_names, dst_offset,
+ field_type, chunk_size, fill_data, compress, NULL);
+
+ /* Add 2 records from TABLE1_NAME to TABLE2_NAME */
+ start1 = 3;
+ nrecords = NRECORDS_INS;
+ start2 = 6;
+ H5TBadd_records_from(file_id, TABLE1_NAME, start1, nrecords, TABLE2_NAME, start2);
+
+ /* read TABLE2_NAME: it should have 2 more records now */
+ H5TBread_table(file_id, TABLE2_NAME, dst_size, dst_offset, dst_sizes, dst_buf);
+
+ /* Get table info */
+ H5TBget_table_info(file_id, TABLE2_NAME, &nfields_out, &nrecords_out);
+
+ /* print */
+ printf("Table has %d fields and %d records\n", (int)nfields_out, (int)nrecords_out);
+
+ /* print it by rows */
+ for (i = 0; i < nrecords_out; i++) {
+ printf("%-5s %-5d %-5d %-5f %-5f", dst_buf[i].name, dst_buf[i].lati, dst_buf[i].longi,
+ dst_buf[i].pressure, dst_buf[i].temperature);
+ printf("\n");
+ }
+
+ /* close type */
+ H5Tclose(string_type);
+
+ /* close the file */
+ H5Fclose(file_id);
+
+ return 0;
}
-
diff --git a/hl/examples/ex_table_10.c b/hl/examples/ex_table_10.c
index 8c4d8ae..4ba5d64 100644
--- a/hl/examples/ex_table_10.c
+++ b/hl/examples/ex_table_10.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -22,114 +22,92 @@
*-------------------------------------------------------------------------
*/
-#define NFIELDS (hsize_t) 5
-#define NRECORDS (hsize_t) 8
-#define TABLE1_NAME "table1"
-#define TABLE2_NAME "table2"
-#define TABLE3_NAME "table3"
+#define NFIELDS (hsize_t)5
+#define NRECORDS (hsize_t)8
+#define TABLE1_NAME "table1"
+#define TABLE2_NAME "table2"
+#define TABLE3_NAME "table3"
-int main( void )
+int
+main(void)
{
- typedef struct Particle
- {
- char name[16];
- int lati;
- int longi;
- float pressure;
- double temperature;
- } Particle;
-
- /* Define an array of Particles */
- Particle p_data[NRECORDS] = {
- {"zero",0,0, 0.0f, 0.0},
- {"one",10,10, 1.0f, 10.0},
- {"two", 20,20, 2.0f, 20.0},
- {"three",30,30, 3.0f, 30.0},
- {"four", 40,40, 4.0f, 40.0},
- {"five", 50,50, 5.0f, 50.0},
- {"six", 60,60, 6.0f, 60.0},
- {"seven",70,70, 7.0f, 70.0}
- };
-
- Particle dst_buf[ 2 * NRECORDS ];
- /* Calculate the size and the offsets of our struct members in memory */
- size_t dst_size = sizeof( Particle );
- size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
- HOFFSET( Particle, lati ),
- HOFFSET( Particle, longi ),
- HOFFSET( Particle, pressure ),
- HOFFSET( Particle, temperature )};
- size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
- sizeof( dst_buf[0].lati),
- sizeof( dst_buf[0].longi),
- sizeof( dst_buf[0].pressure),
- sizeof( dst_buf[0].temperature)};
-
-
- /* Define field information */
- const char *field_names[NFIELDS] =
- { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
- hid_t field_type[NFIELDS];
- hid_t string_type;
- hid_t file_id;
- hsize_t chunk_size = 10;
- int compress = 0;
- int *fill_data = NULL;
- hsize_t nfields_out;
- hsize_t nrecords_out;
- int i;
-
- /* Initialize the field field_type */
- string_type = H5Tcopy( H5T_C_S1 );
- H5Tset_size( string_type, 16 );
- field_type[0] = string_type;
- field_type[1] = H5T_NATIVE_INT;
- field_type[2] = H5T_NATIVE_INT;
- field_type[3] = H5T_NATIVE_FLOAT;
- field_type[4] = H5T_NATIVE_DOUBLE;
-
- /* Create a new file using default properties. */
- file_id = H5Fcreate( "ex_table_10.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
-
- /* Make two tables */
- H5TBmake_table( "Table Title",file_id,TABLE1_NAME,NFIELDS,NRECORDS,
- dst_size,field_names, dst_offset, field_type,
- chunk_size, fill_data, compress, p_data );
-
- H5TBmake_table( "Table Title",file_id,TABLE2_NAME,NFIELDS,NRECORDS,
- dst_size,field_names, dst_offset, field_type,
- chunk_size, fill_data, compress, p_data );
-
- /* Combine the two tables into a third in the same file */
- H5TBcombine_tables( file_id, TABLE1_NAME, file_id, TABLE2_NAME, TABLE3_NAME );
-
- /* read the combined table */
- H5TBread_table( file_id, TABLE3_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
-
- /* Get table info */
- H5TBget_table_info (file_id,TABLE3_NAME, &nfields_out, &nrecords_out );
-
- /* print */
- printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
-
- /* print it by rows */
- for (i=0; i<nrecords_out; i++) {
- printf ("%-5s %-5d %-5d %-5f %-5f",
- dst_buf[i].name,
- dst_buf[i].lati,
- dst_buf[i].longi,
- dst_buf[i].pressure,
- dst_buf[i].temperature);
- printf ("\n");
- }
-
- /* close type */
- H5Tclose( string_type );
-
- /* close the file */
- H5Fclose( file_id );
-
- return 0;
-
+ typedef struct Particle {
+ char name[16];
+ int lati;
+ int longi;
+ float pressure;
+ double temperature;
+ } Particle;
+
+ /* Define an array of Particles */
+ Particle p_data[NRECORDS] = {{"zero", 0, 0, 0.0f, 0.0}, {"one", 10, 10, 1.0f, 10.0},
+ {"two", 20, 20, 2.0f, 20.0}, {"three", 30, 30, 3.0f, 30.0},
+ {"four", 40, 40, 4.0f, 40.0}, {"five", 50, 50, 5.0f, 50.0},
+ {"six", 60, 60, 6.0f, 60.0}, {"seven", 70, 70, 7.0f, 70.0}};
+
+ Particle dst_buf[2 * NRECORDS];
+ /* Calculate the size and the offsets of our struct members in memory */
+ size_t dst_size = sizeof(Particle);
+ size_t dst_offset[NFIELDS] = {HOFFSET(Particle, name), HOFFSET(Particle, lati), HOFFSET(Particle, longi),
+ HOFFSET(Particle, pressure), HOFFSET(Particle, temperature)};
+ size_t dst_sizes[NFIELDS] = {sizeof(dst_buf[0].name), sizeof(dst_buf[0].lati), sizeof(dst_buf[0].longi),
+ sizeof(dst_buf[0].pressure), sizeof(dst_buf[0].temperature)};
+
+ /* Define field information */
+ const char *field_names[NFIELDS] = {"Name", "Latitude", "Longitude", "Pressure", "Temperature"};
+ hid_t field_type[NFIELDS];
+ hid_t string_type;
+ hid_t file_id;
+ hsize_t chunk_size = 10;
+ int compress = 0;
+ int * fill_data = NULL;
+ hsize_t nfields_out;
+ hsize_t nrecords_out;
+ int i;
+
+ /* Initialize the field field_type */
+ string_type = H5Tcopy(H5T_C_S1);
+ H5Tset_size(string_type, 16);
+ field_type[0] = string_type;
+ field_type[1] = H5T_NATIVE_INT;
+ field_type[2] = H5T_NATIVE_INT;
+ field_type[3] = H5T_NATIVE_FLOAT;
+ field_type[4] = H5T_NATIVE_DOUBLE;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate("ex_table_10.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Make two tables */
+ H5TBmake_table("Table Title", file_id, TABLE1_NAME, NFIELDS, NRECORDS, dst_size, field_names, dst_offset,
+ field_type, chunk_size, fill_data, compress, p_data);
+
+ H5TBmake_table("Table Title", file_id, TABLE2_NAME, NFIELDS, NRECORDS, dst_size, field_names, dst_offset,
+ field_type, chunk_size, fill_data, compress, p_data);
+
+ /* Combine the two tables into a third in the same file */
+ H5TBcombine_tables(file_id, TABLE1_NAME, file_id, TABLE2_NAME, TABLE3_NAME);
+
+ /* read the combined table */
+ H5TBread_table(file_id, TABLE3_NAME, dst_size, dst_offset, dst_sizes, dst_buf);
+
+ /* Get table info */
+ H5TBget_table_info(file_id, TABLE3_NAME, &nfields_out, &nrecords_out);
+
+ /* print */
+ printf("Table has %d fields and %d records\n", (int)nfields_out, (int)nrecords_out);
+
+ /* print it by rows */
+ for (i = 0; i < nrecords_out; i++) {
+ printf("%-5s %-5d %-5d %-5f %-5f", dst_buf[i].name, dst_buf[i].lati, dst_buf[i].longi,
+ dst_buf[i].pressure, dst_buf[i].temperature);
+ printf("\n");
+ }
+
+ /* close type */
+ H5Tclose(string_type);
+
+ /* close the file */
+ H5Fclose(file_id);
+
+ return 0;
}
-
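
ex_table_10.c combines two tables that live in the same file, but H5TBcombine_tables also accepts two different location identifiers. The sketch below is an assumption-laden variation (made-up file names; the combined table is assumed to be created under the first identifier, which is worth confirming against the H5TB reference).

    #include "hdf5.h"
    #include "hdf5_hl.h"

    /* Hypothetical variation on ex_table_10.c: the source tables sit in two
     * different files; "combined" is created alongside the first table. */
    int
    main(void)
    {
        hid_t fid1 = H5Fopen("first.h5", H5F_ACC_RDWR, H5P_DEFAULT);
        hid_t fid2 = H5Fopen("second.h5", H5F_ACC_RDONLY, H5P_DEFAULT);

        if (fid1 < 0 || fid2 < 0)
            return 1;
        if (H5TBcombine_tables(fid1, "table1", fid2, "table2", "combined") < 0)
            return 1;

        H5Fclose(fid2);
        H5Fclose(fid1);
        return 0;
    }
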
diff --git a/hl/examples/ex_table_11.c b/hl/examples/ex_table_11.c
index d6215cb..9bf3927 100644
--- a/hl/examples/ex_table_11.c
+++ b/hl/examples/ex_table_11.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -22,95 +22,81 @@
*-------------------------------------------------------------------------
*/
-#define NFIELDS (hsize_t) 5
-#define NRECORDS (hsize_t) 8
-#define TABLE_NAME "table"
+#define NFIELDS (hsize_t)5
+#define NRECORDS (hsize_t)8
+#define TABLE_NAME "table"
-int main( void )
+int
+main(void)
{
- typedef struct Particle1
- {
- char name[16];
- int lati;
- int longi;
- float pressure;
- double temperature;
- } Particle1;
-
-/* Define an array of Particles */
- Particle1 p_data[NRECORDS] = {
- {"zero",0,0, 0.0f, 0.0},
- {"one",10,10, 1.0f, 10.0},
- {"two", 20,20, 2.0f, 20.0},
- {"three",30,30, 3.0f, 30.0},
- {"four", 40,40, 4.0f, 40.0},
- {"five", 50,50, 5.0f, 50.0},
- {"six", 60,60, 6.0f, 60.0},
- {"seven",70,70, 7.0f, 70.0}
- };
-
- /* Calculate the size and the offsets of our struct members in memory */
- size_t dst_size1 = sizeof( Particle1 );
- size_t dst_offset1[NFIELDS] = { HOFFSET( Particle1, name ),
- HOFFSET( Particle1, lati ),
- HOFFSET( Particle1, longi ),
- HOFFSET( Particle1, pressure ),
- HOFFSET( Particle1, temperature )};
-
- /* Define field information */
- const char *field_names[NFIELDS] =
- { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
- hid_t field_type[NFIELDS];
- hid_t string_type;
- hid_t file_id;
- hsize_t chunk_size = 10;
- int compress = 0;
- Particle1 fill_data[1] = { {"no data",-1,-1, -99.0f, -99.0} };
- int fill_data_new[1] = { -100 };
- hsize_t position;
- hsize_t nfields_out;
- hsize_t nrecords_out;
-
- /* Define the inserted field information */
- hid_t field_type_new = H5T_NATIVE_INT;
- int data[NRECORDS] = { 0,1,2,3,4,5,6,7 };
-
- /* Initialize the field type */
- string_type = H5Tcopy( H5T_C_S1 );
- H5Tset_size( string_type, 16 );
- field_type[0] = string_type;
- field_type[1] = H5T_NATIVE_INT;
- field_type[2] = H5T_NATIVE_INT;
- field_type[3] = H5T_NATIVE_FLOAT;
- field_type[4] = H5T_NATIVE_DOUBLE;
-
- /* Create a new file using default properties. */
- file_id = H5Fcreate( "ex_table_11.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
-
- /* Make the table */
- H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
- dst_size1,field_names, dst_offset1, field_type,
- chunk_size, fill_data, compress, p_data );
-
- /* Insert the new field at the end of the field list */
- position = NFIELDS;
- H5TBinsert_field( file_id, TABLE_NAME, "New Field", field_type_new, position,
- fill_data_new, data );
-
- /* Get table info */
- H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );
-
- /* print */
- printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
-
- /* close type */
- H5Tclose( string_type );
-
- /* close the file */
- H5Fclose( file_id );
-
- return 0;
-
-
+ typedef struct Particle1 {
+ char name[16];
+ int lati;
+ int longi;
+ float pressure;
+ double temperature;
+ } Particle1;
+
+ /* Define an array of Particles */
+ Particle1 p_data[NRECORDS] = {{"zero", 0, 0, 0.0f, 0.0}, {"one", 10, 10, 1.0f, 10.0},
+ {"two", 20, 20, 2.0f, 20.0}, {"three", 30, 30, 3.0f, 30.0},
+ {"four", 40, 40, 4.0f, 40.0}, {"five", 50, 50, 5.0f, 50.0},
+ {"six", 60, 60, 6.0f, 60.0}, {"seven", 70, 70, 7.0f, 70.0}};
+
+ /* Calculate the size and the offsets of our struct members in memory */
+ size_t dst_size1 = sizeof(Particle1);
+ size_t dst_offset1[NFIELDS] = {HOFFSET(Particle1, name), HOFFSET(Particle1, lati),
+ HOFFSET(Particle1, longi), HOFFSET(Particle1, pressure),
+ HOFFSET(Particle1, temperature)};
+
+ /* Define field information */
+ const char *field_names[NFIELDS] = {"Name", "Latitude", "Longitude", "Pressure", "Temperature"};
+ hid_t field_type[NFIELDS];
+ hid_t string_type;
+ hid_t file_id;
+ hsize_t chunk_size = 10;
+ int compress = 0;
+ Particle1 fill_data[1] = {{"no data", -1, -1, -99.0f, -99.0}};
+ int fill_data_new[1] = {-100};
+ hsize_t position;
+ hsize_t nfields_out;
+ hsize_t nrecords_out;
+
+ /* Define the inserted field information */
+ hid_t field_type_new = H5T_NATIVE_INT;
+ int data[NRECORDS] = {0, 1, 2, 3, 4, 5, 6, 7};
+
+ /* Initialize the field type */
+ string_type = H5Tcopy(H5T_C_S1);
+ H5Tset_size(string_type, 16);
+ field_type[0] = string_type;
+ field_type[1] = H5T_NATIVE_INT;
+ field_type[2] = H5T_NATIVE_INT;
+ field_type[3] = H5T_NATIVE_FLOAT;
+ field_type[4] = H5T_NATIVE_DOUBLE;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate("ex_table_11.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Make the table */
+ H5TBmake_table("Table Title", file_id, TABLE_NAME, NFIELDS, NRECORDS, dst_size1, field_names, dst_offset1,
+ field_type, chunk_size, fill_data, compress, p_data);
+
+ /* Insert the new field at the end of the field list */
+ position = NFIELDS;
+ H5TBinsert_field(file_id, TABLE_NAME, "New Field", field_type_new, position, fill_data_new, data);
+
+ /* Get table info */
+ H5TBget_table_info(file_id, TABLE_NAME, &nfields_out, &nrecords_out);
+
+ /* print */
+ printf("Table has %d fields and %d records\n", (int)nfields_out, (int)nrecords_out);
+
+ /* close type */
+ H5Tclose(string_type);
+
+ /* close the file */
+ H5Fclose(file_id);
+
+ return 0;
}
-
diff --git a/hl/examples/ex_table_12.c b/hl/examples/ex_table_12.c
index f287c29..3e7c27a 100644
--- a/hl/examples/ex_table_12.c
+++ b/hl/examples/ex_table_12.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -23,87 +23,73 @@
*-------------------------------------------------------------------------
*/
-#define NFIELDS (hsize_t) 5
-#define NRECORDS (hsize_t) 8
-#define TABLE_NAME "table"
+#define NFIELDS (hsize_t)5
+#define NRECORDS (hsize_t)8
+#define TABLE_NAME "table"
-int main( void )
+int
+main(void)
{
- typedef struct Particle
- {
- char name[16];
- int lati;
- int longi;
- float pressure;
- double temperature;
- } Particle;
-
- /* Calculate the size and the offsets of our struct members in memory */
- size_t dst_size = sizeof( Particle );
- size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
- HOFFSET( Particle, lati ),
- HOFFSET( Particle, longi ),
- HOFFSET( Particle, pressure ),
- HOFFSET( Particle, temperature )};
-
- /* Define an array of Particles */
- Particle p_data[NRECORDS] = {
- {"zero",0,0, 0.0f, 0.0},
- {"one",10,10, 1.0f, 10.0},
- {"two", 20,20, 2.0f, 20.0},
- {"three",30,30, 3.0f, 30.0},
- {"four", 40,40, 4.0f, 40.0},
- {"five", 50,50, 5.0f, 50.0},
- {"six", 60,60, 6.0f, 60.0},
- {"seven",70,70, 7.0f, 70.0}
- };
-
- /* Define field information */
- const char *field_names[NFIELDS] =
- { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
- hid_t field_type[NFIELDS];
- hid_t string_type;
- hid_t file_id;
- hsize_t chunk_size = 10;
- int compress = 0;
- Particle fill_data[1] =
- { {"no data",-1,-1, -99.0f, -99.0} };
- hsize_t nfields_out;
- hsize_t nrecords_out;
-
- /* Initialize the field type */
- string_type = H5Tcopy( H5T_C_S1 );
- H5Tset_size( string_type, 16 );
- field_type[0] = string_type;
- field_type[1] = H5T_NATIVE_INT;
- field_type[2] = H5T_NATIVE_INT;
- field_type[3] = H5T_NATIVE_FLOAT;
- field_type[4] = H5T_NATIVE_DOUBLE;
-
- /* Create a new file using default properties. */
- file_id = H5Fcreate( "ex_table_12.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
-
- /* Make a table */
- H5TBmake_table( "Table Title", file_id, TABLE_NAME, NFIELDS, NRECORDS, dst_size,
- field_names, dst_offset, field_type,
- chunk_size, fill_data, compress, p_data );
-
- /* Delete the field */
- H5TBdelete_field( file_id, TABLE_NAME, "Pressure" );
-
- /* Get table info */
- H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );
-
- /* print */
- printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
-
- /* close type */
- H5Tclose( string_type );
-
- /* close the file */
- H5Fclose( file_id );
-
- return 0;
-
+ typedef struct Particle {
+ char name[16];
+ int lati;
+ int longi;
+ float pressure;
+ double temperature;
+ } Particle;
+
+ /* Calculate the size and the offsets of our struct members in memory */
+ size_t dst_size = sizeof(Particle);
+ size_t dst_offset[NFIELDS] = {HOFFSET(Particle, name), HOFFSET(Particle, lati), HOFFSET(Particle, longi),
+ HOFFSET(Particle, pressure), HOFFSET(Particle, temperature)};
+
+ /* Define an array of Particles */
+ Particle p_data[NRECORDS] = {{"zero", 0, 0, 0.0f, 0.0}, {"one", 10, 10, 1.0f, 10.0},
+ {"two", 20, 20, 2.0f, 20.0}, {"three", 30, 30, 3.0f, 30.0},
+ {"four", 40, 40, 4.0f, 40.0}, {"five", 50, 50, 5.0f, 50.0},
+ {"six", 60, 60, 6.0f, 60.0}, {"seven", 70, 70, 7.0f, 70.0}};
+
+ /* Define field information */
+ const char *field_names[NFIELDS] = {"Name", "Latitude", "Longitude", "Pressure", "Temperature"};
+ hid_t field_type[NFIELDS];
+ hid_t string_type;
+ hid_t file_id;
+ hsize_t chunk_size = 10;
+ int compress = 0;
+ Particle fill_data[1] = {{"no data", -1, -1, -99.0f, -99.0}};
+ hsize_t nfields_out;
+ hsize_t nrecords_out;
+
+ /* Initialize the field type */
+ string_type = H5Tcopy(H5T_C_S1);
+ H5Tset_size(string_type, 16);
+ field_type[0] = string_type;
+ field_type[1] = H5T_NATIVE_INT;
+ field_type[2] = H5T_NATIVE_INT;
+ field_type[3] = H5T_NATIVE_FLOAT;
+ field_type[4] = H5T_NATIVE_DOUBLE;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate("ex_table_12.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Make a table */
+ H5TBmake_table("Table Title", file_id, TABLE_NAME, NFIELDS, NRECORDS, dst_size, field_names, dst_offset,
+ field_type, chunk_size, fill_data, compress, p_data);
+
+ /* Delete the field */
+ H5TBdelete_field(file_id, TABLE_NAME, "Pressure");
+
+ /* Get table info */
+ H5TBget_table_info(file_id, TABLE_NAME, &nfields_out, &nrecords_out);
+
+ /* print */
+ printf("Table has %d fields and %d records\n", (int)nfields_out, (int)nrecords_out);
+
+ /* close type */
+ H5Tclose(string_type);
+
+ /* close the file */
+ H5Fclose(file_id);
+
+ return 0;
}
-
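A note on the reformatted ex_table_12.c above: after H5TBdelete_field removes the "Pressure" field, the example only prints the field and record counts. The sketch below is illustrative only and is not part of this patch; it shows how the remaining field names could be read back with H5TBget_field_info, and the 4 x 16-character name buffers are an assumption based on the string sizes used in the example.

    #include <stdio.h>
    #include "hdf5.h"
    #include "hdf5_hl.h"

    /* Illustrative sketch, not part of the patch: list the fields that remain
     * in "table" after "Pressure" has been deleted by ex_table_12.c. */
    int
    main(void)
    {
        hid_t   file_id;
        hsize_t nfields, nrecords, i;
        char    name_buf[4][16]; /* assumed: 4 remaining fields, names shorter than 16 chars */
        char   *field_names[4];
        size_t  field_sizes[4], field_offsets[4], type_size;

        for (i = 0; i < 4; i++)
            field_names[i] = name_buf[i];

        file_id = H5Fopen("ex_table_12.h5", H5F_ACC_RDONLY, H5P_DEFAULT);

        H5TBget_table_info(file_id, "table", &nfields, &nrecords);
        H5TBget_field_info(file_id, "table", field_names, field_sizes, field_offsets, &type_size);

        for (i = 0; i < nfields; i++)
            printf("field %d: %s\n", (int)i, field_names[i]);

        H5Fclose(file_id);
        return 0;
    }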
diff --git a/hl/examples/pal_rgb.h b/hl/examples/pal_rgb.h
index f3905b3..a2b17e2 100644
--- a/hl/examples/pal_rgb.h
+++ b/hl/examples/pal_rgb.h
@@ -6,272 +6,268 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-const unsigned char pal_rgb[256*3] = {255,255,255,
-0,0,131,
-0,0,135,
-0,0,139,
-0,0,143,
-0,0,147,
-0,0,151,
-0,0,155,
-0,0,159,
-0,0,163,
-0,0,167,
-0,0,171,
-0,0,175,
-0,0,179,
-0,0,183,
-0,0,187,
-0,0,191,
-0,0,195,
-0,0,199,
-0,0,203,
-0,0,207,
-0,0,211,
-0,0,215,
-0,0,219,
-0,0,223,
-0,0,227,
-0,0,231,
-0,0,235,
-0,0,239,
-0,0,243,
-0,0,247,
-0,0,251,
-0,0,255,
-0,0,255,
-0,3,255,
-0,7,255,
-0,11,255,
-0,15,255,
-0,19,255,
-0,23,255,
-0,27,255,
-0,31,255,
-0,35,255,
-0,39,255,
-0,43,255,
-0,47,255,
-0,51,255,
-0,55,255,
-0,59,255,
-0,63,255,
-0,67,255,
-0,71,255,
-0,75,255,
-0,79,255,
-0,83,255,
-0,87,255,
-0,91,255,
-0,95,255,
-0,99,255,
-0,103,255,
-0,107,255,
-0,111,255,
-0,115,255,
-0,119,255,
-0,123,255,
-0,127,255,
-0,131,255,
-0,135,255,
-0,139,255,
-0,143,255,
-0,147,255,
-0,151,255,
-0,155,255,
-0,159,255,
-0,163,255,
-0,167,255,
-0,171,255,
-0,175,255,
-0,179,255,
-0,183,255,
-0,187,255,
-0,191,255,
-0,195,255,
-0,199,255,
-0,203,255,
-0,207,255,
-0,211,255,
-0,215,255,
-0,219,255,
-0,223,255,
-0,227,255,
-0,231,255,
-0,235,255,
-0,239,255,
-0,243,255,
-0,247,255,
-0,251,255,
-0,255,255,
-0,255,255,
-3,255,251,
-7,255,247,
-11,255,243,
-15,255,239,
-19,255,235,
-23,255,231,
-27,255,227,
-31,255,223,
-35,255,219,
-39,255,215,
-43,255,211,
-47,255,207,
-51,255,203,
-55,255,199,
-59,255,195,
-63,255,191,
-67,255,187,
-71,255,183,
-75,255,179,
-79,255,175,
-83,255,171,
-87,255,167,
-91,255,163,
-95,255,159,
-99,255,155,
-103,255,151,
-107,255,147,
-111,255,143,
-115,255,139,
-119,255,135,
-123,255,131,
-127,255,127,
-131,255,123,
-135,255,119,
-139,255,115,
-143,255,111,
-147,255,107,
-151,255,103,
-155,255,99,
-159,255,95,
-163,255,91,
-167,255,87,
-171,255,83,
-175,255,79,
-179,255,75,
-183,255,71,
-187,255,67,
-191,255,63,
-195,255,59,
-199,255,55,
-203,255,51,
-207,255,47,
-211,255,43,
-215,255,39,
-219,255,35,
-223,255,31,
-227,255,27,
-231,255,23,
-235,255,19,
-239,255,15,
-243,255,11,
-247,255,7,
-251,255,3,
-255,255,0,
-255,251,0,
-255,247,0,
-255,243,0,
-255,239,0,
-255,235,0,
-255,231,0,
-255,227,0,
-255,223,0,
-255,219,0,
-255,215,0,
-255,211,0,
-255,207,0,
-255,203,0,
-255,199,0,
-255,195,0,
-255,191,0,
-255,187,0,
-255,183,0,
-255,179,0,
-255,175,0,
-255,171,0,
-255,167,0,
-255,163,0,
-255,159,0,
-255,155,0,
-255,151,0,
-255,147,0,
-255,143,0,
-255,139,0,
-255,135,0,
-255,131,0,
-255,127,0,
-255,123,0,
-255,119,0,
-255,115,0,
-255,111,0,
-255,107,0,
-255,103,0,
-255,99,0,
-255,95,0,
-255,91,0,
-255,87,0,
-255,83,0,
-255,79,0,
-255,75,0,
-255,71,0,
-255,67,0,
-255,63,0,
-255,59,0,
-255,55,0,
-255,51,0,
-255,47,0,
-255,43,0,
-255,39,0,
-255,35,0,
-255,31,0,
-255,27,0,
-255,23,0,
-255,19,0,
-255,15,0,
-255,11,0,
-255,7,0,
-255,3,0,
-255,0,0,
-250,0,0,
-246,0,0,
-241,0,0,
-237,0,0,
-233,0,0,
-228,0,0,
-224,0,0,
-219,0,0,
-215,0,0,
-211,0,0,
-206,0,0,
-202,0,0,
-197,0,0,
-193,0,0,
-189,0,0,
-184,0,0,
-180,0,0,
-175,0,0,
-171,0,0,
-167,0,0,
-162,0,0,
-158,0,0,
-153,0,0,
-149,0,0,
-145,0,0,
-140,0,0,
-136,0,0,
-131,0,0,
-127,0,0
+/* clang-format off */
+const unsigned char pal_rgb[256*3] = {
+ 255,255,255,
+ 0,0,131,
+ 0,0,135,
+ 0,0,139,
+ 0,0,143,
+ 0,0,147,
+ 0,0,151,
+ 0,0,155,
+ 0,0,159,
+ 0,0,163,
+ 0,0,167,
+ 0,0,171,
+ 0,0,175,
+ 0,0,179,
+ 0,0,183,
+ 0,0,187,
+ 0,0,191,
+ 0,0,195,
+ 0,0,199,
+ 0,0,203,
+ 0,0,207,
+ 0,0,211,
+ 0,0,215,
+ 0,0,219,
+ 0,0,223,
+ 0,0,227,
+ 0,0,231,
+ 0,0,235,
+ 0,0,239,
+ 0,0,243,
+ 0,0,247,
+ 0,0,251,
+ 0,0,255,
+ 0,0,255,
+ 0,3,255,
+ 0,7,255,
+ 0,11,255,
+ 0,15,255,
+ 0,19,255,
+ 0,23,255,
+ 0,27,255,
+ 0,31,255,
+ 0,35,255,
+ 0,39,255,
+ 0,43,255,
+ 0,47,255,
+ 0,51,255,
+ 0,55,255,
+ 0,59,255,
+ 0,63,255,
+ 0,67,255,
+ 0,71,255,
+ 0,75,255,
+ 0,79,255,
+ 0,83,255,
+ 0,87,255,
+ 0,91,255,
+ 0,95,255,
+ 0,99,255,
+ 0,103,255,
+ 0,107,255,
+ 0,111,255,
+ 0,115,255,
+ 0,119,255,
+ 0,123,255,
+ 0,127,255,
+ 0,131,255,
+ 0,135,255,
+ 0,139,255,
+ 0,143,255,
+ 0,147,255,
+ 0,151,255,
+ 0,155,255,
+ 0,159,255,
+ 0,163,255,
+ 0,167,255,
+ 0,171,255,
+ 0,175,255,
+ 0,179,255,
+ 0,183,255,
+ 0,187,255,
+ 0,191,255,
+ 0,195,255,
+ 0,199,255,
+ 0,203,255,
+ 0,207,255,
+ 0,211,255,
+ 0,215,255,
+ 0,219,255,
+ 0,223,255,
+ 0,227,255,
+ 0,231,255,
+ 0,235,255,
+ 0,239,255,
+ 0,243,255,
+ 0,247,255,
+ 0,251,255,
+ 0,255,255,
+ 0,255,255,
+ 3,255,251,
+ 7,255,247,
+ 11,255,243,
+ 15,255,239,
+ 19,255,235,
+ 23,255,231,
+ 27,255,227,
+ 31,255,223,
+ 35,255,219,
+ 39,255,215,
+ 43,255,211,
+ 47,255,207,
+ 51,255,203,
+ 55,255,199,
+ 59,255,195,
+ 63,255,191,
+ 67,255,187,
+ 71,255,183,
+ 75,255,179,
+ 79,255,175,
+ 83,255,171,
+ 87,255,167,
+ 91,255,163,
+ 95,255,159,
+ 99,255,155,
+ 103,255,151,
+ 107,255,147,
+ 111,255,143,
+ 115,255,139,
+ 119,255,135,
+ 123,255,131,
+ 127,255,127,
+ 131,255,123,
+ 135,255,119,
+ 139,255,115,
+ 143,255,111,
+ 147,255,107,
+ 151,255,103,
+ 155,255,99,
+ 159,255,95,
+ 163,255,91,
+ 167,255,87,
+ 171,255,83,
+ 175,255,79,
+ 179,255,75,
+ 183,255,71,
+ 187,255,67,
+ 191,255,63,
+ 195,255,59,
+ 199,255,55,
+ 203,255,51,
+ 207,255,47,
+ 211,255,43,
+ 215,255,39,
+ 219,255,35,
+ 223,255,31,
+ 227,255,27,
+ 231,255,23,
+ 235,255,19,
+ 239,255,15,
+ 243,255,11,
+ 247,255,7,
+ 251,255,3,
+ 255,255,0,
+ 255,251,0,
+ 255,247,0,
+ 255,243,0,
+ 255,239,0,
+ 255,235,0,
+ 255,231,0,
+ 255,227,0,
+ 255,223,0,
+ 255,219,0,
+ 255,215,0,
+ 255,211,0,
+ 255,207,0,
+ 255,203,0,
+ 255,199,0,
+ 255,195,0,
+ 255,191,0,
+ 255,187,0,
+ 255,183,0,
+ 255,179,0,
+ 255,175,0,
+ 255,171,0,
+ 255,167,0,
+ 255,163,0,
+ 255,159,0,
+ 255,155,0,
+ 255,151,0,
+ 255,147,0,
+ 255,143,0,
+ 255,139,0,
+ 255,135,0,
+ 255,131,0,
+ 255,127,0,
+ 255,123,0,
+ 255,119,0,
+ 255,115,0,
+ 255,111,0,
+ 255,107,0,
+ 255,103,0,
+ 255,99,0,
+ 255,95,0,
+ 255,91,0,
+ 255,87,0,
+ 255,83,0,
+ 255,79,0,
+ 255,75,0,
+ 255,71,0,
+ 255,67,0,
+ 255,63,0,
+ 255,59,0,
+ 255,55,0,
+ 255,51,0,
+ 255,47,0,
+ 255,43,0,
+ 255,39,0,
+ 255,35,0,
+ 255,31,0,
+ 255,27,0,
+ 255,23,0,
+ 255,19,0,
+ 255,15,0,
+ 255,11,0,
+ 255,7,0,
+ 255,3,0,
+ 255,0,0,
+ 250,0,0,
+ 246,0,0,
+ 241,0,0,
+ 237,0,0,
+ 233,0,0,
+ 228,0,0,
+ 224,0,0,
+ 219,0,0,
+ 215,0,0,
+ 211,0,0,
+ 206,0,0,
+ 202,0,0,
+ 197,0,0,
+ 193,0,0,
+ 189,0,0,
+ 184,0,0,
+ 180,0,0,
+ 175,0,0,
+ 171,0,0,
+ 167,0,0,
+ 162,0,0,
+ 158,0,0,
+ 153,0,0,
+ 149,0,0,
+ 145,0,0,
+ 140,0,0,
+ 136,0,0,
+ 131,0,0,
+ 127,0,0
};
-
-
-
-
-
-
-
+/* clang-format on */
diff --git a/hl/examples/ptExampleFL.c b/hl/examples/ptExampleFL.c
index ba7a3a0..88ba8e5 100644
--- a/hl/examples/ptExampleFL.c
+++ b/hl/examples/ptExampleFL.c
@@ -6,7 +6,7 @@
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -23,82 +23,80 @@
*-------------------------------------------------------------------------
*/
-int main(void)
+int
+main(void)
{
- hid_t fid; /* File identifier */
- hid_t ptable; /* Packet table identifier */
+ hid_t fid; /* File identifier */
+ hid_t ptable; /* Packet table identifier */
- herr_t err; /* Function return status */
- hsize_t count; /* Number of records in the table */
+ herr_t err; /* Function return status */
+ hsize_t count; /* Number of records in the table */
- int x; /* Loop variable */
+ int x; /* Loop variable */
/* Buffers to hold data */
- int writeBuffer[5];
- int readBuffer[5];
+ int writeBuffer[5];
+ int readBuffer[5];
- /* Initialize buffers */
- for(x=0; x<5; x++)
- {
- writeBuffer[x]=x;
- readBuffer[x] = -1;
- }
+ /* Initialize buffers */
+ for (x = 0; x < 5; x++) {
+ writeBuffer[x] = x;
+ readBuffer[x] = -1;
+ }
/* Create a file using default properties */
- fid=H5Fcreate("packet_table_FLexample.h5",H5F_ACC_TRUNC,H5P_DEFAULT,H5P_DEFAULT);
+ fid = H5Fcreate("packet_table_FLexample.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
/* Create a fixed-length packet table within the file */
/* This table's "packets" will be simple integers and it will use compression
* level 5. */
- ptable = H5PTcreate_fl(fid, "Packet Test Dataset", H5T_NATIVE_INT, (hsize_t)100, 5);
- if(ptable == H5I_INVALID_HID)
- goto out;
+ ptable = H5PTcreate_fl(fid, "Packet Test Dataset", H5T_NATIVE_INT, (hsize_t)100, 5);
+ if (ptable == H5I_INVALID_HID)
+ goto out;
/* Write one packet to the packet table */
- err = H5PTappend(ptable, (hsize_t)1, &(writeBuffer[0]) );
- if(err < 0)
- goto out;
+ err = H5PTappend(ptable, (hsize_t)1, &(writeBuffer[0]));
+ if (err < 0)
+ goto out;
/* Write several packets to the packet table */
- err = H5PTappend(ptable, (hsize_t)4, &(writeBuffer[1]) );
- if(err < 0)
- goto out;
+ err = H5PTappend(ptable, (hsize_t)4, &(writeBuffer[1]));
+ if (err < 0)
+ goto out;
/* Get the number of packets in the packet table. This should be five. */
- err = H5PTget_num_packets(ptable, &count);
- if(err < 0)
- goto out;
+ err = H5PTget_num_packets(ptable, &count);
+ if (err < 0)
+ goto out;
- printf("Number of packets in packet table after five appends: %d\n", (int)count);
+ printf("Number of packets in packet table after five appends: %d\n", (int)count);
/* Initialize packet table's "current record" */
- err = H5PTcreate_index(ptable);
- if(err < 0)
- goto out;
+ err = H5PTcreate_index(ptable);
+ if (err < 0)
+ goto out;
/* Iterate through packets, read each one back */
- for(x=0; x<5; x++)
- {
- err = H5PTget_next(ptable, (hsize_t)1, &(readBuffer[x]) );
- if(err < 0)
- goto out;
+ for (x = 0; x < 5; x++) {
+ err = H5PTget_next(ptable, (hsize_t)1, &(readBuffer[x]));
+ if (err < 0)
+ goto out;
- printf("Packet %d's value is %d\n", x, readBuffer[x]);
- }
+ printf("Packet %d's value is %d\n", x, readBuffer[x]);
+ }
/* Close the packet table */
- err = H5PTclose(ptable);
- if(err < 0)
- goto out;
+ err = H5PTclose(ptable);
+ if (err < 0)
+ goto out;
/* Close the file */
- H5Fclose(fid);
+ H5Fclose(fid);
- return 0;
+ return 0;
- out: /* An error has occurred. Clean up and exit. */
+out: /* An error has occurred. Clean up and exit. */
H5PTclose(ptable);
H5Fclose(fid);
return -1;
}
-
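A note on the reformatted ptExampleFL.c above: the example reads packets back one at a time via H5PTget_next. The fixed-length packet-table API can also read a range of packets in a single call with H5PTread_packets; the sketch below is illustrative only and not part of this patch, reusing the file and dataset names from the example.

    #include <stdio.h>
    #include "hdf5.h"
    #include "hdf5_hl.h"

    /* Illustrative sketch, not part of the patch: reopen the packet table
     * written by ptExampleFL.c and read all five packets in one call. */
    int
    main(void)
    {
        hid_t fid, ptable;
        int   buf[5];
        int   x;

        fid    = H5Fopen("packet_table_FLexample.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
        ptable = H5PTopen(fid, "Packet Test Dataset");
        if (ptable == H5I_INVALID_HID)
            goto out;

        /* Read 5 packets starting at index 0 */
        if (H5PTread_packets(ptable, (hsize_t)0, 5, buf) < 0)
            goto out;

        for (x = 0; x < 5; x++)
            printf("Packet %d's value is %d\n", x, buf[x]);

        H5PTclose(ptable);
        H5Fclose(fid);
        return 0;

    out: /* An error has occurred. Clean up and exit. */
        H5PTclose(ptable);
        H5Fclose(fid);
        return -1;
    }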
diff --git a/hl/examples/run-hl-ex.sh b/hl/examples/run-hl-ex.sh
index 6f736cc..c31e0d4 100755
--- a/hl/examples/run-hl-ex.sh
+++ b/hl/examples/run-hl-ex.sh
@@ -6,7 +6,7 @@
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
+# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
diff --git a/hl/examples/run-hlc-ex.sh.in b/hl/examples/run-hlc-ex.sh.in
index 11560ff..18b86fa 100644
--- a/hl/examples/run-hlc-ex.sh.in
+++ b/hl/examples/run-hlc-ex.sh.in
@@ -6,7 +6,7 @@
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
-# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
+# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
@@ -18,7 +18,7 @@
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# #
# This script will compile and run the c examples from source files installed #
-# in .../share/hdf5_examples/hl/c using h5cc or h5pc. The order for running #
+# in @examplesdir@/hl/c using h5cc or h5pc. The order for running #
# programs with RunTest in the MAIN section below is taken from the Makefile. #
# The order is important since some of the test programs use data files created #
# by earlier test programs. Any future additions should be placed accordingly. #
@@ -29,9 +29,32 @@
EXIT_SUCCESS=0
EXIT_FAILURE=1
+#
+# Try to derive the path to the installation $prefix established
+# by ./configure relative to the examples directory established by
+# ./configure. If successful, set `prefix_relto_examplesdir` to the
+# relative path. Otherwise, set `prefix_relto_examplesdir` to the
+# absolute installation $prefix.
+#
+# Below, this script uses the value of `prefix` from the user's environment,
+# if it is set. The content of $() is evaluated in a sub-shell, so
+# if `prefix` is set in the user's environment, the shell statements in
+# $() won't clobber it.
+#
+prefix_relto_examplesdir=$(
+prefix=@prefix@
+examplesdir=@examplesdir@
+if [ ${examplesdir##${prefix}/} != ${examplesdir} ]; then
+ echo $(echo ${examplesdir##${prefix}/} | \
+ sed 's,[^/][^/]*,..,g')
+else
+ echo $prefix
+fi
+)
+
# Where the tool is installed.
# default is relative path to installed location of the tools
-prefix="${prefix:-../../../../}"
+prefix="${prefix:-../../${prefix_relto_examplesdir}}"
PARALLEL=@PARALLEL@ # Am I in parallel mode?
AR="@AR@"
RANLIB="@RANLIB@"