author     Quincey Koziol <koziol@hdfgroup.org>    2015-08-29 01:43:37 (GMT)
committer  Quincey Koziol <koziol@hdfgroup.org>    2015-08-29 01:43:37 (GMT)
commit     b68b9d8786f6b9bf4bd52a3b8c87fa64933803b1 (patch)
tree       32b5219b2bea7df6846f83ebf725f7310f991206
parent     1847391fc51728811407f3e1586213758c1d0e89 (diff)
[svn-r27612] Description:
    Align w/vds branch: Whitespace cleanup, move common code for opening a
    dataset into a new routine, misc. style cleanups.

    Tested on:
        MacOSX/64 10.10.5 (amazon) w/serial & parallel
        (h5committest upcoming)
-rw-r--r--   MANIFEST                                  1
-rw-r--r--   configure.ac                            114
-rw-r--r--   src/H5D.c                                44
-rw-r--r--   src/H5Ddeprec.c                          46
-rw-r--r--   src/H5Dint.c                             70
-rw-r--r--   src/H5Dpkg.h                              3
-rw-r--r--   src/H5Olayout.c                           6
-rw-r--r--   src/H5Pdcpl.c                            12
-rw-r--r--   tools/h5dump/errfiles/tdset-2.err        13
-rw-r--r--   tools/h5dump/errfiles/tperror.err        13
-rw-r--r--   tools/h5dump/errfiles/tslink-D.err       19
11 files changed, 180 insertions, 161 deletions
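
Context for the refactoring described above: the lookup/type-check/open sequence that H5Dopen2() and H5Dopen1() previously duplicated (H5G_loc_find(), the H5O_obj_type() check, and H5D_open()) now lives in the new package-private routine H5D__open_name() in src/H5Dint.c, which is why the h5dump error files gain an extra stack frame. The following is a minimal application-level sketch, not part of the commit, that exercises this path; the file and dataset names are hypothetical and only the public HDF5 API is used.

    /*
     * Sketch only: open a dataset through the public API.  If the dataset
     * is missing, the automatically printed error stack now contains the
     * additional H5D__open_name() frame shown in the updated
     * tools/h5dump/errfiles below.
     */
    #include <stdio.h>
    #include "hdf5.h"

    int
    main(void)
    {
        hid_t file_id, dset_id;

        /* Open an existing HDF5 file read-only (hypothetical name) */
        if((file_id = H5Fopen("example.h5", H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
            return 1;

        /* H5Dopen2() now delegates the name lookup, object-type check, and
         * H5D_open() call to H5D__open_name() inside the library. */
        if((dset_id = H5Dopen2(file_id, "/does_not_exist", H5P_DEFAULT)) < 0) {
            fprintf(stderr, "could not open dataset\n");
            H5Fclose(file_id);
            return 1;
        }

        H5Dclose(dset_id);
        H5Fclose(file_id);
        return 0;
    }
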
diff --git a/MANIFEST b/MANIFEST
index b18c4ee..44bdc24 100644
--- a/MANIFEST
+++ b/MANIFEST
@@ -1064,7 +1064,6 @@
./tools/h5dump/testh5dumpxml.sh.in
./tools/h5dump/binread.c
-
./tools/h5import/Makefile.am
./tools/h5import/Makefile.in
./tools/h5import/h5import.h
diff --git a/configure.ac b/configure.ac
index b441cad..ff8a667 100644
--- a/configure.ac
+++ b/configure.ac
@@ -187,7 +187,7 @@ AC_SUBST([PAC_C_MAX_REAL_PRECISION])
## ----------------------------------------------------------------------
## Some platforms have broken basename, and/or xargs programs. Check
## that it actually does what it's supposed to do. Catch this early
-## since configure and scripts relies upon them heavily and there's
+## since configure and scripts relies upon them heavily and there's
## no use continuing if it's broken.
##
AC_MSG_CHECKING([if basename works])
@@ -335,7 +335,7 @@ AC_ARG_ENABLE([unsupported],
[Allow unsupported combinations of configure options])],
[ALLOW_UNSUPPORTED=$enableval])
-case "X-$ALLOW_UNSUPPORTED" in
+case "X-$ALLOW_UNSUPPORTED" in
X-|X-no)
AC_MSG_RESULT([no])
;;
@@ -416,7 +416,7 @@ if test "X$HDF_FORTRAN" = "Xyes"; then
[cat fortran/src/H5config_f.inc | sed '1d;s%^/\* \(.*\) \*/%\1%;s/#define /#define H5_/;s/#undef /#undef H5_/' >fortran/src/H5config_f.inc.tmp; mv -f fortran/src/H5config_f.inc.tmp fortran/src/H5config_f.inc])
AC_SUBST([FC]) HDF_FORTRAN=yes
-
+
HDF5_INTERFACES="$HDF5_INTERFACES fortran"
## --------------------------------------------------------------------
@@ -441,7 +441,7 @@ if test "X$HDF_FORTRAN" = "Xyes"; then
## --------------------------------------------------------------------
## Check for a Fortran compiler and how to include modules.
- ##
+ ##
AC_PROG_FC([PAC_FC_SEARCH_LIST],)
AC_F9X_MODS
@@ -631,8 +631,8 @@ if test "X$HDF_CXX" = "Xyes"; then
# Checking if C++ can handle namespaces
PAC_PROG_CXX_NAMESPACE
-
- # Checking if C++ has offsetof extension
+
+ # Checking if C++ has offsetof extension
PAC_PROG_CXX_OFFSETOF
# if C++ can handle static cast
@@ -781,7 +781,7 @@ if test "X${HDF_FORTRAN}" = "Xyes" && test "X${enable_shared}" != "Xno"; then
H5_FORTRAN_SHARED="yes"
## Disable fortran shared libraries on Mac. (MAM - 03/30/11)
-
+
case "`uname`" in
Darwin*)
H5_FORTRAN_SHARED="no"
@@ -790,7 +790,7 @@ if test "X${HDF_FORTRAN}" = "Xyes" && test "X${enable_shared}" != "Xno"; then
esac
## Report results of check(s)
-
+
if test "X${H5_FORTRAN_SHARED}" = "Xno"; then
AC_MSG_RESULT([no])
AC_MSG_WARN([$CHECK_WARN])
@@ -826,7 +826,7 @@ LT_INIT([dlopen,win32-dll])
## ----------------------------------------------------------------------
## Check if we should install only statically linked executables.
## This check needs to occur after libtool is initialized because
-## we check a libtool cache value and may issue a warning based
+## we check a libtool cache value and may issue a warning based
## on its result.
AC_MSG_CHECKING([if we should install only statically linked executables])
AC_ARG_ENABLE([static_exec],
@@ -895,7 +895,7 @@ esac
AC_SUBST([AM_MAKEFLAGS]) AM_MAKEFLAGS=""
## Don't run test if MAKE is defined but is the empty string
-if test -n "${MAKE-make}"; then
+if test -n "${MAKE-make}"; then
AC_MSG_CHECKING([whether make will build with undefined variables])
@@ -1047,11 +1047,11 @@ fi
## ----------------------------------------------------------------------
## Use the macro _AC_SYS_LARGEFILE_MACRO_VALUE to test defines
-## that might need to be set for largefile support to behave
+## that might need to be set for largefile support to behave
## correctly. This macro is defined in acsite.m4 and overrides
## the version provided by Autoconf (as of v2.65). The custom
-## macro additionally adds the appropriate defines to AM_CPPFLAGS
-## so that later configure checks have them visible.
+## macro additionally adds the appropriate defines to AM_CPPFLAGS
+## so that later configure checks have them visible.
## Check for _FILE_OFFSET_BITS
_AC_SYS_LARGEFILE_MACRO_VALUE([_FILE_OFFSET_BITS], [64],
@@ -1072,16 +1072,16 @@ fi
##
case "$host_cpu-$host_vendor-$host_os" in
*linux*)
- ## Make available various LFS-related routines using the following
+ ## Make available various LFS-related routines using the following
## _LARGEFILE*_SOURCE macros.
AM_CPPFLAGS="-D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE $AM_CPPFLAGS"
## Add POSIX support on Linux systems, so <features.h> defines
## __USE_POSIX, which is required to get the prototype for fdopen
- ## defined correctly in <stdio.h>.
+ ## defined correctly in <stdio.h>.
##
## This flag was removed from h5cc as of 2009-10-17 when it was found
- ## that the flag broke compiling netCDF-4 code with h5cc, but kept in
+ ## that the flag broke compiling netCDF-4 code with h5cc, but kept in
## H5_CPPFLAGS because fdopen and HDfdopen fail without it. HDfdopen
## is used only by H5_debug_mask which is used only when debugging in
## H5_init_library (all in H5.c). When the flag was removed this was
@@ -1108,7 +1108,7 @@ case "$host_cpu-$host_vendor-$host_os" in
## correctly in <stdio.h>
## Linking to the bsd-compat library is required as per the gcc manual:
## http://www.gnu.org/s/libc/manual/html_node/Feature-Test-Macros.html
- ## however, we do not do this since it breaks the big test on some
+ ## however, we do not do this since it breaks the big test on some
## older platforms.
H5_CPPFLAGS="-D_BSD_SOURCE $H5_CPPFLAGS"
@@ -1119,7 +1119,7 @@ case "$host_cpu-$host_vendor-$host_os" in
;;
esac
-## Need to add the AM_ and H5_ into CFLAGS/CPPFLAGS to make them visible
+## Need to add the AM_ and H5_ into CFLAGS/CPPFLAGS to make them visible
## for configure checks.
## Note: Both will be restored by the end of configure.
CPPFLAGS="$H5_CPPFLAGS $AM_CPPFLAGS $CPPFLAGS"
@@ -1430,17 +1430,17 @@ case $withval in
fi
;;
esac
-
+
saved_CPPFLAGS="$CPPFLAGS"
saved_AM_CPPFLAGS="$AM_CPPFLAGS"
saved_LDFLAGS="$LDFLAGS"
saved_AM_LDFLAGS="$AM_LDFLAGS"
-
+
if test -n "$szlib_inc"; then
CPPFLAGS="$CPPFLAGS -I$szlib_inc"
AM_CPPFLAGS="$AM_CPPFLAGS -I$szlib_inc"
fi
-
+
AC_CHECK_HEADERS([szlib.h],
[HAVE_SZLIB_H="yes"],
[CPPFLAGS="$saved_CPPFLAGS"; AM_CPPFLAGS="$saved_AM_CPPFLAGS"] [unset HAVE_SZLIB])
@@ -1449,7 +1449,7 @@ case $withval in
LDFLAGS="$LDFLAGS -L$szlib_lib"
AM_LDFLAGS="$AM_LDFLAGS -L$szlib_lib"
fi
-
+
if test "x$HAVE_SZLIB" = "xyes" -a "x$HAVE_SZLIB_H" = "xyes"; then
AC_CHECK_LIB([sz], [SZ_BufftoBuffCompress],,
[LDFLAGS="$saved_LDFLAGS"; AM_LDFLAGS="$saved_AM_LDFLAGS"; unset HAVE_SZLIB])
@@ -1466,7 +1466,7 @@ if test "x$HAVE_SZLIB" = "xyes" -a "x$HAVE_SZLIB_H" = "xyes"; then
AC_MSG_CHECKING([for szlib encoder])
## Set LD_LIBRARY_PATH so encoder test can find the library and run.
- ## Also add LL_PATH substitution to Makefiles so they can use the
+ ## Also add LL_PATH substitution to Makefiles so they can use the
## path as well, for testing examples.
if test -z "$LD_LIBRARY_PATH"; then
export LD_LIBRARY_PATH="$szlib_lib"
@@ -1489,25 +1489,25 @@ if test "x$HAVE_SZLIB" = "xyes" -a "x$HAVE_SZLIB_H" = "xyes"; then
]])]
, [hdf5_cv_szlib_can_encode=yes], [hdf5_cv_szlib_can_encode=no],)]
)
-
- AC_DEFINE([HAVE_FILTER_SZIP], [1],
+
+ AC_DEFINE([HAVE_FILTER_SZIP], [1],
[Define if support for szip filter is enabled])
USE_FILTER_SZIP="yes"
if test ${hdf5_cv_szlib_can_encode} = "yes"; then
AC_MSG_RESULT([yes])
- fi
+ fi
if test ${hdf5_cv_szlib_can_encode} = "no"; then
AC_MSG_RESULT([no])
- fi
-
+ fi
+
## Add "szip" to external filter list
if test ${hdf5_cv_szlib_can_encode} = "yes"; then
if test "X$EXTERNAL_FILTERS" != "X"; then
EXTERNAL_FILTERS="${EXTERNAL_FILTERS},"
fi
EXTERNAL_FILTERS="${EXTERNAL_FILTERS}szip(encoder)"
- fi
+ fi
if test ${hdf5_cv_szlib_can_encode} = "no"; then
if test "X$EXTERNAL_FILTERS" != "X"; then
EXTERNAL_FILTERS="${EXTERNAL_FILTERS},"
@@ -1560,7 +1560,7 @@ case "X-$THREADSAFE" in
AC_MSG_RESULT([no])
;;
X-yes)
- THREADSAFE=yes
+ THREADSAFE=yes
AC_MSG_RESULT([yes])
;;
*)
@@ -1867,7 +1867,7 @@ for hdf5_cv_printf_ll in l ll L q unknown; do
done])
AC_MSG_RESULT([%${hdf5_cv_printf_ll}d and %${hdf5_cv_printf_ll}u])
-AC_DEFINE_UNQUOTED([PRINTF_LL_WIDTH], ["$hdf5_cv_printf_ll"],
+AC_DEFINE_UNQUOTED([PRINTF_LL_WIDTH], ["$hdf5_cv_printf_ll"],
[Width for printf() for type `long long' or `__int64', use `ll'])
@@ -2186,7 +2186,7 @@ if test -n "$PARALLEL"; then
fi
## If RUNSERIAL or RUNPARALLEL is the word `none' then replace it with
- ## the empty string. This means that no launch commands were requested,
+ ## the empty string. This means that no launch commands were requested,
## so we will not use any launch commands.
if test "X$RUNSERIAL" = "Xnone"; then
RUNSERIAL=""
@@ -2252,7 +2252,7 @@ if test -n "$PARALLEL"; then
fi
;;
esac
-
+
if test -n "$mpe_inc"; then
saved_CPPFLAGS="$CPPFLAGS"
saved_AM_CPPFLAGS="$AM_CPPFLAGS"
@@ -2262,7 +2262,7 @@ if test -n "$PARALLEL"; then
else
AC_CHECK_HEADERS([mpe.h],, [unset MPE])
fi
-
+
if test -n "$mpe_lib"; then
saved_LDFLAGS="$LDFLAGS"
saved_AM_LDFLAGS="$AM_LDFLAGS"
@@ -2347,12 +2347,12 @@ AC_DEFINE_UNQUOTED([DEFAULT_PLUGINDIR], ["$default_plugindir"],
## Decide whether the presence of user's exception handling functions is
## checked and data conversion exceptions are returned. This is mainly
## for the speed optimization of hard conversions. Soft conversions can
-## actually benefit little.
+## actually benefit little.
##
AC_MSG_CHECKING([whether exception handling functions is checked during data conversions])
AC_ARG_ENABLE([dconv-exception],
[AS_HELP_STRING([--enable-dconv-exception],
- [if exception handling functions is checked during
+ [if exception handling functions is checked during
data conversions [default=yes]])],
[DCONV_EXCEPTION=$enableval], [DCONV_EXCEPTION=yes])
@@ -2406,9 +2406,9 @@ esac
## ----------------------------------------------------------------------
## Set the flag to indicate that the machine is using a special algorithm to convert
-## 'long double' to '(unsigned) long' values. (This flag should only be set for
-## the IBM Power6 Linux. When the bit sequence of long double is
-## 0x4351ccf385ebc8a0bfcc2a3c3d855620, the converted value of (unsigned)long
+## 'long double' to '(unsigned) long' values. (This flag should only be set for
+## the IBM Power6 Linux. When the bit sequence of long double is
+## 0x4351ccf385ebc8a0bfcc2a3c3d855620, the converted value of (unsigned)long
## is 0x004733ce17af227f, not the same as the library's conversion to 0x004733ce17af2282.
## The machine's conversion gets the correct value. We define the macro and disable
## this kind of test until we figure out what algorithm they use.
@@ -2429,12 +2429,12 @@ else
unsigned char s[16];
unsigned char s2[8];
int ret = 1;
-
+
if(sizeof(long double) == 16 && sizeof(long) == 8) {
- /*make sure the long double type has 16 bytes in size and
+ /*make sure the long double type has 16 bytes in size and
* 11 bits of exponent. If it is,
- *the bit sequence should be like below. It's not
- *a decent way to check but this info isn't available. */
+ *the bit sequence should be like below. It's not
+ *a decent way to check but this info isn't available. */
memcpy(s, &ld, 16);
if(s[0]==0x43 && s[1]==0x51 && s[2]==0xcc && s[3]==0xf3 &&
s[4]==0x85 && s[5]==0xeb && s[6]==0xc8 && s[7]==0xa0 &&
@@ -2468,8 +2468,8 @@ else
if(s2[0]==0x00 && s2[1]==0x47 && s2[2]==0x33 && s2[3]==0xce &&
s2[4]==0x17 && s2[5]==0xaf && s2[6]==0x22 && s2[7]==0x7f)
ret = 0;
- }
- }
+ }
+ }
exit(ret);
]])]
, [hdf5_cv_ldouble_to_long_special=yes], [hdf5_cv_ldouble_to_long_special=no],)])
@@ -2485,10 +2485,10 @@ fi
## ----------------------------------------------------------------------
## Set the flag to indicate that the machine is using a special algorithm
-## to convert some values of '(unsigned) long' to 'long double' values.
-## (This flag should be off for all machines, except for IBM Power6 Linux,
-## when the bit sequences are 003fff..., 007fff..., 00ffff..., 01ffff...,
-## ..., 7fffff..., the compiler uses a unknown algorithm. We define a
+## to convert some values of '(unsigned) long' to 'long double' values.
+## (This flag should be off for all machines, except for IBM Power6 Linux,
+## when the bit sequences are 003fff..., 007fff..., 00ffff..., 01ffff...,
+## ..., 7fffff..., the compiler uses a unknown algorithm. We define a
## macro and skip the test for now until we know about the algorithm.
##
AC_MSG_CHECKING([if using special algorithm to convert (unsigned) long to long double values])
@@ -2506,17 +2506,17 @@ else
unsigned long ull;
unsigned char s[16];
int flag=0, ret=1;
-
+
/*Determine if long double has 16 byte in size, 11 bit exponent, and
- *the bias is 0x3ff */
- if(sizeof(long double) == 16) {
+ *the bias is 0x3ff */
+ if(sizeof(long double) == 16) {
ld = 1.0L;
memcpy(s, &ld, 16);
if(s[0]==0x3f && s[1]==0xf0 && s[2]==0x00 && s[3]==0x00 &&
- s[4]==0x00 && s[5]==0x00 && s[6]==0x00 && s[7]==0x00)
- flag = 1;
+ s[4]==0x00 && s[5]==0x00 && s[6]==0x00 && s[7]==0x00)
+ flag = 1;
}
-
+
if(flag==1 && sizeof(long)==8) {
ll = 0x003fffffffffffffL;
ld = (long double)ll;
@@ -2532,7 +2532,7 @@ else
s[8]==0xbf && s[9]==0xf0 && s[10]==0x00 && s[11]==0x00 &&
s[12]==0x00 && s[13]==0x00 && s[14]==0x00 && s[15]==0x00)
ret = 0;
- }
+ }
if(flag==1 && sizeof(unsigned long)==8) {
ull = 0xffffffffffffffffUL;
ld = (long double)ull;
@@ -2550,7 +2550,7 @@ else
s[8]==0xbf && s[9]==0xf0 && s[10]==0x00 && s[11]==0x00 &&
s[12]==0x00 && s[13]==0x00 && s[14]==0x00 && s[15]==0x00)
ret = 0;
- }
+ }
exit(ret);
]])]
, [hdf5_cv_long_to_ldouble_special=yes], [hdf5_cv_long_to_ldouble_special=no],)])
diff --git a/src/H5D.c b/src/H5D.c
index f06ec9b..6183ec5 100644
--- a/src/H5D.c
+++ b/src/H5D.c
@@ -323,14 +323,9 @@ hid_t
H5Dopen2(hid_t loc_id, const char *name, hid_t dapl_id)
{
H5D_t *dset = NULL;
- H5G_loc_t loc; /* Object location of group */
- H5G_loc_t dset_loc; /* Object location of dataset */
- H5G_name_t path; /* Dataset group hier. path */
- H5O_loc_t oloc; /* Dataset object location */
- H5O_type_t obj_type; /* Type of object at location */
- hbool_t loc_found = FALSE; /* Location at 'name' found */
- hid_t dxpl_id = H5AC_ind_dxpl_id; /* dxpl to use to open datset */
- hid_t ret_value;
+ H5G_loc_t loc; /* Object location of group */
+ hid_t dxpl_id = H5AC_ind_dxpl_id; /* dxpl to use to open datset */
+ hid_t ret_value;
FUNC_ENTER_API(FAIL)
H5TRACE3("i", "i*si", loc_id, name, dapl_id);
@@ -348,41 +343,18 @@ H5Dopen2(hid_t loc_id, const char *name, hid_t dapl_id)
if(TRUE != H5P_isa_class(dapl_id, H5P_DATASET_ACCESS))
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not dataset access property list")
- /* Set up dataset location to fill in */
- dset_loc.oloc = &oloc;
- dset_loc.path = &path;
- H5G_loc_reset(&dset_loc);
-
- /* Find the dataset object */
- if(H5G_loc_find(&loc, name, &dset_loc, dapl_id, dxpl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_NOTFOUND, FAIL, "not found")
- loc_found = TRUE;
-
- /* Check that the object found is the correct type */
- if(H5O_obj_type(&oloc, &obj_type, dxpl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get object type")
- if(obj_type != H5O_TYPE_DATASET)
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "not a dataset")
-
/* Open the dataset */
- if(NULL == (dset = H5D_open(&dset_loc, dapl_id, dxpl_id)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't open dataset")
+ if(NULL == (dset = H5D__open_name(&loc, name, dapl_id, dxpl_id)))
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTOPENOBJ, FAIL, "unable to open dataset")
/* Register an atom for the dataset */
if((ret_value = H5I_register(H5I_DATASET, dset, TRUE)) < 0)
HGOTO_ERROR(H5E_ATOM, H5E_CANTREGISTER, FAIL, "can't register dataset atom")
done:
- if(ret_value < 0) {
- if(dset) {
- if(H5D_close(dset) < 0)
- HDONE_ERROR(H5E_DATASET, H5E_CLOSEERROR, FAIL, "unable to release dataset")
- } /* end if */
- else {
- if(loc_found && H5G_loc_free(&dset_loc) < 0)
- HDONE_ERROR(H5E_DATASET, H5E_CANTRELEASE, FAIL, "can't free location")
- } /* end else */
- } /* end if */
+ if(ret_value < 0)
+ if(dset && H5D_close(dset) < 0)
+ HDONE_ERROR(H5E_DATASET, H5E_CLOSEERROR, FAIL, "unable to release dataset")
FUNC_LEAVE_API(ret_value)
} /* end H5Dopen2() */
diff --git a/src/H5Ddeprec.c b/src/H5Ddeprec.c
index 3da6b95..cd2bc84 100644
--- a/src/H5Ddeprec.c
+++ b/src/H5Ddeprec.c
@@ -226,15 +226,10 @@ hid_t
H5Dopen1(hid_t loc_id, const char *name)
{
H5D_t *dset = NULL;
- H5G_loc_t loc; /* Object location of group */
- H5G_loc_t dset_loc; /* Object location of dataset */
- H5G_name_t path; /* Dataset group hier. path */
- H5O_loc_t oloc; /* Dataset object location */
- H5O_type_t obj_type; /* Type of object at location */
- hbool_t loc_found = FALSE; /* Location at 'name' found */
- hid_t dapl_id = H5P_DATASET_ACCESS_DEFAULT; /* dapl to use to open dataset */
- hid_t dxpl_id = H5AC_ind_dxpl_id; /* dxpl to use to open datset */
- hid_t ret_value;
+ H5G_loc_t loc; /* Object location of group */
+ hid_t dapl_id = H5P_DATASET_ACCESS_DEFAULT; /* dapl to use to open dataset */
+ hid_t dxpl_id = H5AC_ind_dxpl_id; /* dxpl to use to open datset */
+ hid_t ret_value;
FUNC_ENTER_API(FAIL)
H5TRACE2("i", "i*s", loc_id, name);
@@ -245,41 +240,18 @@ H5Dopen1(hid_t loc_id, const char *name)
if(!name || !*name)
HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no name")
- /* Set up dataset location to fill in */
- dset_loc.oloc = &oloc;
- dset_loc.path = &path;
- H5G_loc_reset(&dset_loc);
-
- /* Find the dataset object */
- if(H5G_loc_find(&loc, name, &dset_loc, H5P_DEFAULT, dxpl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_NOTFOUND, FAIL, "not found")
- loc_found = TRUE;
-
- /* Check that the object found is the correct type */
- if(H5O_obj_type(&oloc, &obj_type, dxpl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get object type")
- if(obj_type != H5O_TYPE_DATASET)
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "not a dataset")
-
/* Open the dataset */
- if(NULL == (dset = H5D_open(&dset_loc, dapl_id, dxpl_id)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't open dataset")
+ if(NULL == (dset = H5D__open_name(&loc, name, dapl_id, dxpl_id)))
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTOPENOBJ, FAIL, "unable to open dataset")
/* Register an atom for the dataset */
if((ret_value = H5I_register(H5I_DATASET, dset, TRUE)) < 0)
HGOTO_ERROR(H5E_ATOM, H5E_CANTREGISTER, FAIL, "can't register dataset atom")
done:
- if(ret_value < 0) {
- if(dset != NULL) {
- if(H5D_close(dset) < 0)
- HDONE_ERROR(H5E_DATASET, H5E_CLOSEERROR, FAIL, "unable to release dataset")
- } /* end if */
- else {
- if(loc_found && H5G_loc_free(&dset_loc) < 0)
- HDONE_ERROR(H5E_SYM, H5E_CANTRELEASE, FAIL, "can't free location")
- } /* end else */
- } /* end if */
+ if(ret_value < 0)
+ if(dset && H5D_close(dset) < 0)
+ HDONE_ERROR(H5E_DATASET, H5E_CLOSEERROR, FAIL, "unable to release dataset")
FUNC_LEAVE_API(ret_value)
} /* end H5Dopen1() */
diff --git a/src/H5Dint.c b/src/H5Dint.c
index 53cd86a..b41e5a8 100644
--- a/src/H5Dint.c
+++ b/src/H5Dint.c
@@ -1091,7 +1091,7 @@ H5D__create(H5F_t *file, hid_t type_id, const H5S_t *space, hid_t dcpl_id,
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to initialize I/O operations")
/* Create the layout information for the new dataset */
- if((new_dset->shared->layout.ops->construct)(file, new_dset) < 0)
+ if(new_dset->shared->layout.ops->construct && (new_dset->shared->layout.ops->construct)(file, new_dset) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to construct layout information")
/* Update the dataset's object header info. */
@@ -1144,6 +1144,69 @@ done:
} /* end H5D__create() */
+/*-------------------------------------------------------------------------
+ * Function: H5D__open_name
+ *
+ * Purpose: Opens an existing dataset by name.
+ *
+ * Return: Success: Ptr to a new dataset.
+ * Failure: NULL
+ *
+ * Programmer: Neil Fortner
+ * Friday, March 6, 2015
+ *
+ *-------------------------------------------------------------------------
+ */
+H5D_t *
+H5D__open_name(const H5G_loc_t *loc, const char *name, hid_t dapl_id,
+ hid_t dxpl_id)
+{
+ H5D_t *dset = NULL;
+ H5G_loc_t dset_loc; /* Object location of dataset */
+ H5G_name_t path; /* Dataset group hier. path */
+ H5O_loc_t oloc; /* Dataset object location */
+ H5O_type_t obj_type; /* Type of object at location */
+ hbool_t loc_found = FALSE; /* Location at 'name' found */
+ H5D_t *ret_value; /* Return value */
+
+ FUNC_ENTER_PACKAGE
+
+ /* Check args */
+ HDassert(loc);
+ HDassert(name);
+
+ /* Set up dataset location to fill in */
+ dset_loc.oloc = &oloc;
+ dset_loc.path = &path;
+ H5G_loc_reset(&dset_loc);
+
+ /* Find the dataset object */
+ if(H5G_loc_find(loc, name, &dset_loc, dapl_id, dxpl_id) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_NOTFOUND, NULL, "not found")
+ loc_found = TRUE;
+
+ /* Check that the object found is the correct type */
+ if(H5O_obj_type(&oloc, &obj_type, dxpl_id) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, NULL, "can't get object type")
+ if(obj_type != H5O_TYPE_DATASET)
+ HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, NULL, "not a dataset")
+
+ /* Open the dataset */
+ if(NULL == (dset = H5D_open(&dset_loc, dapl_id, dxpl_id)))
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "can't open dataset")
+
+ /* Set return value */
+ ret_value = dset;
+
+done:
+ if(!ret_value)
+ if(loc_found && H5G_loc_free(&dset_loc) < 0)
+ HDONE_ERROR(H5E_DATASET, H5E_CANTRELEASE, NULL, "can't free location")
+
+ FUNC_LEAVE_NOAPI(ret_value)
+} /* end H5D__open_name() */
+
+
/*
*-------------------------------------------------------------------------
* Function: H5D_open
@@ -2206,6 +2269,7 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id)
{
hsize_t curr_dims[H5S_MAX_RANK]; /* Current dimension sizes */
htri_t changed; /* Whether the dataspace changed size */
+ size_t u; /* Local index variable */
herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_PACKAGE_TAG(dxpl_id, dset->oloc.addr, FAIL)
@@ -2241,10 +2305,9 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id)
hbool_t shrink = FALSE; /* Flag to indicate a dimension has shrank */
hbool_t expand = FALSE; /* Flag to indicate a dimension has grown */
hbool_t update_chunks = FALSE; /* Flag to indicate chunk cache update is needed */
- unsigned u; /* Local index variable */
/* Determine if we are shrinking and/or expanding any dimensions */
- for(u = 0; u < dset->shared->ndims; u++) {
+ for(u = 0; u < (size_t)dset->shared->ndims; u++) {
/* Check for various status changes */
if(size[u] < curr_dims[u])
shrink = TRUE;
@@ -2815,3 +2878,4 @@ done:
FUNC_LEAVE_NOAPI(ret_value)
} /* end H5D_get_type() */
+
diff --git a/src/H5Dpkg.h b/src/H5Dpkg.h
index 1476229..7aeba5d 100644
--- a/src/H5Dpkg.h
+++ b/src/H5Dpkg.h
@@ -535,6 +535,8 @@ H5_DLL H5D_t *H5D__create(H5F_t *file, hid_t type_id, const H5S_t *space,
H5_DLL H5D_t *H5D__create_named(const H5G_loc_t *loc, const char *name,
hid_t type_id, const H5S_t *space, hid_t lcpl_id, hid_t dcpl_id,
hid_t dapl_id, hid_t dxpl_id);
+H5_DLL H5D_t *H5D__open_name(const H5G_loc_t *loc, const char *name,
+ hid_t dapl_id, hid_t dxpl_id);
H5_DLL herr_t H5D__get_space_status(H5D_t *dset, H5D_space_status_t *allocation,
hid_t dxpl_id);
H5_DLL herr_t H5D__alloc_storage(const H5D_t *dset, hid_t dxpl_id, H5D_time_alloc_t time_alloc,
@@ -605,7 +607,6 @@ H5_DLL herr_t H5D__contig_copy(H5F_t *f_src, const H5O_storage_contig_t *storage
H5_DLL herr_t H5D__contig_delete(H5F_t *f, hid_t dxpl_id,
const H5O_storage_t *store);
-
/* Functions that operate on chunked dataset storage */
H5_DLL htri_t H5D__chunk_cacheable(const H5D_io_info_t *io_info, haddr_t caddr,
hbool_t write_op);
diff --git a/src/H5Olayout.c b/src/H5Olayout.c
index 8821694..b499e31 100644
--- a/src/H5Olayout.c
+++ b/src/H5Olayout.c
@@ -190,7 +190,7 @@ H5O_layout_decode(H5F_t *f, hid_t H5_ATTR_UNUSED dxpl_id, H5O_t H5_ATTR_UNUSED *
} /* end if */
else {
/* Layout class */
- mesg->type = (H5D_layout_t)*p++;
+ mesg->type = mesg->storage.type = (H5D_layout_t)*p++;
/* Interpret the rest of the message according to the layout class */
switch(mesg->type) {
@@ -686,7 +686,7 @@ H5O_layout_debug(H5F_t H5_ATTR_UNUSED *f, hid_t H5_ATTR_UNUSED dxpl_id, const vo
FILE * stream, int indent, int fwidth)
{
const H5O_layout_t *mesg = (const H5O_layout_t *) _mesg;
- unsigned u;
+ size_t u;
FUNC_ENTER_NOAPI_NOINIT_NOERR
@@ -709,7 +709,7 @@ H5O_layout_debug(H5F_t H5_ATTR_UNUSED *f, hid_t H5_ATTR_UNUSED dxpl_id, const vo
"Number of dimensions:",
(unsigned long)(mesg->u.chunk.ndims));
HDfprintf(stream, "%*s%-*s {", indent, "", fwidth, "Size:");
- for(u = 0; u < mesg->u.chunk.ndims; u++)
+ for(u = 0; u < (size_t)mesg->u.chunk.ndims; u++)
HDfprintf(stream, "%s%lu", u ? ", " : "", (unsigned long)(mesg->u.chunk.dim[u]));
HDfprintf(stream, "}\n");
diff --git a/src/H5Pdcpl.c b/src/H5Pdcpl.c
index 9bf58e4..fe13ac5 100644
--- a/src/H5Pdcpl.c
+++ b/src/H5Pdcpl.c
@@ -478,7 +478,7 @@ static herr_t
H5P__dcrt_layout_dec(const void **_pp, void *value)
{
const H5O_layout_t *layout; /* Storage layout */
- H5O_layout_t chunk_layout; /* Layout structure for chunk info */
+ H5O_layout_t tmp_layout; /* Temporary local layout structure */
H5D_layout_t type; /* Layout type */
const uint8_t **pp = (const uint8_t **)_pp;
herr_t ret_value = SUCCEED; /* Return value */
@@ -518,15 +518,15 @@ H5P__dcrt_layout_dec(const void **_pp, void *value)
unsigned u; /* Local index variable */
/* Initialize to default values */
- chunk_layout = H5D_def_layout_chunk_g;
+ tmp_layout = H5D_def_layout_chunk_g;
/* Set rank & dimensions */
- chunk_layout.u.chunk.ndims = (unsigned)ndims;
+ tmp_layout.u.chunk.ndims = (unsigned)ndims;
for(u = 0; u < ndims; u++)
- UINT32DECODE(*pp, chunk_layout.u.chunk.dim[u])
+ UINT32DECODE(*pp, tmp_layout.u.chunk.dim[u])
/* Point at the newly set up struct */
- layout = &chunk_layout;
+ layout = &tmp_layout;
} /* end else */
}
break;
@@ -1185,8 +1185,10 @@ H5P__init_def_layout(void)
FUNC_ENTER_STATIC_NOERR
/* Initialize the default layout info for non-contigous layouts */
+ H5D_def_layout_compact_g.storage.type = H5D_COMPACT;
H5D_def_layout_compact_g.storage.u.compact = def_store_compact;
H5D_def_layout_chunk_g.u.chunk = def_layout_chunk;
+ H5D_def_layout_chunk_g.storage.type = H5D_CHUNKED;
H5D_def_layout_chunk_g.storage.u.chunk = def_store_chunk;
/* Note that we've initialized the default values */
diff --git a/tools/h5dump/errfiles/tdset-2.err b/tools/h5dump/errfiles/tdset-2.err
index d9b92f3..775351e 100644
--- a/tools/h5dump/errfiles/tdset-2.err
+++ b/tools/h5dump/errfiles/tdset-2.err
@@ -1,17 +1,20 @@
HDF5-DIAG: Error detected in HDF5 (version (number)) thread (IDs):
- #000: (file name) line (number) in H5Dopen2(): not found
+ #000: (file name) line (number) in H5Dopen2(): unable to open dataset
+ major: Dataset
+ minor: Can't open object
+ #001: (file name) line (number) in H5D__open_name(): not found
major: Dataset
minor: Object not found
- #001: (file name) line (number) in H5G_loc_find(): can't find object
+ #002: (file name) line (number) in H5G_loc_find(): can't find object
major: Symbol table
minor: Object not found
- #002: (file name) line (number) in H5G_traverse(): internal path traversal failed
+ #003: (file name) line (number) in H5G_traverse(): internal path traversal failed
major: Symbol table
minor: Object not found
- #003: (file name) line (number) in H5G_traverse_real(): traversal operator failed
+ #004: (file name) line (number) in H5G_traverse_real(): traversal operator failed
major: Symbol table
minor: Callback failed
- #004: (file name) line (number) in H5G_loc_find_cb(): object 'dset3' doesn't exist
+ #005: (file name) line (number) in H5G_loc_find_cb(): object 'dset3' doesn't exist
major: Symbol table
minor: Object not found
HDF5-DIAG: Error detected in HDF5 (version (number)) thread (IDs):
diff --git a/tools/h5dump/errfiles/tperror.err b/tools/h5dump/errfiles/tperror.err
index 19a7a73..29f9e7f 100644
--- a/tools/h5dump/errfiles/tperror.err
+++ b/tools/h5dump/errfiles/tperror.err
@@ -1,17 +1,20 @@
HDF5-DIAG: Error detected in HDF5 (version (number)) thread (IDs):
- #000: (file name) line (number) in H5Dopen2(): not found
+ #000: (file name) line (number) in H5Dopen2(): unable to open dataset
+ major: Dataset
+ minor: Can't open object
+ #001: (file name) line (number) in H5D__open_name(): not found
major: Dataset
minor: Object not found
- #001: (file name) line (number) in H5G_loc_find(): can't find object
+ #002: (file name) line (number) in H5G_loc_find(): can't find object
major: Symbol table
minor: Object not found
- #002: (file name) line (number) in H5G_traverse(): internal path traversal failed
+ #003: (file name) line (number) in H5G_traverse(): internal path traversal failed
major: Symbol table
minor: Object not found
- #003: (file name) line (number) in H5G_traverse_real(): traversal operator failed
+ #004: (file name) line (number) in H5G_traverse_real(): traversal operator failed
major: Symbol table
minor: Callback failed
- #004: (file name) line (number) in H5G_loc_find_cb(): object 'bogus' doesn't exist
+ #005: (file name) line (number) in H5G_loc_find_cb(): object 'bogus' doesn't exist
major: Symbol table
minor: Object not found
HDF5-DIAG: Error detected in HDF5 (version (number)) thread (IDs):
diff --git a/tools/h5dump/errfiles/tslink-D.err b/tools/h5dump/errfiles/tslink-D.err
index b98e324..924e9cf 100644
--- a/tools/h5dump/errfiles/tslink-D.err
+++ b/tools/h5dump/errfiles/tslink-D.err
@@ -1,25 +1,28 @@
HDF5-DIAG: Error detected in HDF5 (version (number)) thread (IDs):
- #000: (file name) line (number) in H5Dopen2(): not found
+ #000: (file name) line (number) in H5Dopen2(): unable to open dataset
+ major: Dataset
+ minor: Can't open object
+ #001: (file name) line (number) in H5D__open_name(): not found
major: Dataset
minor: Object not found
- #001: (file name) line (number) in H5G_loc_find(): can't find object
+ #002: (file name) line (number) in H5G_loc_find(): can't find object
major: Symbol table
minor: Object not found
- #002: (file name) line (number) in H5G_traverse(): internal path traversal failed
+ #003: (file name) line (number) in H5G_traverse(): internal path traversal failed
major: Symbol table
minor: Object not found
- #003: (file name) line (number) in H5G_traverse_real(): special link traversal failed
+ #004: (file name) line (number) in H5G_traverse_real(): special link traversal failed
major: Links
minor: Link traversal failure
- #004: (file name) line (number) in H5G__traverse_special(): symbolic link traversal failed
+ #005: (file name) line (number) in H5G__traverse_special(): symbolic link traversal failed
major: Links
minor: Link traversal failure
- #005: (file name) line (number) in H5G_traverse_slink(): unable to follow symbolic link
+ #006: (file name) line (number) in H5G_traverse_slink(): unable to follow symbolic link
major: Symbol table
minor: Object not found
- #006: (file name) line (number) in H5G_traverse_real(): traversal operator failed
+ #007: (file name) line (number) in H5G_traverse_real(): traversal operator failed
major: Symbol table
minor: Callback failed
- #007: (file name) line (number) in H5G_traverse_slink_cb(): component not found
+ #008: (file name) line (number) in H5G_traverse_slink_cb(): component not found
major: Symbol table
minor: Object not found