-rw-r--r--  release_docs/RELEASE.txt  |    4
-rw-r--r--  src/H5Zscaleoffset.c      |  307
-rw-r--r--  test/be_data.h5           |  bin 9424 -> 40320 bytes
-rwxr-xr-x  test/cross_read.c         |  737
-rwxr-xr-x  test/gen_cross.c          |  671
-rw-r--r--  test/le_data.h5           |  bin 9424 -> 40320 bytes
-rw-r--r--  test/vms_data.h5          |  bin 9424 -> 40320 bytes
7 files changed, 1493 insertions, 226 deletions
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 024d76b..9ae4575 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -257,6 +257,10 @@ Bug Fixes since HDF5-1.8.0 release
Library
-------
+ - Fixed a bug that caused big endian machines to generate corrupt files
+ when using the scale-offset filter with floating point data or
+ fill values. Note that such datasets will no longer be readable
+ by any machine after this patch. (NAF - 2010/02/02)
- Retrieving a link's name by index in the case where the link is
external and the file that the link refers to doesn't exist will
now fail gracefully rather than cause a segmentation fault.
diff --git a/src/H5Zscaleoffset.c b/src/H5Zscaleoffset.c
index eb3c6e6..c524141 100644
--- a/src/H5Zscaleoffset.c
+++ b/src/H5Zscaleoffset.c
@@ -53,16 +53,16 @@ static size_t H5Z_filter_scaleoffset(unsigned flags, size_t cd_nelmts,
static void H5Z_scaleoffset_convert(void *buf, unsigned d_nelmts, size_t dtype_size);
static unsigned H5Z_scaleoffset_log2(unsigned long long num);
static void H5Z_scaleoffset_precompress_i(void *data, unsigned d_nelmts,
- enum H5Z_scaleoffset_t type, unsigned filavail, const void *filval_buf,
+ enum H5Z_scaleoffset_t type, unsigned filavail, const unsigned cd_values[],
uint32_t *minbits, unsigned long long *minval);
static void H5Z_scaleoffset_postdecompress_i(void *data, unsigned d_nelmts,
- enum H5Z_scaleoffset_t type, unsigned filavail, const void *filval_buf,
+ enum H5Z_scaleoffset_t type, unsigned filavail, const unsigned cd_values[],
uint32_t minbits, unsigned long long minval);
static herr_t H5Z_scaleoffset_precompress_fd(void *data, unsigned d_nelmts,
- enum H5Z_scaleoffset_t type, unsigned filavail, const void *filval_buf,
+ enum H5Z_scaleoffset_t type, unsigned filavail, const unsigned cd_values[],
uint32_t *minbits, unsigned long long *minval, double D_val);
static herr_t H5Z_scaleoffset_postdecompress_fd(void *data, unsigned d_nelmts,
- enum H5Z_scaleoffset_t type, unsigned filavail, const void *filval_buf,
+ enum H5Z_scaleoffset_t type, unsigned filavail, const unsigned cd_values[],
uint32_t minbits, unsigned long long minval, double D_val);
static void H5Z_scaleoffset_next_byte(size_t *j, unsigned *buf_len);
static void H5Z_scaleoffset_decompress_one_byte(unsigned char *data, size_t data_offset,
@@ -119,24 +119,71 @@ H5Z_class2_t H5Z_SCALEOFFSET[1] = {{
/* Store fill value in cd_values[] */
#define H5Z_scaleoffset_save_filval(type, cd_values, fill_val) \
{ \
- unsigned char *fill_parm; /* Pointer to fill value parameter */ \
+ unsigned _i = H5Z_SCALEOFFSET_PARM_FILVAL; /* index into cd_values */ \
+ uint32_t _cd_value; /* Current cd_value */ \
+ char *_fv_p; /* Pointer to current byte in fill_val */ \
+ size_t _copy_size = 4; /* # of bytes to copy this iteration */ \
+ size_t _size_rem = sizeof(type); /* # of bytes left to copy to cd_values */ \
\
/* Store the fill value as the last entry in cd_values[] \
* Store byte by byte from least significant byte to most significant byte \
* Plenty of space left for the fill value (from index 8 to 19) \
+ * H5O_pline_encode will byte-swap each individual cd value, but we still \
+ * need to swap the cd values as a whole if we are on a BE machine. Note \
+ * that we need to make sure to put the data only in the lowest 4 bytes of \
+ * each, if sizeof(unsigned) > 4. \
*/ \
- fill_parm = (unsigned char *)&cd_values[H5Z_SCALEOFFSET_PARM_FILVAL]; \
- if(H5T_native_order_g == H5T_ORDER_LE) \
- HDmemcpy(fill_parm, &fill_val, sizeof(type)); \
- else { \
- unsigned char *fill_buf; /* Pointer to fill value in memory */ \
- unsigned u; /* index */ \
+ if(H5T_native_order_g == H5T_ORDER_LE) { \
+ _fv_p = (char *)&(fill_val); \
+ /* Copy 4 bytes at a time to each cd value */ \
+ do { \
+ if(_size_rem < 4) { \
+ /* Amount left to copy is smaller than a cd_value, adjust copy \
+ * size and initialize cd_value as it will not be fully \
+ * overwritten */ \
+ _copy_size = _size_rem; \
+ _cd_value = (uint32_t)0; \
+ } /* end if */ \
+ \
+ /* Copy the value */ \
+ HDmemcpy(&_cd_value, _fv_p, _copy_size); \
+ (cd_values)[_i] = (unsigned)_cd_value; \
\
+ /* Next field */ \
+ _i++; \
+ _fv_p += _copy_size; \
+ _size_rem -= _copy_size; \
+ } while(_size_rem); \
+ } /* end if */ \
+ else { \
HDassert(H5T_native_order_g == H5T_ORDER_BE); \
\
- fill_buf = (unsigned char *)&fill_val; \
- for(u = 0; u < sizeof(type); u++) \
- fill_parm[u] = fill_buf[sizeof(type) - (u + 1)]; \
+ /* Copy 4 bytes at a time to each cd value, but start at the end \
+ * (highest address) of fill_val */ \
+ _fv_p = ((char *)&(fill_val)) + sizeof(type) - MIN(4, _size_rem); \
+ while(_size_rem >= 4) { \
+ /* Copy the value */ \
+ HDmemcpy(&_cd_value, _fv_p, _copy_size); \
+ (cd_values)[_i] = (unsigned)_cd_value; \
+ \
+ /* Next field */ \
+ _i++; \
+ _size_rem -= 4; \
+ if(_size_rem >= 4) \
+ _fv_p -= 4; \
+ else \
+ _fv_p -= _size_rem; \
+ } /* end while */ \
+ \
+ HDassert(_fv_p == (char *)&(fill_val)); \
+ if(_size_rem) { \
+ /* Amount left to copy is smaller than a cd_value, initialize \
+ * _cd_value as it will not be fully overwritten and copy to the end \
+ * of _cd value as it is BE. */ \
+ _cd_value = (uint32_t)0; \
+ HDmemcpy((char *)&_cd_value + 4 - _size_rem, _fv_p, _size_rem); \
+ (cd_values)[_i] = (unsigned)_cd_value; \
+ } /* end if */ \
} /* end else */ \
}
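
For reference, the packing scheme implemented by the macro above can be read as the following standalone sketch (a hypothetical helper, not part of the patch): the fill value is split into 32-bit cd_values entries four bytes at a time, and on a big-endian host the walk starts from the highest address so that the per-value byte swap later applied by H5O_pline_encode reproduces the little-endian on-disk layout.

#include <stdint.h>
#include <string.h>

/* Hypothetical stand-in for the H5Z_scaleoffset_save_filval logic above:
 * pack `size` bytes of `fill_val` into 32-bit slots of `cd_values`,
 * starting at slot `slot` (H5Z_SCALEOFFSET_PARM_FILVAL in the real code). */
static void
pack_fill_value(unsigned cd_values[], unsigned slot,
                const void *fill_val, size_t size, int big_endian)
{
    const char *base = (const char *)fill_val;
    size_t      rem  = size;

    if (!big_endian) {
        /* LE: copy forward, 4 bytes per cd value, zero-padding the last one */
        const char *p = base;
        while (rem) {
            uint32_t v = 0;
            size_t   n = rem < 4 ? rem : 4;
            memcpy(&v, p, n);
            cd_values[slot++] = (unsigned)v;
            p   += n;
            rem -= n;
        }
    }
    else {
        /* BE: copy backward from the highest address so that the later
         * per-value byte swap yields the same layout as on LE */
        const char *p = base + size;
        while (rem >= 4) {
            uint32_t v;
            p -= 4;
            memcpy(&v, p, 4);
            cd_values[slot++] = (unsigned)v;
            rem -= 4;
        }
        if (rem) {   /* leftover bytes land at the end of the last slot */
            uint32_t v = 0;
            memcpy((char *)&v + 4 - rem, base, rem);
            cd_values[slot] = (unsigned)v;
        }
    }
}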
@@ -180,7 +227,7 @@ H5Z_class2_t H5Z_SCALEOFFSET[1] = {{
HGOTO_ERROR(H5E_PLINE, H5E_CANTGET, FAIL, "unable to get fill value") \
\
/* Store the fill value as the last entry in cd_values[] */ \
- ((unsigned char *)&cd_values[H5Z_SCALEOFFSET_PARM_FILVAL])[0] = (unsigned char)fill_val; \
+ (cd_values)[H5Z_SCALEOFFSET_PARM_FILVAL] = (unsigned)((unsigned char)fill_val); \
}
/* Set the fill value parameter in cd_values[] for floating-point type */
@@ -199,33 +246,78 @@ H5Z_class2_t H5Z_SCALEOFFSET[1] = {{
}
/* Get the fill value for integer type */
-#define H5Z_scaleoffset_get_filval_1(type, filval_buf, filval) \
-{ \
- const unsigned char *fill_parm; /* Pointer to fill value parameter */ \
- \
- /* retrieve fill value from corresponding positions of cd_values[] \
- * retrieve them corresponding to how they are stored \
- */ \
- fill_parm = (const unsigned char *)filval_buf; \
- if(H5T_native_order_g == H5T_ORDER_LE) \
- HDmemcpy(&filval, fill_parm, sizeof(type)); \
- else { \
- unsigned char *fill_buf; /* Pointer to fill value in memory */ \
- unsigned u; /* index */ \
- \
- HDassert(H5T_native_order_g == H5T_ORDER_BE); \
- \
- fill_buf = (unsigned char *)&filval; \
- for(u = 0; u < sizeof(type); u++) \
- fill_buf[u] = fill_parm[sizeof(type) - (u + 1)]; \
- } /* end else */ \
+#define H5Z_scaleoffset_get_filval_1(type, cd_values, fill_val) \
+{ \
+ unsigned _i = H5Z_SCALEOFFSET_PARM_FILVAL; /* index into cd_values */ \
+ uint32_t _cd_value; /* Current cd_value */ \
+ char *_fv_p; /* Pointer to current byte in fill_val */ \
+ size_t _copy_size = 4; /* # of bytes to copy this iteration */ \
+ size_t _size_rem = sizeof(type); /* # of bytes left to copy to filval */ \
+ \
+ /* Retrieve the fill value from the last entry in cd_values[] \
+ * Store byte by byte from least significant byte to most significant byte \
+ * Plenty of space left for the fill value (from index 8 to 19) \
+ * H5O_pline_encode will byte-swap each individual cd value, but we still \
+ * need to swap the cd values as a whole if we are on a BE machine. Note \
+ * that we need to make sure to put the data only in the lowest 4 bytes of \
+ * each, if sizeof(unsigned) > 4. \
+ */ \
+ if(H5T_native_order_g == H5T_ORDER_LE) { \
+ _fv_p = (char *)&(fill_val); \
+ /* Copy 4 bytes at a time to each cd value */ \
+ do { \
+ if(_size_rem < 4) \
+ /* Amount left to copy is smaller than a cd_value, adjust copy \
+ * size and initialize cd_value as it will not be fully \
+ * overwritten */ \
+ _copy_size = _size_rem; \
+ \
+ /* Copy the value */ \
+ _cd_value = (uint32_t)(cd_values)[_i]; \
+ HDmemcpy(_fv_p, &_cd_value, _copy_size); \
+ \
+ /* Next field */ \
+ _i++; \
+ _fv_p += _copy_size; \
+ _size_rem -= _copy_size; \
+ } while(_size_rem); \
+ } /* end if */ \
+ else { \
+ HDassert(H5T_native_order_g == H5T_ORDER_BE); \
+ \
+ /* Copy 4 bytes at a time to each cd value, but start at the end \
+ * (highest address) of fill_val */ \
+ _fv_p = ((char *)&(fill_val)) + sizeof(type) - MIN(4, _size_rem); \
+ while(_size_rem >= 4) { \
+ /* Copy the value */ \
+ _cd_value = (uint32_t)(cd_values)[_i]; \
+ HDmemcpy(_fv_p, &_cd_value, _copy_size); \
+ \
+ /* Next field */ \
+ _i++; \
+ _size_rem -= 4; \
+ if(_size_rem >=4) \
+ _fv_p -= 4; \
+ else \
+ _fv_p -= _size_rem; \
+ } /* end while */ \
+ \
+ HDassert(_fv_p == (char *)&(fill_val)); \
+ if(_size_rem) { \
+ /* Amount left to copy is smaller than a cd_value, initialize \
+ * _cd_value as it will not be fully overwritten and copy to the end \
+ * of _cd value as it is BE. */ \
+ _cd_value = (uint32_t)(cd_values)[_i]; \
+ HDmemcpy(_fv_p, (char *)&_cd_value + 4 - _size_rem, _size_rem); \
+ } /* end if */ \
+ } /* end else */ \
}
/* Get the fill value for floating-point type */
-#define H5Z_scaleoffset_get_filval_2(type, filval_buf, filval) \
+#define H5Z_scaleoffset_get_filval_2(type, cd_values, filval) \
{ \
if(sizeof(type) <= sizeof(long long)) \
- H5Z_scaleoffset_get_filval_1(type, filval_buf, filval) \
+ H5Z_scaleoffset_get_filval_1(type, cd_values, filval) \
else \
HGOTO_ERROR(H5E_PLINE, H5E_BADTYPE, FAIL, "cannot find matched integer dataype") \
}
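
The retrieval side mirrors the store: a minimal sketch of the inverse of the pack_fill_value() helper shown earlier (again hypothetical, not the macro itself, using the same <stdint.h>/<string.h> headers) reassembles the fill value from the cd_values slots, reading from the highest address downward on big-endian hosts.

/* Hypothetical inverse of pack_fill_value(): rebuild `size` bytes of the
 * fill value from 32-bit cd_values slots, as H5Z_scaleoffset_get_filval_1
 * does above. */
static void
unpack_fill_value(const unsigned cd_values[], unsigned slot,
                  void *fill_val, size_t size, int big_endian)
{
    char  *base = (char *)fill_val;
    size_t rem  = size;

    if (!big_endian) {
        char *p = base;
        while (rem) {
            uint32_t v = (uint32_t)cd_values[slot++];
            size_t   n = rem < 4 ? rem : 4;
            memcpy(p, &v, n);
            p   += n;
            rem -= n;
        }
    }
    else {
        char *p = base + size;
        while (rem >= 4) {
            uint32_t v = (uint32_t)cd_values[slot++];
            p -= 4;
            memcpy(p, &v, 4);
            rem -= 4;
        }
        if (rem) {   /* leftover bytes come from the end of the last slot */
            uint32_t v = (uint32_t)cd_values[slot];
            memcpy(base, (char *)&v + 4 - rem, rem);
        }
    }
}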
@@ -258,7 +350,7 @@ H5Z_class2_t H5Z_SCALEOFFSET[1] = {{
i = 0; while(i < d_nelmts && HDfabs(buf[i] - filval) < HDpow(10.0, -D_val)) i++; \
if(i < d_nelmts) min = max = buf[i]; \
for(; i < d_nelmts; i++) { \
- if(HDfabs(buf[i] - filval) < HDpow(10.0, -D_val)) \
+ if(HDfabs(buf[i] - filval) < HDpow(10.0, -D_val)) \
continue; /* ignore fill value */ \
if(buf[i] > max) max = buf[i]; \
if(buf[i] < min) min = buf[i]; \
@@ -321,13 +413,13 @@ H5Z_class2_t H5Z_SCALEOFFSET[1] = {{
}
/* Precompress for unsigned integer type */
-#define H5Z_scaleoffset_precompress_1(type, data, d_nelmts, filavail, filval_buf, minbits, minval)\
+#define H5Z_scaleoffset_precompress_1(type, data, d_nelmts, filavail, cd_values, minbits, minval)\
{ \
type *buf = (type *)data, min = 0, max = 0, span, filval = 0; \
unsigned i; \
\
if(filavail == H5Z_SCALEOFFSET_FILL_DEFINED) { /* fill value defined */ \
- H5Z_scaleoffset_get_filval_1(type, filval_buf, filval) \
+ H5Z_scaleoffset_get_filval_1(type, cd_values, filval) \
if(*minbits == H5Z_SO_INT_MINBITS_DEFAULT) { /* minbits not set yet, calculate max, min, and minbits */ \
H5Z_scaleoffset_max_min_1(i, d_nelmts, buf, filval, max, min) \
H5Z_scaleoffset_check_1(type, max, min, minbits) \
@@ -354,13 +446,13 @@ H5Z_class2_t H5Z_SCALEOFFSET[1] = {{
}
/* Precompress for signed integer type */
-#define H5Z_scaleoffset_precompress_2(type, data, d_nelmts, filavail, filval_buf, minbits, minval)\
+#define H5Z_scaleoffset_precompress_2(type, data, d_nelmts, filavail, cd_values, minbits, minval)\
{ \
type *buf = (type *)data, min = 0, max = 0, filval = 0; \
unsigned type span; unsigned i; \
\
if(filavail == H5Z_SCALEOFFSET_FILL_DEFINED) { /* fill value defined */ \
- H5Z_scaleoffset_get_filval_1(type, filval_buf, filval) \
+ H5Z_scaleoffset_get_filval_1(type, cd_values, filval) \
if(*minbits == H5Z_SO_INT_MINBITS_DEFAULT) { /* minbits not set yet, calculate max, min, and minbits */ \
H5Z_scaleoffset_max_min_1(i, d_nelmts, buf, filval, max, min) \
H5Z_scaleoffset_check_2(type, max, min, minbits) \
@@ -423,15 +515,15 @@ H5Z_class2_t H5Z_SCALEOFFSET[1] = {{
if(sizeof(type)==sizeof(int)) \
for(i = 0; i < d_nelmts; i++) \
*(int *)&buf[i] = H5Z_scaleoffset_rnd( \
- buf[i]*HDpow(10.0, D_val) - min*HDpow(10.0, D_val)); \
+ buf[i]*HDpow(10.0, D_val) - min*HDpow(10.0, D_val)); \
else if(sizeof(type)==sizeof(long)) \
for(i = 0; i < d_nelmts; i++) \
*(long *)&buf[i] = H5Z_scaleoffset_rnd( \
- buf[i]*HDpow(10.0, D_val) - min*HDpow(10.0, D_val)); \
+ buf[i]*HDpow(10.0, D_val) - min*HDpow(10.0, D_val)); \
else if(sizeof(type)==sizeof(long long)) \
for(i = 0; i < d_nelmts; i++) \
*(long long *)&buf[i] = H5Z_scaleoffset_rnd( \
- buf[i]*HDpow(10.0, D_val) - min*HDpow(10.0, D_val)); \
+ buf[i]*HDpow(10.0, D_val) - min*HDpow(10.0, D_val)); \
else \
HGOTO_ERROR(H5E_PLINE, H5E_BADTYPE, FAIL, "cannot find matched integer dataype")\
}
@@ -439,38 +531,33 @@ H5Z_class2_t H5Z_SCALEOFFSET[1] = {{
/* Save the minimum value for floating-point type */
#define H5Z_scaleoffset_save_min(i, type, minval, min) \
{ \
- if(sizeof(type) <= sizeof(long long)) { \
- unsigned char *min_parm; /* Pointer to min value parameter */ \
- \
- min_parm = (unsigned char *)minval; \
+ if(sizeof(type) <= sizeof(long long)) \
+ /* Save min value to corresponding position \
+ * byte-order will be swapped as appropriate, but be sure to \
+ * account for offset in BE if sizes differ \
+ */ \
if(H5T_native_order_g == H5T_ORDER_LE) \
- HDmemcpy(min_parm, &min, sizeof(type)); \
+ HDmemcpy(minval, &min, sizeof(type)); \
else { \
- unsigned char *min_buf; /* Pointer to min value in memory */ \
- unsigned u; /* index */ \
- \
HDassert(H5T_native_order_g == H5T_ORDER_BE); \
- \
- min_buf = (unsigned char *)&min; \
- for(u = 0; u < sizeof(type); u++) \
- min_parm[u] = min_buf[sizeof(type) - (u + 1)]; \
+ HDmemcpy(((char *)minval) + (sizeof(long long) - sizeof(type)), \
+ &min, sizeof(type)); \
} /* end else */ \
- } /* end if */ \
else \
HGOTO_ERROR(H5E_PLINE, H5E_BADTYPE, FAIL, "cannot find matched integer dataype") \
}
/* Precompress for floating-point type using variable-minimum-bits method */
-#define H5Z_scaleoffset_precompress_3(type, data, d_nelmts, filavail, filval_buf, \
+#define H5Z_scaleoffset_precompress_3(type, data, d_nelmts, filavail, cd_values, \
minbits, minval, D_val) \
{ \
- type *buf = (type *)data, min = 0, max = 0, filval = 0; \
+ type *buf = (type *)data, min = 0, max = 0, filval = 0; \
unsigned long long span; \
unsigned i; \
\
*minval = 0; \
if(filavail == H5Z_SCALEOFFSET_FILL_DEFINED) { /* fill value defined */ \
- H5Z_scaleoffset_get_filval_2(type, filval_buf, filval) \
+ H5Z_scaleoffset_get_filval_2(type, cd_values, filval) \
H5Z_scaleoffset_max_min_3(i, d_nelmts, buf, filval, max, min, D_val) \
H5Z_scaleoffset_check_3(i, type, max, min, minbits, D_val) \
span = H5Z_scaleoffset_rnd(max * HDpow(10.0, D_val) - min * HDpow(10.0, D_val)) + 1; \
@@ -489,12 +576,12 @@ H5Z_class2_t H5Z_SCALEOFFSET[1] = {{
}
/* Postdecompress for unsigned integer type */
-#define H5Z_scaleoffset_postdecompress_1(type, data, d_nelmts, filavail, filval_buf, minbits, minval)\
+#define H5Z_scaleoffset_postdecompress_1(type, data, d_nelmts, filavail, cd_values, minbits, minval)\
{ \
type *buf = (type *)data, filval = 0; unsigned i; \
\
if(filavail == H5Z_SCALEOFFSET_FILL_DEFINED) { /* fill value defined */ \
- H5Z_scaleoffset_get_filval_1(type, filval_buf, filval) \
+ H5Z_scaleoffset_get_filval_1(type, cd_values, filval) \
for(i = 0; i < d_nelmts; i++) \
buf[i] = (type)((buf[i] == (((type)1 << minbits) - 1)) ? filval : (buf[i] + minval)); \
} else /* fill value undefined */ \
@@ -502,13 +589,13 @@ H5Z_class2_t H5Z_SCALEOFFSET[1] = {{
}
/* Postdecompress for signed integer type */
-#define H5Z_scaleoffset_postdecompress_2(type, data, d_nelmts, filavail, filval_buf, minbits, minval)\
+#define H5Z_scaleoffset_postdecompress_2(type, data, d_nelmts, filavail, cd_values, minbits, minval)\
{ \
type *buf = (type *)data, filval = 0; \
unsigned i; \
\
if(filavail == H5Z_SCALEOFFSET_FILL_DEFINED) { /* fill value defined */ \
- H5Z_scaleoffset_get_filval_1(type, filval_buf, filval) \
+ H5Z_scaleoffset_get_filval_1(type, cd_values, filval) \
for(i = 0; i < d_nelmts; i++) \
buf[i] = (type)(((unsigned type)buf[i] == (((unsigned type)1 << minbits) - 1)) ? filval : (buf[i] + minval));\
} else /* fill value undefined */ \
@@ -519,26 +606,18 @@ H5Z_class2_t H5Z_SCALEOFFSET[1] = {{
/* Retrive minimum value of floating-point type */
#define H5Z_scaleoffset_get_min(type, minval, min) \
{ \
- if(sizeof(type) <= sizeof(long long)) { \
- const unsigned char *min_parm; /* Pointer to min value parameter */ \
- \
- /* retrieve min value from corresponding positions \
- * retrieve them corresponding to how they are stored \
+ if(sizeof(type) <= sizeof(long long)) \
+ /* retrieve min value from corresponding position \
+ * byte-order has already been swapped as appropriate, but be sure to \
+ * account for offset in BE if sizes differ \
*/ \
- min_parm = (const unsigned char *)&minval; \
if(H5T_native_order_g == H5T_ORDER_LE) \
- HDmemcpy(&min, min_parm, sizeof(type)); \
+ HDmemcpy(&min, &minval, sizeof(type)); \
else { \
- unsigned char *min_buf; /* Pointer to min value in memory */ \
- unsigned u; /* index */ \
- \
HDassert(H5T_native_order_g == H5T_ORDER_BE); \
- \
- min_buf = (unsigned char *)&min; \
- for(u = 0; u < sizeof(type); u++) \
- min_buf[u] = min_parm[sizeof(type) - (u + 1)]; \
+ HDmemcpy(&min, ((char *)&minval) + (sizeof(long long) \
+ - sizeof(type)), sizeof(type)); \
} /* end else */ \
- } /* end if */ \
else \
HGOTO_ERROR(H5E_PLINE, H5E_BADTYPE, FAIL, "cannot find matched integer dataype") \
}
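
One detail worth spelling out from the two min-value macros above: the minimum is carried in an unsigned long long container that is byte-swapped as a whole, so on a big-endian host the bytes of a narrower float/double image sit at the high-address end of that container. A minimal sketch of the read side (a hypothetical helper, assuming an 8-byte unsigned long long):

#include <string.h>

/* Hypothetical stand-in for H5Z_scaleoffset_get_min above: extract a
 * `type_size`-byte minimum from the unsigned long long container,
 * skipping the pad bytes on a big-endian host. */
static void
get_min_from_container(void *min_out, size_t type_size,
                       const unsigned long long *container, int big_endian)
{
    const char *src = (const char *)container;

    if (big_endian)
        src += sizeof(unsigned long long) - type_size;  /* value sits at the high end */
    memcpy(min_out, src, type_size);
}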
@@ -579,7 +658,7 @@ H5Z_class2_t H5Z_SCALEOFFSET[1] = {{
}
/* Postdecompress for floating-point type using variable-minimum-bits method */
-#define H5Z_scaleoffset_postdecompress_3(type, data, d_nelmts, filavail, filval_buf, \
+#define H5Z_scaleoffset_postdecompress_3(type, data, d_nelmts, filavail, cd_values, \
minbits, minval, D_val) \
{ \
type *buf = (type *)data, filval = 0, min = 0; \
@@ -588,7 +667,7 @@ H5Z_class2_t H5Z_SCALEOFFSET[1] = {{
H5Z_scaleoffset_get_min(type, minval, min) \
\
if(filavail == H5Z_SCALEOFFSET_FILL_DEFINED) { /* fill value defined */ \
- H5Z_scaleoffset_get_filval_2(type, filval_buf, filval) \
+ H5Z_scaleoffset_get_filval_2(type, cd_values, filval) \
H5Z_scaleoffset_modify_3(i, type, buf, d_nelmts, filval, minbits, min, D_val) \
} else /* fill value undefined */ \
H5Z_scaleoffset_modify_4(i, type, buf, d_nelmts, min, D_val) \
@@ -1125,12 +1204,12 @@ H5Z_filter_scaleoffset(unsigned flags, size_t cd_nelmts, const unsigned cd_value
/* postprocess after decompression */
if(dtype_class==H5Z_SCALEOFFSET_CLS_INTEGER)
H5Z_scaleoffset_postdecompress_i(outbuf, d_nelmts, type, filavail,
- &cd_values[H5Z_SCALEOFFSET_PARM_FILVAL], minbits, minval);
+ cd_values, minbits, minval);
if(dtype_class==H5Z_SCALEOFFSET_CLS_FLOAT)
if(scale_type==0) { /* variable-minimum-bits method */
if(H5Z_scaleoffset_postdecompress_fd(outbuf, d_nelmts, type, filavail,
- &cd_values[H5Z_SCALEOFFSET_PARM_FILVAL], minbits, minval, D_val)==FAIL)
+ cd_values, minbits, minval, D_val)==FAIL)
HGOTO_ERROR(H5E_PLINE, H5E_BADTYPE, 0, "post-decompression failed")
}
@@ -1153,12 +1232,12 @@ H5Z_filter_scaleoffset(unsigned flags, size_t cd_nelmts, const unsigned cd_value
/* preprocess before compression */
if(dtype_class==H5Z_SCALEOFFSET_CLS_INTEGER)
H5Z_scaleoffset_precompress_i(*buf, d_nelmts, type, filavail,
- &cd_values[H5Z_SCALEOFFSET_PARM_FILVAL], &minbits, &minval);
+ cd_values, &minbits, &minval);
if(dtype_class==H5Z_SCALEOFFSET_CLS_FLOAT)
if(scale_type==0) { /* variable-minimum-bits method */
if(H5Z_scaleoffset_precompress_fd(*buf, d_nelmts, type, filavail,
- &cd_values[H5Z_SCALEOFFSET_PARM_FILVAL], &minbits, &minval, D_val)==FAIL)
+ cd_values, &minbits, &minval, D_val)==FAIL)
HGOTO_ERROR(H5E_PLINE, H5E_BADTYPE, 0, "pre-compression failed")
}
@@ -1308,30 +1387,30 @@ H5Z_scaleoffset_log2(unsigned long long num)
/* precompress for integer type */
static void
H5Z_scaleoffset_precompress_i(void *data, unsigned d_nelmts, enum H5Z_scaleoffset_t type,
- unsigned filavail, const void *filval_buf, uint32_t *minbits, unsigned long long *minval)
+ unsigned filavail, const unsigned cd_values[], uint32_t *minbits, unsigned long long *minval)
{
if(type == t_uchar)
H5Z_scaleoffset_precompress_1(unsigned char, data, d_nelmts,
- filavail, filval_buf, minbits, minval)
+ filavail, cd_values, minbits, minval)
else if(type == t_ushort)
H5Z_scaleoffset_precompress_1(unsigned short, data, d_nelmts,
- filavail, filval_buf, minbits, minval)
+ filavail, cd_values, minbits, minval)
else if(type == t_uint)
H5Z_scaleoffset_precompress_1(unsigned int, data, d_nelmts,
- filavail, filval_buf, minbits, minval)
+ filavail, cd_values, minbits, minval)
else if(type == t_ulong)
H5Z_scaleoffset_precompress_1(unsigned long, data, d_nelmts,
- filavail, filval_buf, minbits, minval)
+ filavail, cd_values, minbits, minval)
else if(type == t_ulong_long)
H5Z_scaleoffset_precompress_1(unsigned long long, data, d_nelmts,
- filavail, filval_buf, minbits, minval)
+ filavail, cd_values, minbits, minval)
else if(type == t_schar) {
signed char *buf = (signed char *)data, min = 0, max = 0, filval = 0;
unsigned char span;
unsigned i;
if(filavail == H5Z_SCALEOFFSET_FILL_DEFINED) { /* fill value defined */
- H5Z_scaleoffset_get_filval_1(signed char, filval_buf, filval);
+ H5Z_scaleoffset_get_filval_1(signed char, cd_values, filval);
if(*minbits == H5Z_SO_INT_MINBITS_DEFAULT) { /* minbits not set yet, calculate max, min, and minbits */
H5Z_scaleoffset_max_min_1(i, d_nelmts, buf, filval, max, min)
if((unsigned char)(max - min) > (unsigned char)(~(unsigned char)0 - 2)) {
@@ -1365,46 +1444,46 @@ H5Z_scaleoffset_precompress_i(void *data, unsigned d_nelmts, enum H5Z_scaleoffse
}
else if(type == t_short)
H5Z_scaleoffset_precompress_2(short, data, d_nelmts,
- filavail, filval_buf, minbits, minval)
+ filavail, cd_values, minbits, minval)
else if(type == t_int)
H5Z_scaleoffset_precompress_2(int, data, d_nelmts,
- filavail, filval_buf, minbits, minval)
+ filavail, cd_values, minbits, minval)
else if(type == t_long)
H5Z_scaleoffset_precompress_2(long, data, d_nelmts,
- filavail, filval_buf, minbits, minval)
+ filavail, cd_values, minbits, minval)
else if(type == t_long_long)
H5Z_scaleoffset_precompress_2(long long, data, d_nelmts,
- filavail, filval_buf, minbits, minval)
+ filavail, cd_values, minbits, minval)
}
/* postdecompress for integer type */
static void
H5Z_scaleoffset_postdecompress_i(void *data, unsigned d_nelmts, enum H5Z_scaleoffset_t type,
- unsigned filavail, const void *filval_buf, uint32_t minbits, unsigned long long minval)
+ unsigned filavail, const unsigned cd_values[], uint32_t minbits, unsigned long long minval)
{
long long sminval = *(long long*)&minval; /* for signed integer types */
if(type == t_uchar)
H5Z_scaleoffset_postdecompress_1(unsigned char, data, d_nelmts, filavail,
- filval_buf, minbits, minval)
+ cd_values, minbits, minval)
else if(type == t_ushort)
H5Z_scaleoffset_postdecompress_1(unsigned short, data, d_nelmts, filavail,
- filval_buf, minbits, minval)
+ cd_values, minbits, minval)
else if(type == t_uint)
H5Z_scaleoffset_postdecompress_1(unsigned int, data, d_nelmts, filavail,
- filval_buf, minbits, minval)
+ cd_values, minbits, minval)
else if(type == t_ulong)
H5Z_scaleoffset_postdecompress_1(unsigned long, data, d_nelmts, filavail,
- filval_buf, minbits, minval)
+ cd_values, minbits, minval)
else if(type == t_ulong_long)
H5Z_scaleoffset_postdecompress_1(unsigned long long, data, d_nelmts, filavail,
- filval_buf, minbits, minval)
+ cd_values, minbits, minval)
else if(type == t_schar) {
signed char *buf = (signed char *)data, filval = 0;
unsigned i;
if(filavail == H5Z_SCALEOFFSET_FILL_DEFINED) { /* fill value defined */
- H5Z_scaleoffset_get_filval_1(signed char, filval_buf, filval)
+ H5Z_scaleoffset_get_filval_1(signed char, cd_values, filval)
for(i = 0; i < d_nelmts; i++)
buf[i] = (signed char)((buf[i] == (((unsigned char)1 << minbits) - 1)) ? filval : (buf[i] + sminval));
} else /* fill value undefined */
@@ -1413,23 +1492,23 @@ H5Z_scaleoffset_postdecompress_i(void *data, unsigned d_nelmts, enum H5Z_scaleof
}
else if(type == t_short)
H5Z_scaleoffset_postdecompress_2(short, data, d_nelmts, filavail,
- filval_buf, minbits, sminval)
+ cd_values, minbits, sminval)
else if(type == t_int)
H5Z_scaleoffset_postdecompress_2(int, data, d_nelmts, filavail,
- filval_buf, minbits, sminval)
+ cd_values, minbits, sminval)
else if(type == t_long)
H5Z_scaleoffset_postdecompress_2(long, data, d_nelmts, filavail,
- filval_buf, minbits, sminval)
+ cd_values, minbits, sminval)
else if(type == t_long_long)
H5Z_scaleoffset_postdecompress_2(long long, data, d_nelmts, filavail,
- filval_buf, minbits, sminval)
+ cd_values, minbits, sminval)
}
/* precompress for floating-point type, variable-minimum-bits method
success: non-negative, failure: negative 4/15/05 */
static herr_t
H5Z_scaleoffset_precompress_fd(void *data, unsigned d_nelmts, enum H5Z_scaleoffset_t type,
- unsigned filavail, const void *filval_buf, uint32_t *minbits,
+ unsigned filavail, const unsigned cd_values[], uint32_t *minbits,
unsigned long long *minval, double D_val)
{
herr_t ret_value=SUCCEED; /* Return value */
@@ -1438,10 +1517,10 @@ H5Z_scaleoffset_precompress_fd(void *data, unsigned d_nelmts, enum H5Z_scaleoffs
if(type == t_float)
H5Z_scaleoffset_precompress_3(float, data, d_nelmts,
- filavail, filval_buf, minbits, minval, D_val)
+ filavail, cd_values, minbits, minval, D_val)
else if(type == t_double)
H5Z_scaleoffset_precompress_3(double, data, d_nelmts,
- filavail, filval_buf, minbits, minval, D_val)
+ filavail, cd_values, minbits, minval, D_val)
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -1451,7 +1530,7 @@ done:
success: non-negative, failure: negative 4/15/05 */
static herr_t
H5Z_scaleoffset_postdecompress_fd(void *data, unsigned d_nelmts, enum H5Z_scaleoffset_t type,
- unsigned filavail, const void *filval_buf, uint32_t minbits,
+ unsigned filavail, const unsigned cd_values[], uint32_t minbits,
unsigned long long minval, double D_val)
{
long long sminval = (long long)minval; /* for signed integer types */
@@ -1461,10 +1540,10 @@ H5Z_scaleoffset_postdecompress_fd(void *data, unsigned d_nelmts, enum H5Z_scaleo
if(type == t_float)
H5Z_scaleoffset_postdecompress_3(float, data, d_nelmts, filavail,
- filval_buf, minbits, sminval, D_val)
+ cd_values, minbits, sminval, D_val)
else if(type == t_double)
H5Z_scaleoffset_postdecompress_3(double, data, d_nelmts, filavail,
- filval_buf, minbits, sminval, D_val)
+ cd_values, minbits, sminval, D_val)
done:
FUNC_LEAVE_NOAPI(ret_value)
diff --git a/test/be_data.h5 b/test/be_data.h5
index 7fc9ef7..0feefa3 100644
--- a/test/be_data.h5
+++ b/test/be_data.h5
Binary files differ
diff --git a/test/cross_read.c b/test/cross_read.c
index 279d102..6588031 100755
--- a/test/cross_read.c
+++ b/test/cross_read.c
@@ -32,9 +32,19 @@ const char *FILENAME[] = {
};
#define DATASETNAME "Array"
-#define DATASETNAME2 "Scale_offset_double_data"
-#define DATASETNAME3 "Scale_offset_int_data"
-#define NX 6
+#define DATASETNAME2 "Scale_offset_float_data_le"
+#define DATASETNAME3 "Scale_offset_float_data_be"
+#define DATASETNAME4 "Scale_offset_double_data_le"
+#define DATASETNAME5 "Scale_offset_double_data_be"
+#define DATASETNAME6 "Scale_offset_char_data_le"
+#define DATASETNAME7 "Scale_offset_char_data_be"
+#define DATASETNAME8 "Scale_offset_short_data_le"
+#define DATASETNAME9 "Scale_offset_short_data_be"
+#define DATASETNAME10 "Scale_offset_int_data_le"
+#define DATASETNAME11 "Scale_offset_int_data_be"
+#define DATASETNAME12 "Scale_offset_long_long_data_le"
+#define DATASETNAME13 "Scale_offset_long_long_data_be"
+#define NX 6
#define NY 6
@@ -57,18 +67,13 @@ static int read_data(char *fname)
{
const char *pathname = H5_get_srcdir_filename(fname); /* Corrected test file name */
hid_t file, dataset; /* handles */
- hid_t datatype;
- hid_t dt;
- float data_in[NX][NY]; /* input buffer */
- float data_out[NX][NY]; /* output buffer */
- double double_data_in[NX][NY]; /* input buffer */
- double double_data_out[NX][NY]; /* output buffer */
- int int_data_in[NX][NY]; /* input buffer */
- int int_data_out[NX][NY]; /* output buffer */
+ double data_in[NX+1][NY]; /* input buffer */
+ double data_out[NX+1][NY]; /* output buffer */
+ long long int_data_in[NX+1][NY]; /* input buffer */
+ long long int_data_out[NX+1][NY]; /* output buffer */
int i, j;
unsigned nerrors = 0;
const char *not_supported= " Scaleoffset filter is not enabled.";
- const char *not_fixed= " Scaleoffset filter bug (2131) is not fixed yet.";
/*
* Open the file.
@@ -76,8 +81,8 @@ static int read_data(char *fname)
if((file = H5Fopen(pathname, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
TEST_ERROR;
- TESTING(" regular dataset");
-
+ TESTING("regular dataset");
+
/*
* Open the regular dataset.
*/
@@ -93,6 +98,10 @@ static int read_data(char *fname)
data_out[j][i] = 0;
}
}
+ for (i = 0; i < NY; i++) {
+ data_in[NX][i] = -2.2;
+ data_out[NX][i] = 0;
+ }
/*
* 0 1 2 3 4 5
* 1 2 3 4 5 6
@@ -100,29 +109,80 @@ static int read_data(char *fname)
* 3 4 5 6 7 8
* 4 5 6 7 8 9
* 5 6 7 8 9 10
+ * -2.2 -2.2 -2.2 -2.2 -2.2 -2.2
*/
/*
- * Get datatype and dataspace handles and then query
- * dataset class, order, size, rank and dimensions.
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
*/
- if((dt = H5Dget_type(dataset)) < 0) /* datatype handle */
+ if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data_out) < 0)
TEST_ERROR;
- if((datatype = H5Tget_native_type(dt, H5T_DIR_DEFAULT)) < 0)
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ /* if (data_out[j][i] != data_in[j][i]) { */
+ if (!FLT_ABS_EQUAL(data_out[j][i], data_in[j][i])) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %g but should have been %g\n",
+ j, i, data_out[j][i], data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+ TESTING("dataset of LE FLOAT with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME2, H5P_DEFAULT)) < 0)
TEST_ERROR;
/*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ data_in[j][i] = ((double)(i + j + 1))/3;
+ data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ data_in[NX][i] = -2.2;
+ data_out[NX][i] = 0;
+ }
+
+ /*
* Read data from hyperslab in the file into the hyperslab in
* memory and display.
*/
- if(H5Dread(dataset, datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, data_out) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data_out) < 0)
TEST_ERROR;
/* Check results */
- for (j=0; j<NX; j++) {
+ for (j=0; j<(NX+1); j++) {
for (i=0; i<NY; i++) {
- /* if (data_out[j][i] != data_in[j][i]) { */
- if (!DBL_ABS_EQUAL(data_out[j][i], data_in[j][i])) {
+ if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
if (!nerrors++) {
H5_FAILED();
printf("element [%d][%d] is %g but should have been %g\n",
@@ -135,9 +195,8 @@ static int read_data(char *fname)
/*
* Close/release resources.
*/
- H5Tclose(dt);
- H5Tclose(datatype);
- H5Dclose(dataset);
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
/* Failure */
if (nerrors) {
@@ -146,14 +205,80 @@ static int read_data(char *fname)
}
PASSED();
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of BE FLOAT with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME3, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ data_in[j][i] = ((double)(i + j + 1))/3;
+ data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ data_in[NX][i] = -2.2;
+ data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %g but should have been %g\n",
+ j, i, data_out[j][i], data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of LE DOUBLE with scale-offset filter");
- TESTING(" dataset of DOUBLE with scale-offset filter");
-#ifdef TMP
#ifdef H5_HAVE_FILTER_SCALEOFFSET
/*
* Open the dataset with scale-offset filter.
*/
- if((dataset = H5Dopen2(file, DATASETNAME2, H5P_DEFAULT)) < 0)
+ if((dataset = H5Dopen2(file, DATASETNAME4, H5P_DEFAULT)) < 0)
TEST_ERROR;
/*
@@ -161,35 +286,220 @@ static int read_data(char *fname)
*/
for (j = 0; j < NX; j++) {
for (i = 0; i < NY; i++) {
- double_data_in[j][i] = ((double)(i + j + 1))/3;
- double_data_out[j][i] = 0;
+ data_in[j][i] = ((double)(i + j + 1))/3;
+ data_out[j][i] = 0;
}
}
+ for (i = 0; i < NY; i++) {
+ data_in[NX][i] = -2.2;
+ data_out[NX][i] = 0;
+ }
/*
- * Get datatype and dataspace handles and then query
- * dataset class, order, size, rank and dimensions.
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
*/
- if((dt = H5Dget_type(dataset)) < 0) /* datatype handle */
+ if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data_out) < 0)
TEST_ERROR;
- if((datatype = H5Tget_native_type(dt, H5T_DIR_DEFAULT)) < 0)
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %g but should have been %g\n",
+ j, i, data_out[j][i], data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of BE DOUBLE with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME5, H5P_DEFAULT)) < 0)
TEST_ERROR;
/*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ data_in[j][i] = ((double)(i + j + 1))/3;
+ data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ data_in[NX][i] = -2.2;
+ data_out[NX][i] = 0;
+ }
+
+ /*
* Read data from hyperslab in the file into the hyperslab in
* memory and display.
*/
- if(H5Dread(dataset, datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, double_data_out) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data_out) < 0)
TEST_ERROR;
/* Check results */
- for (j=0; j<NX; j++) {
+ for (j=0; j<(NX+1); j++) {
for (i=0; i<NY; i++) {
- if (!DBL_REL_EQUAL(double_data_out[j][i], double_data_in[j][i], 0.001)) {
+ if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
if (!nerrors++) {
H5_FAILED();
printf("element [%d][%d] is %g but should have been %g\n",
- j, i, double_data_out[j][i], double_data_in[j][i]);
+ j, i, data_out[j][i], data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of LE CHAR with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME6, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of BE CHAR with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME7, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
}
}
}
@@ -198,9 +508,8 @@ static int read_data(char *fname)
/*
* Close/release resources.
*/
- H5Tclose(dt);
- H5Tclose(datatype);
- H5Dclose(dataset);
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
/* Failure */
if (nerrors) {
@@ -209,22 +518,147 @@ static int read_data(char *fname)
}
PASSED();
+
#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
-#else /*TMP*/
+
+ TESTING("dataset of LE SHORT with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME8, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
- puts(not_fixed);
-#endif /*TMP*/
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
- TESTING(" dataset of INT with scale-offset filter");
+ TESTING("dataset of BE SHORT with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME9, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of LE INT with scale-offset filter");
#ifdef H5_HAVE_FILTER_SCALEOFFSET
/*
* Open the dataset with scale-offset filter.
*/
- if((dataset = H5Dopen2(file, DATASETNAME3, H5P_DEFAULT)) < 0)
+ if((dataset = H5Dopen2(file, DATASETNAME10, H5P_DEFAULT)) < 0)
TEST_ERROR;
/*
@@ -236,31 +670,220 @@ static int read_data(char *fname)
int_data_out[j][i] = 0;
}
}
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of BE INT with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME11, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
/*
- * Get datatype and dataspace handles and then query
- * dataset class, order, size, rank and dimensions.
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
*/
- if((dt = H5Dget_type(dataset)) < 0) /* datatype handle */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
TEST_ERROR;
- if((datatype = H5Tget_native_type(dt, H5T_DIR_DEFAULT)) < 0)
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of LE LONG LONG with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME12, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of BE LONG LONG with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME13, H5P_DEFAULT)) < 0)
TEST_ERROR;
/*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
* Read data from hyperslab in the file into the hyperslab in
* memory and display.
*/
- if(H5Dread(dataset, datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, int_data_out) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
TEST_ERROR;
/* Check results */
- for (j=0; j<NX; j++) {
+ for (j=0; j<(NX+1); j++) {
for (i=0; i<NY; i++) {
if (int_data_out[j][i] != int_data_in[j][i]) {
if (!nerrors++) {
H5_FAILED();
printf("element [%d][%d] is %d but should have been %d\n",
- j, i, int_data_out[j][i], int_data_in[j][i]);
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
}
}
}
@@ -269,9 +892,8 @@ static int read_data(char *fname)
/*
* Close/release resources.
*/
- H5Tclose(dt);
- H5Tclose(datatype);
- H5Dclose(dataset);
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
/* Failure */
if (nerrors) {
@@ -286,7 +908,8 @@ static int read_data(char *fname)
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
- H5Fclose(file);
+ if(H5Fclose(file))
+ TEST_ERROR
return 0;
error:
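
The floating-point checks in the rewritten test compare against a relative tolerance of 0.001 rather than exact equality, since the scale-offset filter with a decimal scale factor of 3 only preserves about three decimal digits. A minimal sketch of such a check (the real DBL_REL_EQUAL/FLT_ABS_EQUAL macros come from the HDF5 test headers; rel_equal below is a hypothetical stand-in):

#include <math.h>

/* Hypothetical stand-in for the DBL_REL_EQUAL(x, y, M) check used above:
 * true when `got` is within relative tolerance `tol` of `expected`. */
static int
rel_equal(double got, double expected, double tol)
{
    if (expected == 0.0)                    /* avoid dividing by zero */
        return fabs(got) <= tol;
    return fabs((got - expected) / expected) <= tol;
}

/* e.g. for values read back through the scale-offset filter:
 *   if (!rel_equal(data_out[j][i], data_in[j][i], 0.001))
 *       ...report a mismatch...
 */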
diff --git a/test/gen_cross.c b/test/gen_cross.c
index 0bc3460..32be867 100755
--- a/test/gen_cross.c
+++ b/test/gen_cross.c
@@ -28,23 +28,37 @@
#define H5FILE_NAME "data.h5"
#define DATASETNAME "Array"
-#define DATASETNAME2 "Scale_offset_double_data"
-#define DATASETNAME3 "Scale_offset_int_data"
+#define DATASETNAME2 "Scale_offset_float_data_le"
+#define DATASETNAME3 "Scale_offset_float_data_be"
+#define DATASETNAME4 "Scale_offset_double_data_le"
+#define DATASETNAME5 "Scale_offset_double_data_be"
+#define DATASETNAME6 "Scale_offset_char_data_le"
+#define DATASETNAME7 "Scale_offset_char_data_be"
+#define DATASETNAME8 "Scale_offset_short_data_le"
+#define DATASETNAME9 "Scale_offset_short_data_be"
+#define DATASETNAME10 "Scale_offset_int_data_le"
+#define DATASETNAME11 "Scale_offset_int_data_be"
+#define DATASETNAME12 "Scale_offset_long_long_data_le"
+#define DATASETNAME13 "Scale_offset_long_long_data_be"
#define NX 6
#define NY 6
#define RANK 2
-#define CHUNK0 3
+#define CHUNK0 4
#define CHUNK1 3
-int create_normal_dset(hid_t fid, hid_t sid);
-int create_scale_offset_dset_double(hid_t fid, hid_t sid);
-int create_scale_offset_dset_int(hid_t fid, hid_t sid);
+int create_normal_dset(hid_t fid, hid_t fsid, hid_t msid);
+int create_scale_offset_dsets_float(hid_t fid, hid_t fsid, hid_t msid);
+int create_scale_offset_dsets_double(hid_t fid, hid_t fsid, hid_t msid);
+int create_scale_offset_dsets_char(hid_t fid, hid_t fsid, hid_t msid);
+int create_scale_offset_dsets_short(hid_t fid, hid_t fsid, hid_t msid);
+int create_scale_offset_dsets_int(hid_t fid, hid_t fsid, hid_t msid);
+int create_scale_offset_dsets_long_long(hid_t fid, hid_t fsid, hid_t msid);
/*-------------------------------------------------------------------------
* Function: create_normal_dset
*
- * Purpose: Create a regular dataset of FLOAT datatype.
+ * Purpose: Create a regular dataset of DOUBLE datatype.
*
* Return: Success: 0
* Failure: -1
@@ -57,11 +71,12 @@ int create_scale_offset_dset_int(hid_t fid, hid_t sid);
*-------------------------------------------------------------------------
*/
int
-create_normal_dset(hid_t fid, hid_t sid)
+create_normal_dset(hid_t fid, hid_t fsid, hid_t msid)
{
hid_t dataset; /* file and dataset handles */
- herr_t status;
+ hid_t dcpl;
float data[NX][NY]; /* data to write */
+ float fillvalue = -2.2;
int i, j;
/*
@@ -78,32 +93,155 @@ create_normal_dset(hid_t fid, hid_t sid)
* 3 4 5 6 7 8
* 4 5 6 7 8 9
* 5 6 7 8 9 10
+ * -2.2 -2.2 -2.2 -2.2 -2.2 -2.2
*/
/*
+ * Create the dataset creation property list, set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
* Create a new dataset within the file using defined dataspace and
* datatype and default dataset creation properties.
*/
- dataset = H5Dcreate2(fid, DATASETNAME, H5T_NATIVE_FLOAT, sid,
- H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ if((dataset = H5Dcreate2(fid, DATASETNAME, H5T_NATIVE_DOUBLE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
/*
* Write the data to the dataset using default transfer properties.
*/
- status = H5Dwrite(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL,
- H5P_DEFAULT, data);
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
/*
* Close/release resources.
*/
- H5Dclose(dataset);
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
}
/*-------------------------------------------------------------------------
- * Function: create_scale_offset_dset_double
+ * Function: create_scale_offset_dsets_float
+ *
+ * Purpose: Create a dataset of FLOAT datatype with scale-offset filter
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Neil Fortner
+ * 27 January 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_scale_offset_dsets_float(hid_t fid, hid_t fsid, hid_t msid)
+{
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ hid_t dataset; /* dataset handles */
+ hid_t dcpl;
+ float data[NX][NY]; /* data to write */
+ float fillvalue = -2.2;
+ hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
+ int i, j;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++)
+ data[j][i] = ((float)(i + j + 1))/3;
+ }
+
+ /*
+ * Create the dataset creation property list, add the Scale-Offset
+ * filter, set the chunk size, and set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_scaleoffset(dcpl, H5Z_SO_FLOAT_DSCALE, 3) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
+ * Create a new dataset within the file using defined dataspace, little
+ * endian datatype and default dataset creation properties.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME2, H5T_IEEE_F32LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME3, H5T_IEEE_F32BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
+#else /* H5_HAVE_FILTER_SCALEOFFSET */
+ const char *not_supported= "Scaleoffset filter is not enabled. Can't create the dataset.";
+
+ puts(not_supported);
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+
+ return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: create_scale_offset_dsets_double
*
* Purpose: Create a dataset of DOUBLE datatype with scale-offset filter
*
@@ -118,13 +256,13 @@ create_normal_dset(hid_t fid, hid_t sid)
*-------------------------------------------------------------------------
*/
int
-create_scale_offset_dset_double(hid_t fid, hid_t sid)
+create_scale_offset_dsets_double(hid_t fid, hid_t fsid, hid_t msid)
{
#ifdef H5_HAVE_FILTER_SCALEOFFSET
hid_t dataset; /* dataset handles */
hid_t dcpl;
- herr_t status;
- double data[NX][NY]; /* data to write */
+ double data[NX][NY]; /* data to write */
+ double fillvalue = -2.2;
hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
int i, j;
@@ -138,30 +276,265 @@ create_scale_offset_dset_double(hid_t fid, hid_t sid)
/*
* Create the dataset creation property list, add the Scale-Offset
- * filter and set the chunk size.
+ * filter, set the chunk size, and set the fill value.
*/
- dcpl = H5Pcreate (H5P_DATASET_CREATE);
- status = H5Pset_scaleoffset (dcpl, H5Z_SO_FLOAT_DSCALE, 3);
- status = H5Pset_chunk (dcpl, RANK, chunk);
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_scaleoffset(dcpl, H5Z_SO_FLOAT_DSCALE, 3) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_DOUBLE, &fillvalue) < 0)
+ TEST_ERROR
/*
- * Create a new dataset within the file using defined dataspace and
- * datatype and default dataset creation properties.
+ * Create a new dataset within the file using defined dataspace, little
+ * endian datatype and default dataset creation properties.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME4, H5T_IEEE_F64LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_DOUBLE, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME5, H5T_IEEE_F64BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_DOUBLE, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
+#else /* H5_HAVE_FILTER_SCALEOFFSET */
+ const char *not_supported= "Scaleoffset filter is not enabled. Can't create the dataset.";
+
+ puts(not_supported);
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+
+ return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: create_scale_offset_dset_char
+ *
+ * Purpose: Create a dataset of CHAR datatype with scale-offset filter
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Neil Fortner
+ * 27 January 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_scale_offset_dsets_char(hid_t fid, hid_t fsid, hid_t msid)
+{
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ hid_t dataset; /* dataset handles */
+ hid_t dcpl;
+ char data[NX][NY]; /* data to write */
+ char fillvalue = -2;
+ hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
+ int i, j;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++)
+ data[j][i] = i + j;
+ }
+ /*
+ * 0 1 2 3 4 5
+ * 1 2 3 4 5 6
+ * 2 3 4 5 6 7
+ * 3 4 5 6 7 8
+ * 4 5 6 7 8 9
+ * 5 6 7 8 9 10
+ */
+
+ /*
+ * Create the dataset creation property list, add the Scale-Offset
+ * filter, set the chunk size, and set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_scaleoffset(dcpl, H5Z_SO_INT, H5Z_SO_INT_MINBITS_DEFAULT) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_CHAR, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
+ * Create a new dataset within the file using the defined dataspace, a
+ * little-endian datatype, and the dataset creation property list above.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME6, H5T_STD_I8LE, fsid, H5P_DEFAULT,
+ dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_CHAR, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME7, H5T_STD_I8BE, fsid, H5P_DEFAULT,
+ dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_CHAR, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /*
+ * Close/release resources.
*/
- dataset = H5Dcreate2(fid, DATASETNAME2, H5T_NATIVE_DOUBLE, sid,
- H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
+#else /* H5_HAVE_FILTER_SCALEOFFSET */
+ const char *not_supported= "Scaleoffset filter is not enabled. Can't create the dataset.";
+
+ puts(not_supported);
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+
+ return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+}
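(Illustrative note, not part of the patch: for the integer variants, the third
argument of H5Pset_scaleoffset is the minimum number of bits, and
H5Z_SO_INT_MINBITS_DEFAULT lets the filter derive that width from the actual
data range when each chunk is written. A hypothetical alternative sketch is to
request an explicit width; 8 bits comfortably covers the 0..10 values and the
-2 fill value used here, so the data should still round-trip exactly. dcpl2 is
a hypothetical property list; RANK, chunk and TEST_ERROR are as used above.)

/* Hypothetical variant of the property-list setup above: request an explicit
 * minimum-bits width instead of H5Z_SO_INT_MINBITS_DEFAULT */
hid_t dcpl2;
if((dcpl2 = H5Pcreate(H5P_DATASET_CREATE)) < 0)
    TEST_ERROR
if(H5Pset_scaleoffset(dcpl2, H5Z_SO_INT, 8) < 0)
    TEST_ERROR
if(H5Pset_chunk(dcpl2, RANK, chunk) < 0)
    TEST_ERROR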
+
+
+/*-------------------------------------------------------------------------
+ * Function: create_scale_offset_dsets_short
+ *
+ * Purpose: Create a dataset of SHORT datatype with scale-offset filter
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Neil Fortner
+ * 27 January 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_scale_offset_dsets_short(hid_t fid, hid_t fsid, hid_t msid)
+{
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ hid_t dataset; /* dataset handles */
+ hid_t dcpl;
+ short data[NX][NY]; /* data to write */
+ short fillvalue = -2;
+ hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
+ int i, j;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++)
+ data[j][i] = i + j;
+ }
+ /*
+ * 0 1 2 3 4 5
+ * 1 2 3 4 5 6
+ * 2 3 4 5 6 7
+ * 3 4 5 6 7 8
+ * 4 5 6 7 8 9
+ * 5 6 7 8 9 10
+ */
+
+ /*
+ * Create the dataset creation property list, add the Scale-Offset
+ * filter, set the chunk size, and set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_scaleoffset(dcpl, H5Z_SO_INT, H5Z_SO_INT_MINBITS_DEFAULT) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_SHORT, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
+ * Create a new dataset within the file using the defined dataspace, a
+ * little-endian datatype, and the dataset creation property list above.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME8, H5T_STD_I16LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
/*
* Write the data to the dataset using default transfer properties.
*/
- status = H5Dwrite(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL,
- H5P_DEFAULT, data);
+ if(H5Dwrite(dataset, H5T_NATIVE_SHORT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME9, H5T_STD_I16BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_SHORT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
/*
* Close/release resources.
*/
- H5Pclose(dcpl);
- H5Dclose(dataset);
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
#else /* H5_HAVE_FILTER_SCALEOFFSET */
const char *not_supported= "Scaleoffset filter is not enabled. Can't create the dataset.";
@@ -170,6 +543,16 @@ create_scale_offset_dset_double(hid_t fid, hid_t sid)
#endif /* H5_HAVE_FILTER_SCALEOFFSET */
return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
}
@@ -189,13 +572,13 @@ create_scale_offset_dset_double(hid_t fid, hid_t sid)
*-------------------------------------------------------------------------
*/
int
-create_scale_offset_dset_int(hid_t fid, hid_t sid)
+create_scale_offset_dsets_int(hid_t fid, hid_t fsid, hid_t msid)
{
#ifdef H5_HAVE_FILTER_SCALEOFFSET
hid_t dataset; /* dataset handles */
hid_t dcpl;
- herr_t status;
int data[NX][NY]; /* data to write */
+ int fillvalue = -2;
hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
int i, j;
@@ -217,30 +600,158 @@ create_scale_offset_dset_int(hid_t fid, hid_t sid)
/*
* Create the dataset creation property list, add the Scale-Offset
- * filter and set the chunk size.
+ * filter, set the chunk size, and set the fill value.
*/
- dcpl = H5Pcreate (H5P_DATASET_CREATE);
- status = H5Pset_scaleoffset (dcpl, H5Z_SO_INT, H5Z_SO_INT_MINBITS_DEFAULT);
- status = H5Pset_chunk (dcpl, RANK, chunk);
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_scaleoffset(dcpl, H5Z_SO_INT, H5Z_SO_INT_MINBITS_DEFAULT) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_INT, &fillvalue) < 0)
+ TEST_ERROR
/*
- * Create a new dataset within the file using defined dataspace and
- * datatype and default dataset creation properties.
+ * Create a new dataset within the file using the defined dataspace, a
+ * little-endian datatype, and the dataset creation property list above.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME10, H5T_STD_I32LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_INT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME11, H5T_STD_I32BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_INT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /*
+ * Close/release resources.
*/
- dataset = H5Dcreate2(fid, DATASETNAME3, H5T_NATIVE_INT, sid,
- H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
+#else /* H5_HAVE_FILTER_SCALEOFFSET */
+ const char *not_supported= "Scaleoffset filter is not enabled. Can't create the dataset.";
+
+ puts(not_supported);
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+
+ return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: create_scale_offset_dsets_long_long
+ *
+ * Purpose: Create a dataset of LONG LONG datatype with scale-offset
+ * filter
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Neil Fortner
+ * 27 January 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_scale_offset_dsets_long_long(hid_t fid, hid_t fsid, hid_t msid)
+{
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ hid_t dataset; /* dataset handles */
+ hid_t dcpl;
+ long long data[NX][NY]; /* data to write */
+ long long fillvalue = -2;
+ hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
+ int i, j;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++)
+ data[j][i] = i + j;
+ }
+ /*
+ * 0 1 2 3 4 5
+ * 1 2 3 4 5 6
+ * 2 3 4 5 6 7
+ * 3 4 5 6 7 8
+ * 4 5 6 7 8 9
+ * 5 6 7 8 9 10
+ */
+
+ /*
+ * Create the dataset creation property list, add the Scale-Offset
+ * filter, set the chunk size, and set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_scaleoffset(dcpl, H5Z_SO_INT, H5Z_SO_INT_MINBITS_DEFAULT) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_LLONG, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
+ * Create a new dataset within the file using the defined dataspace, a
+ * little-endian datatype, and the dataset creation property list above.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME12, H5T_STD_I64LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
/*
* Write the data to the dataset using default transfer properties.
*/
- status = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
- H5P_DEFAULT, data);
+ if(H5Dwrite(dataset, H5T_NATIVE_LLONG, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME13, H5T_STD_I64BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_LLONG, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
/*
* Close/release resources.
*/
- H5Pclose(dcpl);
- H5Dclose(dataset);
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
#else /* H5_HAVE_FILTER_SCALEOFFSET */
const char *not_supported= "Scaleoffset filter is not enabled. Can't create the dataset.";
@@ -249,6 +760,16 @@ create_scale_offset_dset_int(hid_t fid, hid_t sid)
#endif /* H5_HAVE_FILTER_SCALEOFFSET */
return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
}
@@ -268,39 +789,79 @@ create_scale_offset_dset_int(hid_t fid, hid_t sid)
int
main (void)
{
- hid_t file; /* file and dataset handles */
- hid_t dataspace;
+ hid_t file = -1;
+ hid_t filespace = -1;
+ hid_t memspace = -1;
hsize_t dimsf[RANK];
+ hsize_t start[RANK] = {0, 0};
/*
* Create a new file using H5F_ACC_TRUNC access,
* default file creation properties, and default file
* access properties.
*/
- file = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ if((file = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT))
+ < 0)
+ {H5_FAILED(); AT(); return 1;}
/*
* Describe the size of the array and create the data space for fixed
- * size dataset.
+ * size dataset. Increase the size in the X direction so that the extra
+ * row is left at the fill value.
*/
- dimsf[0] = NX;
+ dimsf[0] = NX + 1;
dimsf[1] = NY;
- dataspace = H5Screate_simple(RANK, dimsf, NULL);
+ if((filespace = H5Screate_simple(RANK, dimsf, NULL)) < 0)
+ {H5_FAILED(); AT(); return 1;}
+ dimsf[0] = NX;
+ if(H5Sselect_hyperslab(filespace, H5S_SELECT_SET, start, NULL, dimsf, NULL)
+ < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create memory space. This does not include the extra row for fill
+ * values. */
+ HDassert(dimsf[0] == NX);
+ HDassert(dimsf[1] == NY);
+ if((memspace = H5Screate_simple(RANK, dimsf, NULL)) < 0)
+ {H5_FAILED(); AT(); return 1;}
/* Create a regular dataset */
- create_normal_dset(file, dataspace);
+ if(create_normal_dset(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create a dataset of FLOAT with scale-offset filter */
+ if(create_scale_offset_dsets_float(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
/* Create a dataset of DOUBLE with scale-offset filter */
- create_scale_offset_dset_double(file, dataspace);
+ if(create_scale_offset_dsets_double(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create a dataset of CHAR with scale-offset filter */
+ if(create_scale_offset_dsets_char(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create a dataset of SHORT with scale-offset filter */
+ if(create_scale_offset_dsets_short(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
/* Create a dataset of INT with scale-offset filter */
- create_scale_offset_dset_int(file, dataspace);
+ if(create_scale_offset_dsets_int(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create a dataset of LONG LONG with scale-offset filter */
+ if(create_scale_offset_dsets_long_long(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
/*
* Close/release resources.
*/
- H5Sclose(dataspace);
- H5Fclose(file);
+ if(H5Sclose(memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+ if(H5Sclose(filespace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+ if(H5Fclose(file) < 0)
+ {H5_FAILED(); AT(); return 1;}
return 0;
}
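(An illustrative read-back sketch, not part of the patch: H5FILE_NAME,
DATASETNAME4, NX and NY are the macros defined earlier in gen_cross.c, the
on-disk dataset is (NX + 1) x NY, and the unwritten last row should hold the
-2.2 fill value to within the filter's three-decimal-digit precision.)

#include "hdf5.h"
#include <math.h>

static int
verify_double_fill(void)
{
    hid_t  file = -1, dset = -1;
    double buf[NX + 1][NY];
    int    i;

    if((file = H5Fopen(H5FILE_NAME, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
        return -1;
    if((dset = H5Dopen2(file, DATASETNAME4, H5P_DEFAULT)) < 0) {
        H5Fclose(file);
        return -1;
    }

    /* Read the whole (NX + 1) x NY dataset, including the row that was
     * never written and therefore carries the fill value. */
    if(H5Dread(dset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0)
        goto error;

    /* Check the fill-value row against -2.2 with the D-scale tolerance */
    for(i = 0; i < NY; i++)
        if(fabs(buf[NX][i] - (-2.2)) > 0.001)
            goto error;

    H5Dclose(dset);
    H5Fclose(file);
    return 0;

error:
    H5Dclose(dset);
    H5Fclose(file);
    return -1;
}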
diff --git a/test/le_data.h5 b/test/le_data.h5
index 6bb0e46..1225d21 100644
--- a/test/le_data.h5
+++ b/test/le_data.h5
Binary files differ
diff --git a/test/vms_data.h5 b/test/vms_data.h5
index 5f07082..14aeef2 100644
--- a/test/vms_data.h5
+++ b/test/vms_data.h5
Binary files differ