author     Raymond Lu <songyulu@hdfgroup.org>    2010-11-19 19:11:35 (GMT)
committer  Raymond Lu <songyulu@hdfgroup.org>    2010-11-19 19:11:35 (GMT)
commit     7dd819a85159bbffd24a3a443bd24962884d0319 (patch)
tree       bea6044f1b922a8a2779b5429105792ca0511889
parent     2f5d9e1c8a3ff11dc41394bc41d8634698c1e799 (diff)
[svn-r19832] Bug fix - In the N-bit filter code, the last line of H5Z_nbit_compress in H5Znbit.c
("*buffer_size = j + 1;") was mistakenly removed. The line is necessary to update the buffer size to its new value after compression. This change puts it back and adds two test cases, one for integers and one for floats, to verify the correct dataset size. Tested on jam, heiwa, and amani.
-rw-r--r--   src/H5Znbit.c |  18
-rw-r--r--   test/dsets.c  | 383
2 files changed, 394 insertions(+), 7 deletions(-)
diff --git a/src/H5Znbit.c b/src/H5Znbit.c
index bcdd549..263b2cd 100644
--- a/src/H5Znbit.c
+++ b/src/H5Znbit.c
@@ -1365,17 +1365,17 @@ static void H5Z_nbit_compress_one_compound(unsigned char *data, size_t data_offs
static void H5Z_nbit_compress(unsigned char *data, unsigned d_nelmts, unsigned char *buffer,
size_t *buffer_size, const unsigned parms[])
{
- /* i: index of data, j: index of buffer,
+ /* i: index of data, new_size: index of buffer,
buf_len: number of bits to be filled in current byte */
- size_t i, j, size;
+ size_t i, size;
+ size_t new_size = 0;
int buf_len;
parms_atomic p;
/* must initialize buffer to be zeros */
- for(j = 0; j < *buffer_size; j++) buffer[j] = 0;
+ HDmemset(buffer, 0, *buffer_size);
/* initialization before the loop */
- j = 0;
buf_len = sizeof(unsigned char) * 8;
switch(parms[3]) {
@@ -1387,14 +1387,14 @@ static void H5Z_nbit_compress(unsigned char *data, unsigned d_nelmts, unsigned c
p.offset = parms[7];
for(i = 0; i < d_nelmts; i++) {
- H5Z_nbit_compress_one_atomic(data, i*p.size, buffer, &j, &buf_len, p);
+ H5Z_nbit_compress_one_atomic(data, i*p.size, buffer, &new_size, &buf_len, p);
}
break;
case H5Z_NBIT_ARRAY:
size = parms[4];
parms_index = 4;
for(i = 0; i < d_nelmts; i++) {
- H5Z_nbit_compress_one_array(data, i*size, buffer, &j, &buf_len, parms);
+ H5Z_nbit_compress_one_array(data, i*size, buffer, &new_size, &buf_len, parms);
parms_index = 4;
}
break;
@@ -1402,10 +1402,14 @@ static void H5Z_nbit_compress(unsigned char *data, unsigned d_nelmts, unsigned c
size = parms[4];
parms_index = 4;
for(i = 0; i < d_nelmts; i++) {
- H5Z_nbit_compress_one_compound(data, i*size, buffer, &j, &buf_len, parms);
+ H5Z_nbit_compress_one_compound(data, i*size, buffer, &new_size, &buf_len, parms);
parms_index = 4;
}
break;
} /* end switch */
+
+ /* Update the size to the new value after compression. If there are any bits hanging over in
+ * the last byte, increment the value by 1. */
+ *buffer_size = new_size + 1;
}
#endif /* H5_HAVE_FILTER_NBIT */
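The restored assignment is the size bookkeeping of the bit packer: after all elements have been packed, the size reported back through *buffer_size must count the final output byte even when it is only partially filled. If the size is never updated, the caller keeps the original *buffer_size and the chunk is stored essentially uncompressed, which is what the two new tests below detect by checking H5Dget_storage_size(). The standalone sketch below is not the filter's real code; the function name, the MSB-first packing order, and the conditional form of the final size computation are illustrative assumptions, but it exercises the same bookkeeping in isolation.

#include <stdio.h>
#include <string.h>

/* Pack the low `nbits` of each element of `in` into `out`, MSB-first.
 * Returns the number of bytes used in `out`, counting a partially
 * filled final byte -- analogous to the "*buffer_size = new_size + 1"
 * update restored in H5Z_nbit_compress above. */
static size_t
pack_bits(const unsigned *in, size_t nelmts, unsigned nbits, unsigned char *out)
{
    size_t byte = 0;      /* index of the byte being filled (cf. new_size) */
    int    buf_len = 8;   /* free bits left in the current byte (cf. buf_len) */
    size_t i;

    for(i = 0; i < nelmts; i++) {
        unsigned bits_left = nbits;

        while(bits_left > 0) {
            unsigned n     = bits_left < (unsigned)buf_len ? bits_left : (unsigned)buf_len;
            unsigned chunk = (in[i] >> (bits_left - n)) & ((1u << n) - 1u);

            out[byte] |= (unsigned char)(chunk << (buf_len - (int)n));
            buf_len   -= (int)n;
            bits_left -= n;
            if(buf_len == 0) {    /* current byte is full, move to the next one */
                byte++;
                buf_len = 8;
            }
        }
    }
    /* A partially filled last byte still occupies storage. */
    return (buf_len == 8) ? byte : byte + 1;
}

int main(void)
{
    unsigned      data[5] = {1, 2, 3, 4, 5};
    unsigned char buf[16];

    memset(buf, 0, sizeof(buf));   /* the output buffer must start zeroed */
    /* 5 elements x 3 bits = 15 bits, so 2 bytes are used, not 1 */
    printf("%zu bytes used\n", pack_bits(data, 5, 3, buf));
    return 0;
}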
diff --git a/test/dsets.c b/test/dsets.c
index 1d81e1d..682dfce 100644
--- a/test/dsets.c
+++ b/test/dsets.c
@@ -53,6 +53,7 @@ const char *FILENAME[] = {
NULL
};
#define FILENAME_BUF_SIZE 1024
+#define KB 1024
#define FILE_DEFLATE_NAME "deflate.h5"
@@ -99,6 +100,8 @@ const char *FILENAME[] = {
#define DSET_NBIT_COMPOUND_NAME "nbit_compound"
#define DSET_NBIT_COMPOUND_NAME_2 "nbit_compound_2"
#define DSET_NBIT_COMPOUND_NAME_3 "nbit_compound_3"
+#define DSET_NBIT_INT_SIZE_NAME "nbit_int_size"
+#define DSET_NBIT_FLT_SIZE_NAME "nbit_flt_size"
#define DSET_SCALEOFFSET_INT_NAME "scaleoffset_int"
#define DSET_SCALEOFFSET_INT_NAME_2 "scaleoffset_int_2"
#define DSET_SCALEOFFSET_FLOAT_NAME "scaleoffset_float"
@@ -3946,6 +3949,384 @@ error:
/*-------------------------------------------------------------------------
+ * Function: test_nbit_int_size
+ *
+ * Purpose: Tests the correct size of the integer datatype for nbit filter
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Raymond Lu
+ * 19 November 2010
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+test_nbit_int_size(hid_t file)
+{
+#ifdef H5_HAVE_FILTER_NBIT
+ hid_t dataspace, dataset, datatype, mem_datatype, dset_create_props;
+ hsize_t dims[2], chunk_size[2];
+ hsize_t dset_size = 0;
+ int orig_data[DSET_DIM1][DSET_DIM2];
+ int i, j;
+ size_t precision, offset;
+#else /* H5_HAVE_FILTER_NBIT */
+ const char *not_supported= " Nbit is not enabled.";
+#endif /* H5_HAVE_FILTER_NBIT */
+
+ TESTING(" nbit integer dataset size");
+#ifdef H5_HAVE_FILTER_NBIT
+
+ /* Define dataset datatype (integer), and set precision, offset */
+ if((datatype = H5Tcopy(H5T_NATIVE_INT)) < 0) {
+ H5_FAILED();
+ printf(" line %d: H5Tcopy failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ precision = 16; /* precision includes sign bit */
+ if(H5Tset_precision(datatype,precision)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pset_precision failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ offset = 8;
+ if(H5Tset_offset(datatype,offset)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Tset_offset failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /* Copy to memory datatype */
+ if((mem_datatype = H5Tcopy(datatype)) < 0) {
+ H5_FAILED();
+ printf(" line %d: H5Tcopy failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /* Set order of dataset datatype */
+ if(H5Tset_order(datatype, H5T_ORDER_BE)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pset_order failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ if(H5Tset_size(datatype, 4)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pset_size failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /* Initialize data buffer with random data within correct range
+ * corresponding to the memory datatype's precision and offset.
+ */
+ for (i=0; i < DSET_DIM1; i++)
+ for (j=0; j < DSET_DIM2; j++)
+ orig_data[i][j] = rand() % (int)pow(2, precision-1) <<offset;
+
+
+ /* Describe the dataspace. */
+ dims[0] = DSET_DIM1;
+ dims[1] = DSET_DIM2;
+ if((dataspace = H5Screate_simple (2, dims, NULL))<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pcreate failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /*
+ * Set the dataset creation property list to specify the chunks
+ */
+ chunk_size[0] = DSET_DIM1/10;
+ chunk_size[1] = DSET_DIM2/10;
+ if((dset_create_props = H5Pcreate (H5P_DATASET_CREATE))<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pcreate failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ if(H5Pset_chunk (dset_create_props, 2, chunk_size)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pset_chunk failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /*
+ * Set for n-bit compression
+ */
+ if(H5Pset_nbit (dset_create_props)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pset_nbit failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /*
+ * Create a new dataset within the file.
+ */
+ if((dataset = H5Dcreate2 (file, DSET_NBIT_INT_SIZE_NAME, datatype,
+ dataspace, H5P_DEFAULT,
+ dset_create_props, H5P_DEFAULT))<0) {
+ H5_FAILED();
+ printf(" line %d: H5dwrite failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /*
+ * Write the array to the file.
+ */
+ if(H5Dwrite (dataset, mem_datatype, H5S_ALL, H5S_ALL,
+ H5P_DEFAULT, orig_data)<0) {
+ H5_FAILED();
+ printf(" Line %d: H5Dwrite failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /*
+ * Get the precision of the data type
+ */
+ if((precision = H5Tget_precision(datatype)) == 0) {
+ H5_FAILED();
+ printf(" Line %d: wrong precision size: %d\n",__LINE__, precision);
+ goto error;
+ } /* end if */
+
+ /*
+ * The size of the dataset after compression should be around 2 * DSET_DIM1 * DSET_DIM2
+ */
+ if((dset_size = H5Dget_storage_size(dataset)) < DSET_DIM1*DSET_DIM2*(precision/8) ||
+ dset_size > DSET_DIM1*DSET_DIM2*(precision/8) + 1*KB) {
+ H5_FAILED();
+ printf(" Line %d: wrong dataset size: %d\n",__LINE__, dset_size);
+ goto error;
+ } /* end if */
+
+ H5Tclose (datatype);
+ H5Tclose (mem_datatype);
+ H5Dclose (dataset);
+ H5Sclose (dataspace);
+ H5Pclose (dset_create_props);
+
+ PASSED();
+#else
+ SKIPPED();
+ puts(not_supported);
+#endif
+
+ return 0;
+error:
+ return -1;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: test_nbit_flt_size
+ *
+ * Purpose: Tests the correct size of the floating-number datatype for
+ * nbit filter
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Raymond Lu
+ * 19 November 2010
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+test_nbit_flt_size(hid_t file)
+{
+#ifdef H5_HAVE_FILTER_NBIT
+ hid_t dataspace, dataset, datatype, dset_create_props;
+ hsize_t dims[2], chunk_size[2];
+ hsize_t dset_size = 0;
+ float orig_data[DSET_DIM1][DSET_DIM2];
+ int i, j;
+ size_t precision, offset;
+ size_t spos, epos, esize, mpos, msize;
+#else /* H5_HAVE_FILTER_NBIT */
+ const char *not_supported= " Nbit is not enabled.";
+#endif /* H5_HAVE_FILTER_NBIT */
+
+ TESTING(" nbit floating-number dataset size");
+#ifdef H5_HAVE_FILTER_NBIT
+
+ /* Define floating-point type for dataset
+ *-------------------------------------------------------------------
+ * size=4 byte, precision=16 bits, offset=8 bits,
+ * mantissa size=9 bits, mantissa position=8,
+ * exponent size=6 bits, exponent position=17,
+ * exponent bias=31.
+ * It can be illustrated in little-endian order as:
+ * (S - sign bit, E - exponent bit, M - mantissa bit,
+ * ? - padding bit)
+ *
+ * 3 2 1 0
+ * ???????? SEEEEEEM MMMMMMMM ????????
+ *
+ * To create a new floating-point type, the following
+ * properties must be set in the order of
+ * set fields -> set offset -> set precision -> set size.
+ * All these properties must be set before the type can function.
+ * Other properties can be set anytime. Derived type size cannot
+ * be expanded bigger than original size but can be decreased.
+ * There should be no holes among the significant bits. Exponent
+ * bias usually is set 2^(n-1)-1, where n is the exponent size.
+ *-------------------------------------------------------------------*/
+ if((datatype = H5Tcopy(H5T_IEEE_F32LE)) < 0) {
+ H5_FAILED();
+ printf(" line %d: H5Tcopy failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ msize = 9;
+ spos = 23;
+ epos = 17;
+ esize = 6;
+ mpos = 8;
+ offset = 8;
+ precision = 16;
+
+ if(H5Tset_fields(datatype, spos, epos, esize, mpos, msize)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Tset_fields failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ if(H5Tset_offset(datatype,offset)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Tset_offset failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ if(H5Tset_precision(datatype,precision)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Tset_precision failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ if(H5Tset_size(datatype, 4)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pset_size failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /* Set order of dataset datatype */
+ if(H5Tset_order(datatype, H5T_ORDER_BE)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pset_order failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ if(H5Tset_ebias(datatype, 31)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pset_size failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /*
+ * Initialize data buffer with random data
+ */
+ for (i=0; i < DSET_DIM1; i++)
+ for (j=0; j < DSET_DIM2; j++)
+ orig_data[i][j] = (rand() % 1234567) / 2;
+
+
+ /* Describe the dataspace. */
+ dims[0] = DSET_DIM1;
+ dims[1] = DSET_DIM2;
+ if((dataspace = H5Screate_simple (2, dims, NULL))<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pcreate failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /*
+ * Set the dataset creation property list to specify the chunks
+ */
+ chunk_size[0] = DSET_DIM1/10;
+ chunk_size[1] = DSET_DIM2/10;
+ if((dset_create_props = H5Pcreate (H5P_DATASET_CREATE))<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pcreate failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ if(H5Pset_chunk (dset_create_props, 2, chunk_size)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pset_chunk failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /*
+ * Set for n-bit compression
+ */
+ if(H5Pset_nbit (dset_create_props)<0) {
+ H5_FAILED();
+ printf(" line %d: H5Pset_nbit failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /*
+ * Create a new dataset within the file.
+ */
+ if((dataset = H5Dcreate2 (file, DSET_NBIT_FLT_SIZE_NAME, datatype,
+ dataspace, H5P_DEFAULT,
+ dset_create_props, H5P_DEFAULT))<0) {
+ H5_FAILED();
+ printf(" line %d: H5dwrite failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /*
+ * Write the array to the file.
+ */
+ if(H5Dwrite (dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL,
+ H5P_DEFAULT, orig_data)<0) {
+ H5_FAILED();
+ printf(" Line %d: H5Dwrite failed\n",__LINE__);
+ goto error;
+ } /* end if */
+
+ /*
+ * Get the precision of the data type
+ */
+ if((precision = H5Tget_precision(datatype)) == 0) {
+ H5_FAILED();
+ printf(" Line %d: wrong precision size: %d\n",__LINE__, precision);
+ goto error;
+ } /* end if */
+
+ /*
+ * The size of the dataset after compression should be around 2 * DSET_DIM1 * DSET_DIM2
+ */
+ if((dset_size = H5Dget_storage_size(dataset)) < DSET_DIM1*DSET_DIM2*(precision/8) ||
+ dset_size > DSET_DIM1*DSET_DIM2*(precision/8) + 1*KB) {
+ H5_FAILED();
+ printf(" Line %d: wrong dataset size: %d\n",__LINE__, dset_size);
+ goto error;
+ } /* end if */
+
+ H5Tclose (datatype);
+ H5Dclose (dataset);
+ H5Sclose (dataspace);
+ H5Pclose (dset_create_props);
+
+ PASSED();
+#else
+ SKIPPED();
+ puts(not_supported);
+#endif
+
+ return 0;
+error:
+ return -1;
+}
+
+/*-------------------------------------------------------------------------
* Function: test_scaleoffset_int
*
* Purpose: Tests the integer datatype for scaleoffset filter
@@ -7853,6 +8234,8 @@ main(void)
nerrors += (test_nbit_compound(file) < 0 ? 1 : 0);
nerrors += (test_nbit_compound_2(file) < 0 ? 1 : 0);
nerrors += (test_nbit_compound_3(file) < 0 ? 1 : 0);
+ nerrors += (test_nbit_int_size(file) < 0 ? 1 : 0);
+ nerrors += (test_nbit_flt_size(file) < 0 ? 1 : 0);
nerrors += (test_scaleoffset_int(file) < 0 ? 1 : 0);
nerrors += (test_scaleoffset_int_2(file) < 0 ? 1 : 0);
nerrors += (test_scaleoffset_float(file) < 0 ? 1 : 0);
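As a companion to the datatype recipe spelled out in the comment block of test_nbit_flt_size() (set fields, then offset, then precision, then size), here is a condensed standalone sketch that builds the same 16-bit-precision float layout and enables the N-bit filter on a chunked dataset. The file name, dimensions, chunk sizes, and the omitted error checking are assumptions of the sketch, not values taken from the test; it is meant to be built against the HDF5 library (for example with h5cc), not dropped into dsets.c.

/* nbit_flt_sketch.c -- illustrative only, not part of the test suite */
#include "hdf5.h"
#include <stdio.h>

int main(void)
{
    hsize_t dims[2]  = {100, 200};          /* arbitrary sizes for the sketch */
    hsize_t chunk[2] = {10, 20};
    static float data[100][200];            /* zero-filled data is enough for a size check */
    hid_t   file, space, dtype, dcpl, dset;

    file  = H5Fcreate("nbit_flt_sketch.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    space = H5Screate_simple(2, dims, NULL);

    /* Custom float, little-endian view: ???????? SEEEEEEM MMMMMMMM ????????
     * Required order: fields -> offset -> precision -> size. */
    dtype = H5Tcopy(H5T_IEEE_F32LE);
    H5Tset_fields(dtype, 23, 17, 6, 8, 9);  /* spos, epos, esize, mpos, msize */
    H5Tset_offset(dtype, 8);
    H5Tset_precision(dtype, 16);
    H5Tset_size(dtype, 4);
    H5Tset_ebias(dtype, 31);                /* 2^(esize-1) - 1 */

    /* Filters, including N-bit, require a chunked layout. */
    dcpl = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl, 2, chunk);
    H5Pset_nbit(dcpl);

    dset = H5Dcreate2(file, "nbit_flt", dtype, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    H5Dwrite(dset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);

    printf("storage: %llu bytes (unpacked would be %llu)\n",
           (unsigned long long)H5Dget_storage_size(dset),
           (unsigned long long)(dims[0] * dims[1] * sizeof(float)));

    H5Dclose(dset);
    H5Pclose(dcpl);
    H5Tclose(dtype);
    H5Sclose(space);
    H5Fclose(file);
    return 0;
}

With the H5Z_nbit_compress fix in place, the reported storage should be roughly 2 bytes per element (16 bits of precision) plus per-chunk overhead, rather than the unpacked 4 bytes per element.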