summaryrefslogtreecommitdiffstats
path: root/test
diff options
context:
space:
mode:
authorQuincey Koziol <koziol@hdfgroup.org>2009-07-02 21:17:02 (GMT)
committerQuincey Koziol <koziol@hdfgroup.org>2009-07-02 21:17:02 (GMT)
commitb0df711c3c1a31d8ddfc6600308b42ee81c8a27f (patch)
tree2882be3f917c6ae660b7168dd27082e64833987e /test
parent55a945c985692dfd2c8e1b2915d413b1b704905a (diff)
downloadhdf5-b0df711c3c1a31d8ddfc6600308b42ee81c8a27f.zip
hdf5-b0df711c3c1a31d8ddfc6600308b42ee81c8a27f.tar.gz
hdf5-b0df711c3c1a31d8ddfc6600308b42ee81c8a27f.tar.bz2
[svn-r17147] Description:
Bring r17146 from trunk to 1.8 branch: Bring a bunch of misc. improvements & tweaks from the revise_chunks branch back to the trunk, so that future merges won't be so painful. Tested on: FreeBSD/32 6.3 (duty) (h5committested on trunk)
Diffstat (limited to 'test')
-rw-r--r--test/dsets.c42
-rw-r--r--test/tfile.c17
2 files changed, 30 insertions, 29 deletions
diff --git a/test/dsets.c b/test/dsets.c
index 3357a4e..bd02c24 100644
--- a/test/dsets.c
+++ b/test/dsets.c
@@ -138,10 +138,12 @@ const char *FILENAME[] = {
#define FILTER_HS_SIZE2 50
/* Names for noencoder test */
+#ifdef H5_HAVE_FILTER_SZIP
#define NOENCODER_FILENAME "noencoder.h5"
#define NOENCODER_TEST_DATASET "noencoder_tdset.h5"
#define NOENCODER_SZIP_DATASET "noencoder_szip_dset.h5"
#define NOENCODER_SZIP_SHUFF_FLETCH_DATASET "noencoder_szip_shuffle_fletcher_dset.h5"
+#endif /* H5_HAVE_FILTER_SZIP */
/* Names for zero-dim test */
#define ZERODIM_DATASET "zerodim"
@@ -974,9 +976,9 @@ test_tconv(hid_t file)
hsize_t dims[1];
hid_t space, dataset;
- out = HDmalloc((size_t)(4 * 1000 * 1000));
+ out = (char *)HDmalloc((size_t)(4 * 1000 * 1000));
HDassert(out);
- in = HDmalloc((size_t)(4 * 1000 * 1000));
+ in = (char *)HDmalloc((size_t)(4 * 1000 * 1000));
HDassert(in);
TESTING("data type conversion");
@@ -1131,7 +1133,7 @@ set_local_bogus2(hid_t dcpl_id, hid_t type_id, hid_t UNUSED space_id)
return(FAIL);
/* Set "local" parameters for this dataset */
- cd_values[2]=(add_on>0); /* Flag to indicate data is modified */
+ cd_values[2]=(unsigned)(add_on>0); /* Flag to indicate data is modified */
cd_values[3]=add_on; /* Amount the data was modified by */
/* Modify the filter's parameters for this dataset */
@@ -1185,12 +1187,12 @@ filter_bogus2(unsigned int flags, size_t cd_nelmts,
/* "Compressing" */
else {
unsigned add_on=cd_values[3]; /* Get "add on" value */
- int *int_ptr=*buf; /* Pointer to the data values */
+ int *int_ptr=(int *)*buf; /* Pointer to the data values */
size_t buf_left=*buf_size; /* Amount of data buffer left to process */
/* Add the "add on" value to all the data values */
while(buf_left>0) {
- *int_ptr++ += add_on;
+ *int_ptr++ += (int)add_on;
buf_left -= sizeof(int);
} /* end while */
} /* end else */
@@ -2661,7 +2663,7 @@ test_nbit_int(hid_t file)
/* Check that the values read are the same as the values written
* Use mask for checking the significant bits, ignoring the padding bits
*/
- mask = ~(~0 << (precision + offset)) & (~0 << offset);
+ mask = ~((unsigned)~0 << (precision + offset)) & ((unsigned)~0 << offset);
for(i=0; i<(size_t)size[0]; i++) {
for(j=0; j<(size_t)size[1]; j++) {
if((new_data[i][j] & mask) != (orig_data[i][j] & mask)) {
@@ -3220,7 +3222,7 @@ test_nbit_compound(hid_t file)
/* some even-numbered integer values are negtive */
if((i*size[1]+j+1)%2 == 0) {
orig_data[i][j].i = -orig_data[i][j].i;
- orig_data[i][j].s = -orig_data[i][j].s;
+ orig_data[i][j].s = (short)-orig_data[i][j].s;
}
}
@@ -3262,9 +3264,9 @@ test_nbit_compound(hid_t file)
/* Check that the values read are the same as the values written
* Use mask for checking the significant bits, ignoring the padding bits
*/
- i_mask = ~(~0 << (precision[0] + offset[0])) & (~0 << offset[0]);
- c_mask = ~(~0 << (precision[1] + offset[1])) & (~0 << offset[1]);
- s_mask = ~(~0 << (precision[2] + offset[2])) & (~0 << offset[2]);
+ i_mask = ~((unsigned)~0 << (precision[0] + offset[0])) & ((unsigned)~0 << offset[0]);
+ c_mask = ~((unsigned)~0 << (precision[1] + offset[1])) & ((unsigned)~0 << offset[1]);
+ s_mask = ~((unsigned)~0 << (precision[2] + offset[2])) & ((unsigned)~0 << offset[2]);
for(i=0; i<size[0]; i++) {
for(j=0; j<size[1]; j++) {
if((new_data[i][j].i & i_mask) != (orig_data[i][j].i & i_mask) ||
@@ -3522,10 +3524,10 @@ test_nbit_compound_2(hid_t file)
/* Check that the values read are the same as the values written
* Use mask for checking the significant bits, ignoring the padding bits
*/
- i_mask = ~(~0 << (precision[0] + offset[0])) & (~0 << offset[0]);
- c_mask = ~(~0 << (precision[1] + offset[1])) & (~0 << offset[1]);
- s_mask = ~(~0 << (precision[2] + offset[2])) & (~0 << offset[2]);
- b_mask = ~(~0 << (precision[4] + offset[4])) & (~0 << offset[4]);
+ i_mask = ~((unsigned)~0 << (precision[0] + offset[0])) & ((unsigned)~0 << offset[0]);
+ c_mask = ~((unsigned)~0 << (precision[1] + offset[1])) & ((unsigned)~0 << offset[1]);
+ s_mask = ~((unsigned)~0 << (precision[2] + offset[2])) & ((unsigned)~0 << offset[2]);
+ b_mask = ~((unsigned)~0 << (precision[4] + offset[4])) & ((unsigned)~0 << offset[4]);
for(i=0; i<(size_t)size[0]; i++) {
for(j=0; j<(size_t)size[1]; j++) {
b_failed = 0;
@@ -5904,7 +5906,7 @@ test_missing_chunk(hid_t file)
/* Initialize data */
for(u=0; u<MISSING_CHUNK_DIM; u++) {
- wdata[u]=u;
+ wdata[u]=(int)u;
rdata[u]=911;
} /* end for */
@@ -6037,8 +6039,8 @@ test_random_chunks(hid_t fapl)
/* Generate random point coordinates. Only one point is selected per chunk */
for(i=0; i<NPOINTS; i++){
do {
- chunk_row = (int)HDrandom () % (dsize[0]/csize[0]);
- chunk_col = (int)HDrandom () % (dsize[1]/csize[1]);
+ chunk_row = (int)HDrandom () % (int)(dsize[0]/csize[0]);
+ chunk_col = (int)HDrandom () % (int)(dsize[1]/csize[1]);
} while (check2[chunk_row][chunk_col]);
wbuf[i] = check2[chunk_row][chunk_col] = chunk_row+chunk_col+1;
@@ -6127,8 +6129,8 @@ test_random_chunks(hid_t fapl)
/* Generate random point coordinates. Only one point is selected per chunk */
for(i = 0; i < NPOINTS; i++){
do {
- chunk_row = (int)HDrandom() % (nsize[0] / csize[0]);
- chunk_col = (int)HDrandom() % (nsize[1] / csize[1]);
+ chunk_row = (int)HDrandom() % (int)(nsize[0] / csize[0]);
+ chunk_col = (int)HDrandom() % (int)(nsize[1] / csize[1]);
} while (check2[chunk_row][chunk_col]);
wbuf[i] = check2[chunk_row][chunk_col] = chunk_row + chunk_col + 1;
@@ -7370,7 +7372,7 @@ main(void)
} /* end for */
/* Close 2nd FAPL */
- H5Pclose(fapl2);
+ if(H5Pclose(fapl2) < 0) TEST_ERROR
if(nerrors)
goto error;
diff --git a/test/tfile.c b/test/tfile.c
index 68c7f6b..2116a2c 100644
--- a/test/tfile.c
+++ b/test/tfile.c
@@ -233,7 +233,7 @@ test_file_create(void)
{
hid_t dataset_id, dataspace_id; /* identifiers */
hsize_t dims[F2_RANK];
- int data[F2_DIM0][F2_DIM1];
+ unsigned data[F2_DIM0][F2_DIM1];
unsigned i,j;
/* Create the data space for the dataset. */
@@ -243,7 +243,7 @@ test_file_create(void)
CHECK(dataspace_id, FAIL, "H5Screate_simple");
/* Create the dataset. */
- dataset_id = H5Dcreate2(fid2, F2_DSET, H5T_NATIVE_INT, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ dataset_id = H5Dcreate2(fid2, F2_DSET, H5T_NATIVE_UINT, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(dataset_id, FAIL, "H5Dcreate2");
for(i = 0; i < F2_DIM0; i++)
@@ -251,7 +251,7 @@ test_file_create(void)
data[i][j] = i * 10 + j;
/* Write data to the new dataset */
- ret = H5Dwrite(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+ ret = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
CHECK(ret, FAIL, "H5Dwrite");
/* End access to the dataset and release resources used by it. */
@@ -804,7 +804,7 @@ create_objects(hid_t fid1, hid_t fid2, hid_t *ret_did, hid_t *ret_gid1,
{
hid_t dataset_id, dataspace_id; /* identifiers */
hsize_t dims[F2_RANK];
- int data[F2_DIM0][F2_DIM1];
+ unsigned data[F2_DIM0][F2_DIM1];
unsigned i,j;
/* Create the data space for the dataset. */
@@ -814,7 +814,7 @@ create_objects(hid_t fid1, hid_t fid2, hid_t *ret_did, hid_t *ret_gid1,
CHECK(dataspace_id, FAIL, "H5Screate_simple");
/* Create the dataset. */
- dataset_id = H5Dcreate2(fid1, "/dset", H5T_NATIVE_INT, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ dataset_id = H5Dcreate2(fid1, "/dset", H5T_NATIVE_UINT, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(dataset_id, FAIL, "H5Dcreate2");
for(i = 0; i < F2_DIM0; i++)
@@ -822,8 +822,7 @@ create_objects(hid_t fid1, hid_t fid2, hid_t *ret_did, hid_t *ret_gid1,
data[i][j] = i * 10 + j;
/* Write data to the new dataset */
- ret = H5Dwrite(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
- H5P_DEFAULT, data);
+ ret = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
CHECK(ret, FAIL, "H5Dwrite");
if(ret_did != NULL)
@@ -2014,7 +2013,7 @@ test_cached_stab_info(void)
/* Reopen file */
file_id = H5Fopen(FILE1, H5F_ACC_RDONLY, H5P_DEFAULT);
- CHECK(ret, FAIL, "H5Fopen");
+ CHECK(file_id, FAIL, "H5Fopen");
/* Verify the cached symbol table information */
ret = H5F_check_cached_stab_test(file_id);
@@ -2058,7 +2057,7 @@ test_file(void)
#endif /*H5_CANNOT_OPEN_TWICE*/
test_userblock_file_size(); /* Tests that files created with a userblock have the correct size */
test_cached_stab_info(); /* Tests that files are created with cached stab info in the superblock */
-} /* test_file() */
+} /* test_file() */
/*-------------------------------------------------------------------------