diff options
author | Quincey Koziol <koziol@hdfgroup.org> | 2007-03-27 03:06:48 (GMT) |
---|---|---|
committer | Quincey Koziol <koziol@hdfgroup.org> | 2007-03-27 03:06:48 (GMT) |
commit | c64ac252cdd9fe40b96313e2435551f16428b9d6 (patch) | |
tree | 9ff6633ac3ee8fe9529620a0ecfc99bbbab451f8 /test | |
parent | ddf436469153cc5deb7cadfdb9a1b985c605774f (diff) | |
download | hdf5-c64ac252cdd9fe40b96313e2435551f16428b9d6.zip hdf5-c64ac252cdd9fe40b96313e2435551f16428b9d6.tar.gz hdf5-c64ac252cdd9fe40b96313e2435551f16428b9d6.tar.bz2 |
[svn-r13549] Description:
Check in changes from Elena and me to get the pgcc compiler working again.
Primarily (all?) changes to move from using 'hsize_t' as array index to using
something else ('size_t') mostly.
Tested on:
Linux/32 2.4 kagiso w/pgcc
Diffstat (limited to 'test')
-rw-r--r-- | test/dsets.c | 174 | ||||
-rw-r--r-- | test/dtypes.c | 6 | ||||
-rw-r--r-- | test/enum.c | 15 | ||||
-rw-r--r-- | test/external.c | 2 | ||||
-rw-r--r-- | test/flush1.c | 8 | ||||
-rw-r--r-- | test/flush2.c | 6 | ||||
-rw-r--r-- | test/istore.c | 2 | ||||
-rw-r--r-- | test/tsohm.c | 6 |
8 files changed, 111 insertions, 108 deletions
diff --git a/test/dsets.c b/test/dsets.c index 794f9c2..1d055d2 100644 --- a/test/dsets.c +++ b/test/dsets.c @@ -1312,7 +1312,7 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32, const hsize_t hs_offset[2] = {FILTER_HS_OFFSET1, FILTER_HS_OFFSET2}; /* Hyperslab offset */ const hsize_t hs_size[2] = {FILTER_HS_SIZE1, FILTER_HS_SIZE2}; /* Hyperslab size */ void *tconv_buf = NULL; /* Temporary conversion buffer */ - hsize_t i, j, n; /* Local index variables */ + size_t i, j, n; /* Local index variables */ herr_t status; /* Error status */ /* Create the data space */ @@ -1357,8 +1357,8 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32, if (H5Dread (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check)<0) goto error; - for (i=0; i<size[0]; i++) { - for (j=0; j<size[1]; j++) { + for (i=0; i<(size_t)size[0]; i++) { + for (j=0; j<(size_t)size[1]; j++) { if (0!=check[i][j]) { H5_FAILED(); printf(" Read a non-zero value.\n"); @@ -1557,9 +1557,9 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32, */ TESTING(" filters (partial I/O)"); - for (i=0; i<hs_size[0]; i++) { - for (j=0; j<hs_size[1]; j++) { - points[hs_offset[0]+i][hs_offset[1]+j] = (int)HDrandom(); + for (i=0; i<(size_t)hs_size[0]; i++) { + for (j=0; j<(size_t)hs_size[1]; j++) { + points[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j] = (int)HDrandom(); } } if (H5Sselect_hyperslab(sid, H5S_SELECT_SET, hs_offset, NULL, hs_size, @@ -1593,19 +1593,19 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32, TEST_ERROR; /* Check that the values read are the same as the values written */ - for (i=0; i<hs_size[0]; i++) { - for (j=0; j<hs_size[1]; j++) { - if (points[hs_offset[0]+i][hs_offset[1]+j] != - check[hs_offset[0]+i][hs_offset[1]+j]) { + for (i=0; i<(size_t)hs_size[0]; i++) { + for (j=0; j<(size_t)hs_size[1]; j++) { + if (points[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j] != + 
check[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j]) { H5_FAILED(); fprintf(stderr," Read different values than written.\n"); fprintf(stderr," At index %lu,%lu\n", - (unsigned long)(hs_offset[0]+i), - (unsigned long)(hs_offset[1]+j)); + (unsigned long)((size_t)hs_offset[0]+i), + (unsigned long)((size_t)hs_offset[1]+j)); fprintf(stderr," At original: %d\n", - (int)points[hs_offset[0]+i][hs_offset[1]+j]); + (int)points[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j]); fprintf(stderr," At returned: %d\n", - (int)check[hs_offset[0]+i][hs_offset[1]+j]); + (int)check[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j]); goto error; } } @@ -2166,7 +2166,7 @@ test_missing_filter(hid_t file) const hsize_t dims[2] = {DSET_DIM1, DSET_DIM2}; /* Dataspace dimensions */ const hsize_t chunk_dims[2] = {2, 25}; /* Chunk dimensions */ hsize_t dset_size; /* Dataset size */ - hsize_t i,j; /* Local index variables */ + size_t i,j; /* Local index variables */ herr_t ret; /* Generic return value */ char testfile[512]=""; /* Buffer to hold name of existing test file */ char *srcdir = HDgetenv("srcdir"); /* The source directory, if we are using the --srcdir configure option */ @@ -2278,8 +2278,8 @@ test_missing_filter(hid_t file) /* Compare data */ /* Check that the values read are the same as the values written */ - for (i=0; i<dims[0]; i++) { - for (j=0; j<dims[1]; j++) { + for (i=0; i<(size_t)dims[0]; i++) { + for (j=0; j<(size_t)dims[1]; j++) { if (points[i][j] != check[i][j]) { H5_FAILED(); printf(" Line %d: Read different values than written.\n",__LINE__); @@ -2418,7 +2418,7 @@ test_onebyte_shuffle(hid_t file) const hsize_t chunk_size[2] = {10, 20}; unsigned char orig_data[10][20]; unsigned char new_data[10][20]; - hsize_t i, j; + size_t i, j; #else /* H5_HAVE_FILTER_SHUFFLE */ const char *not_supported= " Data shuffling is not enabled."; #endif /* H5_HAVE_FILTER_SHUFFLE */ @@ -2479,8 +2479,8 @@ test_onebyte_shuffle(hid_t file) goto error; /* Check that the values read are the same as 
the values written */ - for (i=0; i<size[0]; i++) { - for (j=0; j<size[1]; j++) { + for (i=0; i<(size_t)size[0]; i++) { + for (j=0; j<(size_t)size[1]; j++) { if (new_data[i][j] != orig_data[i][j]) { H5_FAILED(); printf(" Read different values than written.\n"); @@ -2538,7 +2538,7 @@ test_nbit_int(hid_t file) int new_data[2][5]; unsigned int mask; size_t precision, offset; - hsize_t i, j; + size_t i, j; #else /* H5_HAVE_FILTER_NBIT */ const char *not_supported= " Nbit is not enabled."; #endif /* H5_HAVE_FILTER_NBIT */ @@ -2572,8 +2572,8 @@ test_nbit_int(hid_t file) space,dc))<0) goto error; /* Initialize data, assuming size of long_long >= size of int */ - for (i= 0;i< size[0]; i++) - for (j = 0; j < size[1]; j++) { + for (i= 0;i< (size_t)size[0]; i++) + for (j = 0; j < (size_t)size[1]; j++) { orig_data[i][j] = (int)(((long_long)HDrandom() % (long_long)HDpow(2.0, (double)(precision - 1))) << offset); @@ -2621,8 +2621,8 @@ test_nbit_int(hid_t file) * Use mask for checking the significant bits, ignoring the padding bits */ mask = ~(~0 << (precision + offset)) & (~0 << offset); - for (i=0; i<size[0]; i++) { - for (j=0; j<size[1]; j++) { + for (i=0; i<(size_t)size[0]; i++) { + for (j=0; j<(size_t)size[1]; j++) { if ((new_data[i][j] & mask) != (orig_data[i][j] & mask)) { H5_FAILED(); printf(" Read different values than written.\n"); @@ -2683,7 +2683,7 @@ test_nbit_float(hid_t file) (float)5.2045898}, {(float)-49140.000, (float)2350.2500, (float)-3.2110596e-1, (float)6.4998865e-5, (float)-0.0000000}}; float new_data[2][5]; size_t precision, offset; - hsize_t i, j; + size_t i, j; #else /* H5_HAVE_FILTER_NBIT */ const char *not_supported= " Nbit is not enabled."; #endif /* H5_HAVE_FILTER_NBIT */ @@ -2750,8 +2750,8 @@ test_nbit_float(hid_t file) /* Check that the values read are the same as the values written * Assume size of int = size of float */ - for (i=0; i<size[0]; i++) { - for (j=0; j<size[1]; j++) { + for (i=0; i<(size_t)size[0]; i++) { + for (j=0; j<(size_t)size[1]; 
j++) { if (!(orig_data[i][j]==orig_data[i][j])) continue; /* skip if value is NaN */ if (new_data[i][j] != orig_data[i][j]) { H5_FAILED(); @@ -2816,7 +2816,7 @@ test_nbit_double(hid_t file) 6.6562295504670740e-3, -1.5747263393432150, 1.0711093225222612, -9.8971679387636870e-1}}; double new_data[2][5]; size_t precision, offset; - hsize_t i, j; + size_t i, j; #else /* H5_HAVE_FILTER_NBIT */ const char *not_supported= " Nbit is not enabled."; #endif /* H5_HAVE_FILTER_NBIT */ @@ -2883,8 +2883,8 @@ test_nbit_double(hid_t file) /* Check that the values read are the same as the values written * Assume size of long_long = size of double */ - for (i=0; i<size[0]; i++) { - for (j=0; j<size[1]; j++) { + for (i=0; i<(size_t)size[0]; i++) { + for (j=0; j<(size_t)size[1]; j++) { if (!(orig_data[i][j]==orig_data[i][j])) continue; /* skip if value is NaN */ if (new_data[i][j] != orig_data[i][j]) { H5_FAILED(); @@ -2945,7 +2945,7 @@ test_nbit_array(hid_t file) unsigned int orig_data[2][5][3][2]; unsigned int new_data[2][5][3][2]; size_t precision, offset; - hsize_t i, j, m, n; + size_t i, j, m, n; #else /* H5_HAVE_FILTER_NBIT */ const char *not_supported= " Nbit is not enabled."; #endif /* H5_HAVE_FILTER_NBIT */ @@ -2984,10 +2984,10 @@ test_nbit_array(hid_t file) space,dc))<0) goto error; /* Initialize data, assuming size of long_long >= size of unsigned int */ - for (i= 0;i< size[0]; i++) - for (j = 0; j < size[1]; j++) - for (m = 0; m < adims[0]; m++) - for (n = 0; n < adims[1]; n++) + for (i= 0;i< (size_t)size[0]; i++) + for (j = 0; j < (size_t)size[1]; j++) + for (m = 0; m < (size_t)adims[0]; m++) + for (n = 0; n < (size_t)adims[1]; n++) orig_data[i][j][m][n] = (unsigned int)(((long_long)HDrandom() % (long_long)HDpow(2.0, (double)precision)) << offset); PASSED(); @@ -3028,10 +3028,10 @@ test_nbit_array(hid_t file) /* Check that the values read are the same as the values written */ - for (i=0; i<size[0]; i++) - for (j=0; j<size[1]; j++) - for (m = 0; m < adims[0]; m++) - for (n 
= 0; n < adims[1]; n++) { + for (i=0; i<(size_t)size[0]; i++) + for (j=0; j<(size_t)size[1]; j++) + for (m = 0; m < (size_t)adims[0]; m++) + for (n = 0; n < (size_t)adims[1]; n++) { if (new_data[i][j][m][n]!= orig_data[i][j][m][n]) { H5_FAILED(); printf(" Read different values than written.\n"); @@ -3104,7 +3104,7 @@ test_nbit_compound(hid_t file) atomic orig_data[2][5]; atomic new_data[2][5]; unsigned int i_mask, s_mask, c_mask; - hsize_t i, j; + size_t i, j; #else /* H5_HAVE_FILTER_NBIT */ const char *not_supported= " Nbit is not enabled."; @@ -3166,8 +3166,8 @@ test_nbit_compound(hid_t file) space,dc))<0) goto error; /* Initialize data, assuming size of long_long >= size of member datatypes */ - for (i= 0;i< size[0]; i++) - for (j = 0; j < size[1]; j++) { + for (i= 0;i< (size_t)size[0]; i++) + for (j = 0; j < (size_t)size[1]; j++) { orig_data[i][j].i = (int)(((long_long)HDrandom() % (long_long)HDpow(2.0, (double)(precision[0]-1))) << offset[0]); orig_data[i][j].c = (char)(((long_long)HDrandom() % @@ -3319,7 +3319,7 @@ test_nbit_compound_2(hid_t file) complex orig_data[2][5]; complex new_data[2][5]; unsigned int i_mask, s_mask, c_mask, b_mask; - hsize_t i, j, m, n, b_failed, d_failed; + size_t i, j, m, n, b_failed, d_failed; #else /* H5_HAVE_FILTER_NBIT */ const char *not_supported= " Nbit is not enabled."; @@ -3413,8 +3413,8 @@ test_nbit_compound_2(hid_t file) space,dc))<0) goto error; /* Initialize data, assuming size of long_long >= size of member datatypes */ - for (i= 0;i< size[0]; i++) - for (j = 0; j < size[1]; j++) { + for (i= 0;i< (size_t)size[0]; i++) + for (j = 0; j < (size_t)size[1]; j++) { orig_data[i][j].a.i = (int)(((long_long)HDrandom() % (long_long)HDpow(2.0, (double)(precision[0]-1))) << offset[0]); orig_data[i][j].a.c = (char)(((long_long)HDrandom() % @@ -3426,13 +3426,13 @@ test_nbit_compound_2(hid_t file) orig_data[i][j].v = (unsigned int)(((long_long)HDrandom() % (long_long)HDpow(2.0, (double)precision[3])) << offset[3]); - for(m = 0; m < 
array_dims[0]; m++) - for(n = 0; n < array_dims[1]; n++) + for(m = 0; m < (size_t)array_dims[0]; m++) + for(n = 0; n < (size_t)array_dims[1]; n++) orig_data[i][j].b[m][n] = (char)(((long_long)HDrandom() % (long_long)HDpow(2.0, (double)(precision[4]-1))) << offset[4]); - for(m = 0; m < array_dims[0]; m++) - for(n = 0; n < array_dims[1]; n++) { + for(m = 0; m < (size_t)array_dims[0]; m++) + for(n = 0; n < (size_t)array_dims[1]; n++) { orig_data[i][j].d[m][n].i = (int)(-((long_long)HDrandom() % (long_long)HDpow(2.0, (double)(precision[0]-1))) << offset[0]); orig_data[i][j].d[m][n].c = (char)(((long_long)HDrandom() % @@ -3485,20 +3485,20 @@ test_nbit_compound_2(hid_t file) c_mask = ~(~0 << (precision[1] + offset[1])) & (~0 << offset[1]); s_mask = ~(~0 << (precision[2] + offset[2])) & (~0 << offset[2]); b_mask = ~(~0 << (precision[4] + offset[4])) & (~0 << offset[4]); - for (i=0; i<size[0]; i++) { - for (j=0; j<size[1]; j++) { + for (i=0; i<(size_t)size[0]; i++) { + for (j=0; j<(size_t)size[1]; j++) { b_failed = 0; d_failed = 0; - for(m = 0; m < array_dims[0]; m++) - for(n = 0; n < array_dims[1]; n++) + for(m = 0; m < (size_t)array_dims[0]; m++) + for(n = 0; n < (size_t)array_dims[1]; n++) if((new_data[i][j].b[m][n]&b_mask)!=(orig_data[i][j].b[m][n]&b_mask)) { b_failed = 1; goto out; } - for(m = 0; m < array_dims[0]; m++) - for(n = 0; n < array_dims[1]; n++) + for(m = 0; m < (size_t)array_dims[0]; m++) + for(n = 0; n < (size_t)array_dims[1]; n++) if((new_data[i][j].d[m][n].i & i_mask)!=(orig_data[i][j].d[m][n].i & i_mask)|| (new_data[i][j].d[m][n].c & c_mask)!=(orig_data[i][j].d[m][n].c & c_mask)|| (new_data[i][j].d[m][n].s & s_mask)!=(orig_data[i][j].d[m][n].s & s_mask)|| @@ -3591,7 +3591,7 @@ test_nbit_compound_3(hid_t file) const hsize_t chunk_size[1] = {5}; atomic orig_data[5]; atomic new_data[5]; - hsize_t i, k, j; + size_t i, k, j; #else /* H5_HAVE_FILTER_NBIT */ const char *not_supported= " Nbit is not enabled."; @@ -3641,7 +3641,7 @@ test_nbit_compound_3(hid_t 
file) space, H5P_DEFAULT))<0) goto error; /* Initialize data */ - for(i = 0; i < size[0]; i++) { + for(i = 0; i < (size_t)size[0]; i++) { orig_data[i].i = HDrandom() % (long)HDpow(2.0, 17.0 - 1.0); HDstrcpy(orig_data[i].str, "fixed-length C string"); orig_data[i].vl_str = HDstrdup("variable-length C string"); @@ -3692,7 +3692,7 @@ test_nbit_compound_3(hid_t file) goto error; /* Check that the values read are the same as the values written */ - for (i = 0; i < size[0]; i++) { + for (i = 0; i < (size_t)size[0]; i++) { if(new_data[i].i != orig_data[i].i || strcmp(new_data[i].str, orig_data[i].str) !=0 || strcmp(new_data[i].vl_str, orig_data[i].vl_str) !=0 || @@ -3779,7 +3779,7 @@ test_scaleoffset_int(hid_t file) const hsize_t chunk_size[2] = {2,5}; int orig_data[2][5]; int new_data[2][5]; - hsize_t i, j; + size_t i, j; #else /* H5_HAVE_FILTER_SCALEOFFSET */ const char *not_supported= " Scaleoffset is not enabled."; #endif /* H5_HAVE_FILTER_SCALEOFFSET */ @@ -3810,8 +3810,8 @@ test_scaleoffset_int(hid_t file) space,dc))<0) goto error; /* Initialize data */ - for (i= 0;i< size[0]; i++) - for (j = 0; j < size[1]; j++) { + for (i= 0;i< (size_t)size[0]; i++) + for (j = 0; j < (size_t)size[1]; j++) { orig_data[i][j] = HDrandom() % 10000; /* even-numbered values are negtive */ @@ -3853,8 +3853,8 @@ test_scaleoffset_int(hid_t file) new_data)<0) goto error; /* Check that the values read are the same as the values written */ - for (i=0; i<size[0]; i++) { - for (j=0; j<size[1]; j++) { + for (i=0; i<(size_t)size[0]; i++) { + for (j=0; j<(size_t)size[1]; j++) { if (new_data[i][j] != orig_data[i][j]) { H5_FAILED(); printf(" Read different values than written.\n"); @@ -3915,7 +3915,7 @@ test_scaleoffset_int_2(hid_t file) hsize_t count[2]; /* Block count */ hsize_t block[2]; /* Block sizes */ int fillval; - hsize_t j; + size_t j; #else /* H5_HAVE_FILTER_SCALEOFFSET */ const char *not_supported= " Scaleoffset is not enabled."; #endif /* H5_HAVE_FILTER_SCALEOFFSET */ @@ -3959,7 +3959,7 
@@ test_scaleoffset_int_2(hid_t file) stride, count, block)<0) goto error; /* Initialize data of hyperslab */ - for (j = 0; j < size[1]; j++) { + for (j = 0; j < (size_t)size[1]; j++) { orig_data[0][j] = (int)HDrandom() % 10000; /* even-numbered values are negtive */ @@ -4002,7 +4002,7 @@ test_scaleoffset_int_2(hid_t file) new_data)<0) goto error; /* Check that the values read are the same as the values written */ - for (j=0; j<size[1]; j++) { + for (j=0; j<(size_t)size[1]; j++) { if (new_data[0][j] != orig_data[0][j]) { H5_FAILED(); printf(" Read different values than written.\n"); @@ -4057,7 +4057,7 @@ test_scaleoffset_float(hid_t file) const hsize_t chunk_size[2] = {2,5}; float orig_data[2][5]; float new_data[2][5]; - hsize_t i, j; + size_t i, j; #else /* H5_HAVE_FILTER_SCALEOFFSET */ const char *not_supported= " Scaleoffset is not enabled."; #endif /* H5_HAVE_FILTER_SCALEOFFSET */ @@ -4089,8 +4089,8 @@ test_scaleoffset_float(hid_t file) space,dc))<0) goto error; /* Initialize data */ - for (i= 0;i< size[0]; i++) - for (j = 0; j < size[1]; j++) { + for (i= 0;i< (size_t)size[0]; i++) + for (j = 0; j < (size_t)size[1]; j++) { orig_data[i][j] = (float)((HDrandom() % 100000) / (float)1000.0); /* even-numbered values are negtive */ @@ -4132,8 +4132,8 @@ test_scaleoffset_float(hid_t file) new_data)<0) goto error; /* Check that the values read are the same as the values written */ - for (i=0; i<size[0]; i++) { - for (j=0; j<size[1]; j++) { + for (i=0; i<(size_t)size[0]; i++) { + for (j=0; j<(size_t)size[1]; j++) { if (HDfabs(new_data[i][j]-orig_data[i][j]) > HDpow(10.0, -3.0)) { H5_FAILED(); printf(" Read different values than written.\n"); @@ -4194,7 +4194,7 @@ test_scaleoffset_float_2(hid_t file) hsize_t stride[2]; /* Stride of hyperslab */ hsize_t count[2]; /* Block count */ hsize_t block[2]; /* Block sizes */ - hsize_t j; + size_t j; #else /* H5_HAVE_FILTER_SCALEOFFSET */ const char *not_supported= " Scaleoffset is not enabled."; #endif /* 
H5_HAVE_FILTER_SCALEOFFSET */ @@ -4240,7 +4240,7 @@ test_scaleoffset_float_2(hid_t file) stride, count, block)<0) goto error; /* Initialize data of hyperslab */ - for (j = 0; j < size[1]; j++) { + for (j = 0; j < (size_t)size[1]; j++) { orig_data[0][j] = (float)((HDrandom() % 100000) / (float)1000.0); /* even-numbered values are negtive */ @@ -4283,7 +4283,7 @@ test_scaleoffset_float_2(hid_t file) new_data)<0) goto error; /* Check that the values read are the same as the values written */ - for (j=0; j<size[1]; j++) { + for (j=0; j<(size_t)size[1]; j++) { if (HDfabs(new_data[0][j]-orig_data[0][j]) > HDpow(10.0, -3.0)) { H5_FAILED(); printf(" Read different values than written.\n"); @@ -4337,7 +4337,7 @@ test_scaleoffset_double(hid_t file) const hsize_t chunk_size[2] = {2,5}; double orig_data[2][5]; double new_data[2][5]; - hsize_t i, j; + size_t i, j; #else /* H5_HAVE_FILTER_SCALEOFFSET */ const char *not_supported= " Scaleoffset is not enabled."; #endif /* H5_HAVE_FILTER_SCALEOFFSET */ @@ -4369,8 +4369,8 @@ test_scaleoffset_double(hid_t file) space,dc))<0) goto error; /* Initialize data */ - for (i= 0;i< size[0]; i++) - for (j = 0; j < size[1]; j++) { + for (i= 0;i< (size_t)size[0]; i++) + for (j = 0; j < (size_t)size[1]; j++) { orig_data[i][j] = (HDrandom() % 10000000) / 10000000.0; /* even-numbered values are negtive */ @@ -4412,8 +4412,8 @@ test_scaleoffset_double(hid_t file) new_data)<0) goto error; /* Check that the values read are the same as the values written */ - for (i=0; i<size[0]; i++) { - for (j=0; j<size[1]; j++) { + for (i=0; i<(size_t)size[0]; i++) { + for (j=0; j<(size_t)size[1]; j++) { if (HDfabs(new_data[i][j]-orig_data[i][j]) > HDpow(10.0, -7.0)) { H5_FAILED(); printf(" Read different values than written.\n"); @@ -4474,7 +4474,7 @@ test_scaleoffset_double_2(hid_t file) hsize_t stride[2]; /* Stride of hyperslab */ hsize_t count[2]; /* Block count */ hsize_t block[2]; /* Block sizes */ - hsize_t j; + size_t j; #else /* H5_HAVE_FILTER_SCALEOFFSET 
*/ const char *not_supported= " Scaleoffset is not enabled."; #endif /* H5_HAVE_FILTER_SCALEOFFSET */ @@ -4520,7 +4520,7 @@ test_scaleoffset_double_2(hid_t file) stride, count, block)<0) goto error; /* Initialize data of hyperslab */ - for (j = 0; j < size[1]; j++) { + for (j = 0; j < (size_t)size[1]; j++) { orig_data[0][j] = (HDrandom() % 10000000) / 10000000.0; /* even-numbered values are negtive */ @@ -4563,7 +4563,7 @@ test_scaleoffset_double_2(hid_t file) new_data)<0) goto error; /* Check that the values read are the same as the values written */ - for (j=0; j<size[1]; j++) { + for (j=0; j<(size_t)size[1]; j++) { if (HDfabs(new_data[0][j]-orig_data[0][j]) > HDpow(10.0, -7.0)) { H5_FAILED(); printf(" Read different values than written.\n"); @@ -4794,7 +4794,7 @@ test_can_apply(hid_t file) const hsize_t dims[2] = {DSET_DIM1, DSET_DIM2}; /* Dataspace dimensions */ const hsize_t chunk_dims[2] = {2, 25}; /* Chunk dimensions */ hsize_t dset_size; /* Dataset size */ - hsize_t i,j; /* Local index variables */ + size_t i,j; /* Local index variables */ TESTING("dataset filter 'can apply' callback"); @@ -4883,8 +4883,8 @@ test_can_apply(hid_t file) /* Compare data */ /* Check that the values read are the same as the values written */ - for (i=0; i<dims[0]; i++) { - for (j=0; j<dims[1]; j++) { + for (i=0; i<(size_t)dims[0]; i++) { + for (j=0; j<(size_t)dims[1]; j++) { if (points[i][j] != check[i][j]) { H5_FAILED(); printf(" Line %d: Read different values than written.\n",__LINE__); @@ -5156,7 +5156,7 @@ test_set_local(hid_t fapl) const hsize_t chunk_dims[2] = {2, 25}; /* Chunk dimensions */ hsize_t dset_size; /* Dataset size */ unsigned cd_values[2]={BOGUS2_PARAM_1, BOGUS2_PARAM_2}; /* Parameters for Bogus2 filter */ - hsize_t i,j; /* Local index variables */ + size_t i,j; /* Local index variables */ double n; /* Local index variables */ TESTING("dataset filter 'set local' callback"); diff --git a/test/dtypes.c b/test/dtypes.c index e32a0a9..75dde75 100644 --- 
a/test/dtypes.c +++ b/test/dtypes.c @@ -2589,7 +2589,7 @@ test_named (hid_t fapl) hid_t file=-1, type=-1, space=-1, dset=-1, t2=-1, t3=-1, attr1=-1; herr_t status; static hsize_t ds_size[2] = {10, 20}; - hsize_t i,j; + size_t i,j; unsigned attr_data[10][20]; char filename[1024]; @@ -2645,8 +2645,8 @@ test_named (hid_t fapl) /* It should be possible to define an attribute for the named type */ if ((attr1=H5Acreate (type, "attr1", H5T_NATIVE_UCHAR, space, H5P_DEFAULT))<0) goto error; - for (i=0; i<ds_size[0]; i++) - for (j=0; j<ds_size[1]; j++) + for (i=0; i<(size_t)ds_size[0]; i++) + for (j=0; j<(size_t)ds_size[1]; j++) attr_data[i][j] = (int)(i*ds_size[1]+j); if (H5Awrite(attr1, H5T_NATIVE_UINT, attr_data)<0) goto error; if (H5Aclose (attr1)<0) goto error; diff --git a/test/enum.c b/test/enum.c index b565cb0..ae746c4 100644 --- a/test/enum.c +++ b/test/enum.c @@ -133,7 +133,8 @@ test_noconv(hid_t file) E1_RED, E1_BLUE, E1_GREEN, E1_BLACK, E1_WHITE, E1_RED, E1_WHITE, E1_GREEN, E1_GREEN, E1_BLUE}; c_e1 data2[NELMTS(data1)]; - hsize_t i, ds_size[1]={NELMTS(data1)}; + hsize_t ds_size[1]={NELMTS(data1)}; + size_t i; TESTING("no-conversion datasets"); if ((cwg=H5Gcreate(file, "test_noconv", 0))<0) goto error; @@ -151,7 +152,7 @@ test_noconv(hid_t file) if (H5Dwrite(dset, type, space, space, H5P_DEFAULT, data1)<0) goto error; if (H5Dread(dset, type, space, space, H5P_DEFAULT, data2)<0) goto error; - for (i=0; i<ds_size[0]; i++) { + for (i=0; i<(size_t)ds_size[0]; i++) { if (data1[i]!=data2[i]) { H5_FAILED(); printf(" data1[%lu]=%d, data2[%lu]=%d (should be same)\n", @@ -202,7 +203,8 @@ static int test_tr1(hid_t file) { hid_t cwg=-1, m_type=-1, f_type=-1, space=-1, dset=-1; - hsize_t i, ds_size[1]={10}; + hsize_t ds_size[1]={10}; + size_t i; c_e1 eval; int ival; static c_e1 data1[10]={E1_RED, E1_GREEN, E1_BLUE, E1_GREEN, E1_WHITE, @@ -238,7 +240,7 @@ test_tr1(hid_t file) if (H5Dwrite(dset, m_type, space, space, H5P_DEFAULT, data1)<0) goto error; if (H5Dread(dset, m_type, 
space, space, H5P_DEFAULT, data2)<0) goto error; - for (i=0; i<ds_size[0]; i++) { + for (i=0; i<(size_t)ds_size[0]; i++) { if (data1[i]!=data2[i]) { H5_FAILED(); printf(" data1[%lu]=%d, data2[%lu]=%d (should be same)\n", @@ -293,7 +295,8 @@ static int test_tr2(hid_t file) { hid_t cwg=-1, m_type=-1, f_type=-1, space=-1, dset=-1; - hsize_t i, ds_size[1]={10}; + hsize_t ds_size[1]={10}; + size_t i; c_e1 val1; int val2; static c_e1 data1[10]={E1_RED, E1_GREEN, E1_BLUE, E1_GREEN, E1_WHITE, @@ -328,7 +331,7 @@ test_tr2(hid_t file) if (H5Dwrite(dset, m_type, space, space, H5P_DEFAULT, data1)<0) goto error; if (H5Dread(dset, m_type, space, space, H5P_DEFAULT, data2)<0) goto error; - for (i=0; i<ds_size[0]; i++) { + for (i=0; i<(size_t)ds_size[0]; i++) { if (data1[i]!=data2[i]) { H5_FAILED(); printf(" data1[%lu]=%d, data2[%lu]=%d (should be same)\n", diff --git a/test/external.c b/test/external.c index e46f5e3..db57afb 100644 --- a/test/external.c +++ b/test/external.c @@ -588,7 +588,7 @@ test_2 (hid_t fapl) hid_t dset=-1; /*dataset */ hid_t grp=-1; /*group to emit diagnostics */ int fd; /*external file descriptors */ - hsize_t i, j; /*miscellaneous counters */ + size_t i, j; /*miscellaneous counters */ hssize_t n; /*bytes of I/O */ char filename[1024]; /*file names */ int part[25], whole[100]; /*raw data buffers */ diff --git a/test/flush1.c b/test/flush1.c index 2086e3f..5f8eefc 100644 --- a/test/flush1.c +++ b/test/flush1.c @@ -56,7 +56,7 @@ create_file(char* name, hid_t fapl) hid_t file, dcpl, space, dset, groups, grp; hsize_t ds_size[2] = {100, 100}; hsize_t ch_size[2] = {5, 5}; - hsize_t i, j; + size_t i, j; if ((file=H5Fcreate(name, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) goto error; @@ -74,7 +74,7 @@ create_file(char* name, hid_t fapl) * for the Win32 version 5.0 compiler. 
* 1998-11-06 ptl */ - for (j=0; j<ds_size[1]; j++) { + for (j=0; j<(size_t)ds_size[1]; j++) { the_data[i][j] = (double)(hssize_t)i/(hssize_t)(j+1); } } @@ -119,7 +119,7 @@ extend_file(hid_t file) hid_t dcpl, space, dset; hsize_t ds_size[2] = {100, 100}; hsize_t ch_size[2] = {5, 5}; - hsize_t i, j; + size_t i, j; /* Create a chunked dataset */ if ((dcpl=H5Pcreate(H5P_DATASET_CREATE))<0) goto error; @@ -135,7 +135,7 @@ extend_file(hid_t file) * for the Win32 version 5.0 compiler. * 1998-11-06 ptl */ - for (j=0; j<ds_size[1]; j++) { + for (j=0; j<(size_t)ds_size[1]; j++) { the_data[i][j] = (double)(hssize_t)i/(hssize_t)(j+1); } } diff --git a/test/flush2.c b/test/flush2.c index f0b760e..2b8cf7a 100644 --- a/test/flush2.c +++ b/test/flush2.c @@ -54,7 +54,7 @@ check_dset(hid_t file, const char* name) hid_t space, dset; hsize_t ds_size[2] = {100, 100}; double error; - hsize_t i, j; + size_t i, j; /* Open the dataset */ if ((dset=H5Dopen(file, name))<0) goto error; @@ -65,8 +65,8 @@ check_dset(hid_t file, const char* name) /* Read some data */ if (H5Dread(dset, H5T_NATIVE_DOUBLE, space, space, H5P_DEFAULT, the_data)<0) goto error; - for (i=0; i<ds_size[0]; i++) { - for (j=0; j<ds_size[1]; j++) { + for (i=0; i<(size_t)ds_size[0]; i++) { + for (j=0; j<(size_t)ds_size[1]; j++) { /* * The extra cast in the following statement is a bug workaround * for the Win32 version 5.0 compiler. 
diff --git a/test/istore.c b/test/istore.c index 3c066aa..8764643 100644 --- a/test/istore.c +++ b/test/istore.c @@ -225,7 +225,7 @@ test_extend(hid_t f, const char *prefix, hid_t dataset; /* Dataset ID */ hid_t fspace; /* Dataset's file dataspace */ hid_t mspace; /* Dataset's memory dataspace */ - hsize_t i, j, k, ctr; + size_t i, j, k, ctr; int ndims; uint8_t *buf = NULL, *check = NULL, *whole = NULL; char dims[64], s[256], name[256]; diff --git a/test/tsohm.c b/test/tsohm.c index d0c0629..010311f 100644 --- a/test/tsohm.c +++ b/test/tsohm.c @@ -971,7 +971,7 @@ static void sohm_attr_helper(hid_t fcpl_id) int wdata[2] = {7, 42}; int rdata[2]; herr_t ret; - hsize_t x; + size_t x; /* Create a file using the fcpl */ file_id = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl_id, H5P_DEFAULT); @@ -1005,7 +1005,7 @@ static void sohm_attr_helper(hid_t fcpl_id) memset(rdata, 0, sizeof(rdata)); ret = H5Aread(attr_id, H5T_NATIVE_INT, rdata); CHECK_I(ret, "H5Aread"); - for(x=0; x<dims; ++x) { + for(x=0; x<(size_t)dims; ++x) { VERIFY(rdata[x], wdata[x], "H5Aread"); } @@ -1041,7 +1041,7 @@ static void sohm_attr_helper(hid_t fcpl_id) memset(rdata, 0, sizeof(rdata)); ret = H5Aread(attr_id, H5T_NATIVE_INT, rdata); CHECK_I(ret, "H5Aread"); - for(x=0; x<dims; ++x) { + for(x=0; x<(size_t)dims; ++x) { VERIFY(rdata[x], wdata[x], "H5Aread"); } |