/*
 * Copyright (C) 1997 NCSA
 * All rights reserved.
 *
 * Programmer:  Robb Matzke
 *              Tuesday, December 9, 1997
 *
 * Purpose:     Tests the dataset interface (H5D)
 */
#include "h5test.h"
#include "H5Zprivate.h"     /* Filter functions */

const char *FILENAME[] = {
    "dataset",
    "compact_dataset",
    "dset_offset",
    NULL
};

#define FILE_DEFLATE_NAME       "deflate.h5"

#define DSET_DEFAULT_NAME       "default"
#define DSET_CHUNKED_NAME       "chunked"
#define DSET_COMPACT_NAME       "compact"
#define DSET_SIMPLE_IO_NAME     "simple_io"
#define DSET_COMPACT_IO_NAME    "compact_io"
#define DSET_TCONV_NAME         "tconv"
#define DSET_DEFLATE_NAME       "deflate"
#define DSET_SHUFFLE_NAME       "shuffle"
#define DSET_SHUFFLE_DEFLATE_NAME "shuffle+deflate"
#define DSET_BOGUS_NAME         "bogus"
#define DSET_MISSING_NAME       "missing"
#define DSET_ONEBYTE_SHUF_NAME  "onebyte_shuffle"

#define USER_BLOCK              512
#define H5Z_BOGUS               305

/* Shared global arrays */
int points[100][200], check[100][200];


/*-------------------------------------------------------------------------
 * Function:    test_create
 *
 * Purpose:     Attempts to create a dataset.
 *
 * Return:      Success:    0
 *
 *              Failure:    -1
 *
 * Programmer:  Robb Matzke
 *              Tuesday, December 9, 1997
 *
 * Modifications:
 *              Added test for compact dataset creation.
 *              Raymond Lu
 *              August 8, 2002
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_create(hid_t file)
{
    hid_t   dataset, space, small_space, create_parms;
    hsize_t dims[2], small_dims[2];
    herr_t  status;
    hsize_t csize[2];

    TESTING("create, open, close");

    /* Create the data space */
    dims[0] = 256;
    dims[1] = 512;
    space = H5Screate_simple(2, dims, NULL);
    assert(space >= 0);

    /* Create a small data space for compact dataset */
    small_dims[0] = 16;
    small_dims[1] = 8;
    small_space = H5Screate_simple(2, small_dims, NULL);
    assert(small_space >= 0);

    /*
     * Create a dataset using the default dataset creation properties.  We're
     * not sure what they are, so we won't check.
     */
    dataset = H5Dcreate(file, DSET_DEFAULT_NAME, H5T_NATIVE_DOUBLE, space,
                        H5P_DEFAULT);
    if (dataset < 0) goto error;

    /* Close the dataset */
    if (H5Dclose(dataset) < 0) goto error;

    /* Add a comment to the dataset */
    status = H5Gset_comment(file, DSET_DEFAULT_NAME, "This is a dataset");
    if (status < 0) goto error;

    /*
     * Try creating a dataset that already exists.  This should fail since a
     * dataset can only be created once.  Temporarily turn off error
     * reporting.
     */
    H5E_BEGIN_TRY {
        dataset = H5Dcreate(file, DSET_DEFAULT_NAME, H5T_NATIVE_DOUBLE, space,
                            H5P_DEFAULT);
    } H5E_END_TRY;
    if (dataset >= 0) {
        H5_FAILED();
        puts(" Library allowed overwrite of existing dataset.");
        goto error;
    }

    /*
     * Open the dataset we created above and then close it.  This is how
     * existing datasets are accessed.
     */
    if ((dataset = H5Dopen(file, DSET_DEFAULT_NAME)) < 0) goto error;
    if (H5Dclose(dataset) < 0) goto error;

    /*
     * Try opening a non-existent dataset.  This should fail since new datasets
     * cannot be created with this function.  Temporarily turn off error
     * reporting.
     */
    H5E_BEGIN_TRY {
        dataset = H5Dopen(file, "does_not_exist");
    } H5E_END_TRY;
    if (dataset >= 0) {
        H5_FAILED();
        puts(" Opened a non-existent dataset.");
        goto error;
    }

    /*
     * Create a new dataset that uses chunked storage instead of the default
     * layout.
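     * (The first attempt below uses chunk dimensions twice as large as the
     * fixed dataspace dimensions, which the library is expected to reject.)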
*/ create_parms = H5Pcreate(H5P_DATASET_CREATE); assert(create_parms >= 0); /* Attempt to create a dataset with invalid chunk sizes */ csize[0] = dims[0]*2; csize[1] = dims[1]*2; status = H5Pset_chunk(create_parms, 2, csize); assert(status >= 0); H5E_BEGIN_TRY { dataset = H5Dcreate(file, DSET_CHUNKED_NAME, H5T_NATIVE_DOUBLE, space, create_parms); } H5E_END_TRY; if (dataset >= 0) { H5_FAILED(); puts(" Opened a dataset with incorrect chunking parameters."); goto error; } csize[0] = 5; csize[1] = 100; status = H5Pset_chunk(create_parms, 2, csize); assert(status >= 0); dataset = H5Dcreate(file, DSET_CHUNKED_NAME, H5T_NATIVE_DOUBLE, space, create_parms); if (dataset < 0) goto error; H5Pclose (create_parms); /* Test dataset address. Should be undefined. */ if(H5Dget_offset(dataset)!=HADDR_UNDEF) goto error; /* * Close the chunked dataset. */ if (H5Dclose(dataset) < 0) goto error; /* * Create a compact dataset, then close it. */ create_parms = H5Pcreate(H5P_DATASET_CREATE); assert(create_parms >= 0); status = H5Pset_layout(create_parms, H5D_COMPACT); assert(status >= 0); status = H5Pset_alloc_time(create_parms, H5D_ALLOC_TIME_EARLY); assert(status >= 0); dataset = H5Dcreate(file, DSET_COMPACT_NAME, H5T_NATIVE_DOUBLE, small_space, create_parms); if(dataset < 0) goto error; H5Pclose(create_parms); if(H5Dclose(dataset) <0) goto error; PASSED(); return 0; error: return -1; } /*------------------------------------------------------------------------- * Function: test_simple_io * * Purpose: Tests simple I/O. That is, reading and writing a complete * multi-dimensional array without data type or data space * conversions, without compression, and stored contiguously. * * Return: Success: 0 * * Failure: -1 * * Programmer: Robb Matzke * Wednesday, December 10, 1997 * * Modifications: * *------------------------------------------------------------------------- */ static herr_t test_simple_io(hid_t file, char *fname) { hid_t dataset, space, xfer; int i, j, n; hsize_t dims[2]; void *tconv_buf = NULL; int f; haddr_t offset; int rdata[100][200]; TESTING("simple I/O"); /* Initialize the dataset */ for (i = n = 0; i < 100; i++) { for (j = 0; j < 200; j++) { points[i][j] = n++; } } /* Create the data space */ dims[0] = 100; dims[1] = 200; if ((space = H5Screate_simple(2, dims, NULL))<0) goto error; /* Create a small conversion buffer to test strip mining */ tconv_buf = malloc (1000); xfer = H5Pcreate (H5P_DATASET_XFER); assert (xfer>=0); if (H5Pset_buffer (xfer, 1000, tconv_buf, NULL)<0) goto error; /* Create the dataset */ if ((dataset = H5Dcreate(file, DSET_SIMPLE_IO_NAME, H5T_NATIVE_INT, space, H5P_DEFAULT))<0) goto error; /* Test dataset address. Should be undefined. */ if(H5Dget_offset(dataset)!=HADDR_UNDEF) goto error; /* Write the data to the dataset */ if (H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer, points)<0) goto error; /* Test dataset address in file. 
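* (H5Dget_offset reports the absolute byte offset of the dataset's raw data
* within the HDF5 file, which lets the test cross-check the contents with
* ordinary file I/O via HDopen/HDlseek/HDread.)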
Open the same file as a C file, seek * the data position as H5Dget_offset points to, read the dataset, and * compare it with the data written in.*/ if((offset=H5Dget_offset(dataset))==HADDR_UNDEF) goto error; f = HDopen(fname, O_RDONLY, 0); HDlseek(f, (off_t)offset, SEEK_SET); HDread(f, rdata, sizeof(int)*100*200); /* Check that the values read are the same as the values written */ for (i = 0; i < 100; i++) { for (j = 0; j < 200; j++) { if (points[i][j] != rdata[i][j]) { H5_FAILED(); printf(" Read different values than written.\n"); printf(" At index %d,%d\n", i, j); goto error; } } } HDclose(f); /* Read the dataset back */ if (H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer, check)<0) goto error; /* Check that the values read are the same as the values written */ for (i = 0; i < 100; i++) { for (j = 0; j < 200; j++) { if (points[i][j] != check[i][j]) { H5_FAILED(); printf(" Read different values than written.\n"); printf(" At index %d,%d\n", i, j); goto error; } } } i=H5Pclose (xfer); H5Dclose(dataset); free (tconv_buf); PASSED(); return 0; error: return -1; } /*------------------------------------------------------------------------- * Function: test_userblock_offset * * Purpose: Tests H5Dget_offset when user block exists. * * Return: Success: 0 * * Failure: -1 * * Programmer: Raymond Lu * Wednesday, November 27, 2002 * * Modifications: * *------------------------------------------------------------------------- */ static herr_t test_userblock_offset(hid_t fapl) { char filename[32]; hid_t file, fcpl, dataset, space; int i, j, n; hsize_t dims[2]; /* FILE *f;*/ int f; haddr_t offset; int rdata[100][200]; TESTING("dataset offset with user block"); h5_fixname(FILENAME[2], fapl, filename, sizeof filename); if((fcpl=H5Pcreate(H5P_FILE_CREATE))<0) goto error; if(H5Pset_userblock(fcpl, USER_BLOCK)<0) goto error; if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, fcpl, fapl))<0) { goto error; } /* Initialize the dataset */ /*for (i = n = 0; i < 100; i++) { for (j = 0; j < 200; j++) { points[i][j] = n++; } }*/ /* Create the data space */ dims[0] = 100; dims[1] = 200; if ((space = H5Screate_simple(2, dims, NULL))<0) goto error; /* Create the dataset */ if ((dataset = H5Dcreate(file, DSET_SIMPLE_IO_NAME, H5T_NATIVE_INT, space, H5P_DEFAULT))<0) goto error; /* Write the data to the dataset */ if (H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points)<0) goto error; /* Test dataset address in file. Open the same file as a C file, seek * the data position as H5Dget_offset points to, read the dataset, and * compare it with the data written in.*/ if((offset=H5Dget_offset(dataset))==HADDR_UNDEF) goto error; f = HDopen(filename, O_RDONLY, 0); HDlseek(f, (off_t)offset, SEEK_SET); HDread(f, rdata, sizeof(int)*100*200); /* Check that the values read are the same as the values written */ for (i = 0; i < 100; i++) { for (j = 0; j < 200; j++) { if (points[i][j] != rdata[i][j]) { H5_FAILED(); printf(" Read different values than written.\n"); printf(" At index %d,%d\n", i, j); goto error; } } } HDclose(f); H5Dclose(dataset); H5Fclose(file); PASSED(); return 0; error: return -1; } /*------------------------------------------------------------------------- * Function: test_compact_io * * Purpose: Tests compact dataset I/O. That is, reading and writing a * complete multi-dimensional array without data type or data * space conversions, without compression, and store in * compact dataset. 
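* (Compact storage keeps the raw data in the dataset's object header, so
* H5Dget_offset is expected to return HADDR_UNDEF for such a dataset.)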
* * Return: Success: 0 * * Failure: -1 * * Programmer: Raymond Lu * August 8, 2002 * * Modifications: * *------------------------------------------------------------------------- */ static herr_t test_compact_io(hid_t fapl) { hid_t file, dataset, space, plist; hsize_t dims[2]; herr_t status; int wbuf[16][8], rbuf[16][8]; char filename[1024]; int i, j, n; TESTING("compact dataset I/O"); /* Initialize data */ n=0; for(i=0; i<16; i++) { for(j=0; j<8; j++) { wbuf[i][j] = n++; } } /* Create a small data space for compact dataset */ dims[0] = 16; dims[1] = 8; space = H5Screate_simple(2, dims, NULL); assert(space>=0); /* Create a file */ h5_fixname(FILENAME[1], fapl, filename, sizeof filename); if((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) goto error; /* Create property list for compact dataset creation */ plist = H5Pcreate(H5P_DATASET_CREATE); assert(plist >= 0); status = H5Pset_layout(plist, H5D_COMPACT); assert(status >= 0); status = H5Pset_alloc_time(plist, H5D_ALLOC_TIME_EARLY); assert(status >= 0); /* Create and write to a compact dataset */ if((dataset = H5Dcreate(file, DSET_COMPACT_IO_NAME, H5T_NATIVE_INT, space, plist))<0) goto error; /* Test dataset address. Should be undefined. */ if(H5Dget_offset(dataset)!=HADDR_UNDEF) goto error; if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf)<0) goto error; /* Test dataset address. Should be undefined. */ if(H5Dget_offset(dataset)!=HADDR_UNDEF) goto error; /* Close file */ H5Sclose(space); H5Pclose(plist); H5Dclose(dataset); H5Fclose(file); /* * Open the file and check data */ if((file=H5Fopen(filename, H5F_ACC_RDONLY, fapl))<0) goto error; if((dataset = H5Dopen(file, DSET_COMPACT_IO_NAME))<0) goto error; if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf)<0) goto error; /* Check that the values read are the same as the values written */ for (i = 0; i < 16; i++) { for (j = 0; j < 8; j++) { if (rbuf[i][j] != wbuf[i][j]) { H5_FAILED(); printf(" Read different values than written.\n"); printf(" At index %d,%d\n", i, j); goto error; } } } H5Dclose(dataset); H5Fclose(file); PASSED(); return 0; error: return -1; } /*------------------------------------------------------------------------- * Function: test_tconv * * Purpose: Test some simple data type conversion stuff. 
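* (The test writes 32-bit little-endian integers and reads them back as
* big-endian, so each four-byte element should come back with its bytes
* reversed.)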
* * Return: Success: 0 * * Failure: -1 * * Programmer: Robb Matzke * Wednesday, January 14, 1998 * * Modifications: * *------------------------------------------------------------------------- */ static herr_t test_tconv(hid_t file) { char *out=NULL, *in=NULL; int i; hsize_t dims[1]; hid_t space, dataset; out = malloc (4*1000000); assert (out); in = malloc (4*1000000); assert (in); TESTING("data type conversion"); /* Initialize the dataset */ for (i = 0; i < 1000000; i++) { out[i*4+0] = 0x11; out[i*4+1] = 0x22; out[i*4+2] = 0x33; out[i*4+3] = 0x44; } /* Create the data space */ dims[0] = 1000000; if ((space = H5Screate_simple (1, dims, NULL))<0) goto error; /* Create the data set */ if ((dataset = H5Dcreate(file, DSET_TCONV_NAME, H5T_STD_I32LE, space, H5P_DEFAULT))<0) goto error; /* Write the data to the dataset */ if (H5Dwrite(dataset, H5T_STD_I32LE, H5S_ALL, H5S_ALL, H5P_DEFAULT, out)<0) goto error; /* Read data with byte order conversion */ if (H5Dread(dataset, H5T_STD_I32BE, H5S_ALL, H5S_ALL, H5P_DEFAULT, in)<0) goto error; /* Check */ for (i = 0; i < 1000000; i++) { if (in[4*i+0]!=out[4*i+3] || in[4*i+1]!=out[4*i+2] || in[4*i+2]!=out[4*i+1] || in[4*i+3]!=out[4*i+0]) { H5_FAILED(); puts(" Read with byte order conversion failed."); goto error; } } if (H5Dclose(dataset)<0) goto error; free (out); free (in); puts(" PASSED"); return 0; error: return -1; } /*------------------------------------------------------------------------- * Function: bogus * * Purpose: A bogus compression method that doesn't do anything. * * Return: Success: Data chunk size * * Failure: 0 * * Programmer: Robb Matzke * Tuesday, April 21, 1998 * * Modifications: * *------------------------------------------------------------------------- */ static size_t bogus(unsigned int UNUSED flags, size_t UNUSED cd_nelmts, const unsigned int UNUSED *cd_values, size_t nbytes, size_t UNUSED *buf_size, void UNUSED **buf) { return nbytes; } /*------------------------------------------------------------------------- * Function: test_compression_internal * * Purpose: Tests dataset compression. If compression is requested when * it hasn't been compiled into the library (such as when * updating an existing compressed dataset) then data is sent to * the file uncompressed but no errors are returned. * * Return: Success: 0 * Failure: -1 * * Programmer: Robb Matzke * Wednesday, April 15, 1998 * * Modifications: * Moved out of main test_compression routine * Quincey Koziol, November 14, 2002 * *------------------------------------------------------------------------- */ static herr_t test_compression_internal(hid_t fid, const char *name, hid_t dcpl, hsize_t *dset_size) { hid_t dataset; /* Dataset ID */ hid_t dxpl; /* Dataset xfer property list ID */ hid_t sid; /* Dataspace ID */ const hsize_t size[2] = {100, 200}; /* Dataspace dimensions */ const hssize_t hs_offset[2] = {7, 30}; /* Hyperslab offset */ const hsize_t hs_size[2] = {4, 50}; /* Hyperslab size */ void *tconv_buf = NULL; /* Temporary conversion buffer */ hsize_t i, j, n; /* Local index variables */ /* Create the data space */ if ((sid = H5Screate_simple(2, size, NULL))<0) goto error; /* * Create a small conversion buffer to test strip mining. We * might as well test all we can! 
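* (The 1000-byte type-conversion buffer set with H5Pset_buffer is far smaller
* than the dataset, which should force the library to convert and transfer
* the data in several strips rather than in one pass.)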
*/ if ((dxpl = H5Pcreate (H5P_DATASET_XFER))<0) goto error; tconv_buf = malloc (1000); if (H5Pset_buffer (dxpl, 1000, tconv_buf, NULL)<0) goto error; TESTING("compression (setup)"); /* Create the dataset */ if ((dataset = H5Dcreate(fid, name, H5T_NATIVE_INT, sid, dcpl))<0) goto error; PASSED(); /*---------------------------------------------------------------------- * STEP 1: Read uninitialized data. It should be zero. *---------------------------------------------------------------------- */ TESTING("compression (uninitialized read)"); if (H5Dread (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check)<0) goto error; for (i=0; i=null_size) { H5_FAILED(); puts(" Deflated size greater than uncompressed size."); goto error; } /* end if */ /* Clean up objects used for this test */ if (H5Pclose (dc)<0) goto error; #else /* H5_HAVE_FILTER_DEFLATE */ TESTING("deflate filter"); SKIPPED(); puts("Deflate filter not enabled"); #endif /* H5_HAVE_FILTER_DEFLATE */ #ifdef H5_HAVE_FILTER_SHUFFLE /* Test shuffle I/O filter (by itself) */ puts("Testing shuffle filter"); if((dc = H5Pcreate(H5P_DATASET_CREATE))<0) goto error; if (H5Pset_chunk (dc, 2, chunk_size)<0) goto error; if (H5Pset_shuffle (dc, sizeof(int))<0) goto error; if(test_compression_internal(file,DSET_SHUFFLE_NAME,dc,&shuffle_size)<0) goto error; if(shuffle_size!=null_size) { H5_FAILED(); puts(" Shuffled size not the same as uncompressed size."); goto error; } /* end if */ /* Clean up objects used for this test */ if (H5Pclose (dc)<0) goto error; #else /* H5_HAVE_FILTER_SHUFFLE */ TESTING("shuffle filter"); SKIPPED(); puts("Shuffle filter not enabled"); #endif /* H5_HAVE_FILTER_SHUFFLE */ #if defined H5_HAVE_FILTER_DEFLATE && defined H5_HAVE_FILTER_SHUFFLE /* Test combination of deflate & shuffle I/O filters */ puts("Testing shuffle+deflate filters"); if((dc = H5Pcreate(H5P_DATASET_CREATE))<0) goto error; if (H5Pset_chunk (dc, 2, chunk_size)<0) goto error; if (H5Pset_shuffle (dc, sizeof(int))<0) goto error; if (H5Pset_deflate (dc, 6)<0) goto error; if(test_compression_internal(file,DSET_SHUFFLE_DEFLATE_NAME,dc,&shuff_def_size)<0) goto error; if(shuff_def_size>=deflate_size) { H5_FAILED(); puts(" Shuffle+deflate size greater than plain deflated size."); goto error; } /* end if */ /* Clean up objects used for this test */ if (H5Pclose (dc)<0) goto error; #else /* H5_HAVE_FILTER_DEFLATE && H5_HAVE_FILTER_SHUFFLE */ TESTING("shuffle+deflate filters"); SKIPPED(); puts("Deflate or shuffle filter not enabled"); #endif /* H5_HAVE_FILTER_DEFLATE && H5_HAVE_FILTER_SHUFFLE */ return 0; error: return -1; } /*------------------------------------------------------------------------- * Function: test_missing_filter * * Purpose: Tests library behavior when filter is missing * * Return: Success: 0 * Failure: -1 * * Programmer: Quincey Koziol * Thursday, November 14, 2002 * *------------------------------------------------------------------------- */ static herr_t test_missing_filter(hid_t file) { hid_t fid; /* File ID */ hid_t dsid; /* Dataset ID */ hid_t sid; /* Dataspace ID */ hid_t dcpl; /* Dataspace creation property list ID */ const hsize_t dims[2] = {100, 200}; /* Dataspace dimensions */ const hsize_t chunk_dims[2] = {2, 25}; /* Chunk dimensions */ hsize_t dset_size; /* Dataset size */ hsize_t i,j; /* Local index variables */ herr_t ret; /* Generic return value */ char testfile[512]=""; /* Buffer to hold name of existing test file */ char *srcdir = HDgetenv("srcdir"); /* The source directory, if we are using the --srcdir configure option */ 
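    /* Outline: with the deflate filter unregistered, creating and writing a
     * dataset that requests deflate should still succeed, but its storage
     * size should match the uncompressed data; the filter is re-registered
     * at the end of the test. */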
TESTING("dataset access with missing filter"); /* Unregister the deflate filter */ #ifdef H5_HAVE_FILTER_DEFLATE /* Verify deflate filter is registered currently */ if(H5Zfilter_avail(H5Z_FILTER_DEFLATE)!=TRUE) { H5_FAILED(); printf(" Line %d: Deflate filter not available\n",__LINE__); goto error; } /* end if */ /* Unregister deflate filter (use internal function) */ if (H5Z_unregister(H5Z_FILTER_DEFLATE)<0) { H5_FAILED(); printf(" Line %d: Can't unregister deflate filter\n",__LINE__); goto error; } /* end if */ #endif /* H5_HAVE_FILTER_DEFLATE */ /* Verify deflate filter is not registered currently */ if(H5Zfilter_avail(H5Z_FILTER_DEFLATE)!=FALSE) { H5_FAILED(); printf(" Line %d: Deflate filter available\n",__LINE__); goto error; } /* end if */ /* Create dcpl with deflate filter */ if((dcpl = H5Pcreate(H5P_DATASET_CREATE))<0) { H5_FAILED(); printf(" Line %d: Can't create dcpl\n",__LINE__); goto error; } /* end if */ if(H5Pset_chunk(dcpl, 2, chunk_dims)<0) { H5_FAILED(); printf(" Line %d: Can't set chunk sizes\n",__LINE__); goto error; } /* end if */ if(H5Pset_deflate(dcpl, 9)<0) { H5_FAILED(); printf(" Line %d: Can't set deflate filter\n",__LINE__); goto error; } /* end if */ /* Create the data space */ if ((sid = H5Screate_simple(2, dims, NULL))<0) { H5_FAILED(); printf(" Line %d: Can't open dataspace\n",__LINE__); goto error; } /* end if */ /* Create new dataset */ if ((dsid = H5Dcreate(file, DSET_MISSING_NAME, H5T_NATIVE_INT, sid, dcpl))<0) { H5_FAILED(); printf(" Line %d: Can't create dataset\n",__LINE__); goto error; } /* end if */ /* Write data */ if (H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points)<0) { H5_FAILED(); printf(" Line %d: Error writing dataset data\n",__LINE__); goto error; } /* end if */ /* Flush the file (to clear the cache) */ if (H5Fflush(file, H5F_SCOPE_GLOBAL)<0) { H5_FAILED(); printf(" Line %d: Error flushing file\n",__LINE__); goto error; } /* end if */ /* Query the dataset's size on disk */ if((dset_size=H5Dget_storage_size(dsid))==0) { H5_FAILED(); printf(" Line %d: Error querying dataset size\n",__LINE__); goto error; } /* end if */ /* Verify that the size indicates data is uncompressed */ /* (i.e. 
the deflation filter we asked for was silently ignored) */
    if((H5Tget_size(H5T_NATIVE_INT)*100*200)!=dset_size) {
        H5_FAILED();
        printf(" Line %d: Incorrect dataset size: %lu\n",__LINE__,(unsigned long)dset_size);
        goto error;
    } /* end if */

    /* Read data */
    if (H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check)<0) {
        H5_FAILED();
        printf(" Line %d: Error reading dataset data\n",__LINE__);
        goto error;
    } /* end if */

    /* Compare data */
    /* Check that the values read are the same as the values written */
    for (i=0; i=0) {
        H5_FAILED();
        printf(" Line %d: Error reading dataset data\n",__LINE__);
        goto error;
    } /* end if */

    /* Close dataset */
    if(H5Dclose(dsid)<0) {
        H5_FAILED();
        printf(" Line %d: Can't close dataset\n",__LINE__);
        goto error;
    } /* end if */

    /* Close existing file */
    if(H5Fclose(fid)<0) {
        H5_FAILED();
        printf(" Line %d: Can't close file\n",__LINE__);
        goto error;
    } /* end if */

    /* Re-register the deflate filter */
    /* Verify deflate filter is not registered currently */
    if(H5Zfilter_avail(H5Z_FILTER_DEFLATE)!=FALSE) {
        H5_FAILED();
        printf(" Line %d: Deflate filter available\n",__LINE__);
        goto error;
    } /* end if */

#ifdef H5_HAVE_FILTER_DEFLATE
    /* Register deflate filter (use internal function) */
    if(H5Z_register(H5Z_FILTER_DEFLATE, "deflate", H5Z_filter_deflate)<0) {
        H5_FAILED();
        printf(" Line %d: Can't register deflate filter\n",__LINE__);
        goto error;
    } /* end if */

    /* Verify deflate filter is registered currently */
    if(H5Zfilter_avail(H5Z_FILTER_DEFLATE)!=TRUE) {
        H5_FAILED();
        printf(" Line %d: Deflate filter not available\n",__LINE__);
        goto error;
    } /* end if */
#endif /* H5_HAVE_FILTER_DEFLATE */

    PASSED();
    return 0;

error:
    return -1;
}


/*-------------------------------------------------------------------------
 * Function:    test_onebyte_shuffle
 *
 * Purpose:     Tests the shuffle filter on an array of 8-bit (one byte per
 *              element) values.  The data read back after shuffling and
 *              unshuffling should be identical to the data written.
 *
 * Return:      Success:    0
 *
 *              Failure:    -1
 *
 * Programmer:  Kent Yang
 *              Wednesday, November 13, 2002
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_onebyte_shuffle(hid_t file)
{
    hid_t dataset, space, dc;
    const hsize_t size[2] = {10, 20};
    const hsize_t chunk_size[2] = {10, 20};
    unsigned char orig_data[10][20];
    unsigned char new_data[10][20];
    unsigned level;
#ifndef H5_HAVE_FILTER_SHUFFLE
    const char *not_supported;
#endif
    hsize_t i, j;

#ifndef H5_HAVE_FILTER_SHUFFLE
    not_supported = " Data shuffling is not supported.\n"
        " The shuffling flag was not found when hdf5 was configured.";
#endif

    TESTING("8-bit shuffling (setup)");

    /* Create the data space */
    if ((space = H5Screate_simple(2, size, NULL))<0) goto error;

    /* Use shuffling algorithm with 8-bit */
    if((dc = H5Pcreate(H5P_DATASET_CREATE))<0) goto error;
    if (H5Pset_chunk (dc, 2, chunk_size)<0) goto error;
    level = sizeof(unsigned char);
    if (level != 1) goto error;
    if (H5Pset_shuffle (dc, level)<0) goto error;

    /* Create the dataset */
    if ((dataset = H5Dcreate(file, DSET_ONEBYTE_SHUF_NAME, H5T_NATIVE_UCHAR,
                             space, dc))<0) goto error;

    for (i = 0; i < 10; i++)
        for (j = 0; j < 20; j++)
            orig_data[i][j] = (unsigned char)rand();

#ifdef H5_HAVE_FILTER_SHUFFLE
    PASSED();
#else
    SKIPPED();
    puts(not_supported);
#endif

    /*----------------------------------------------------------------------
     * STEP 1: Test shuffling by setting up a chunked dataset and writing
     * to it.
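     * (The shuffle filter rearranges the bytes of each element within a
     * chunk and is undone on read; with one-byte elements the rearrangement
     * is effectively a no-op, so the values read back must equal orig_data.)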
*---------------------------------------------------------------------- */ TESTING("8-bit shuffling (write)"); if (H5Dwrite(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, orig_data)<0) goto error; #ifdef H5_HAVE_FILTER_SHUFFLE PASSED(); #else SKIPPED(); puts(not_supported); #endif /*---------------------------------------------------------------------- * STEP 2: Try to read the data we just wrote. *---------------------------------------------------------------------- */ TESTING("8-bit shuffling (read)"); /* Read the dataset back */ if (H5Dread(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, new_data)<0) goto error; /* Check that the values read are the same as the values written */ for (i=0; i