/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of HDF5.  The full HDF5 copyright notice, including     *
 * terms governing use, modification, and redistribution, is contained in    *
 * the files COPYING and Copyright.html.  COPYING can be found at the root   *
 * of the source code distribution tree; Copyright.html can be found at the  *
 * root level of an installed copy of the electronic HDF5 document set and   *
 * is linked from the top-level documents page.  It can also be found at     *
 * http://hdfgroup.org/HDF5/doc/Copyright.html.  If you do not have          *
 * access to either file, you may request a copy from help@hdfgroup.org.     *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/*
 * Programmer:  Raymond Lu
 *              Thursday, March 23, 2006
 *
 * Purpose:     Check if floating-point data created on OpenVMS, big-endian,
 *              and little-endian machines can be read on the machine running
 *              this test.
 */

#include "h5test.h"
#include "H5srcdir.h"

const char *FILENAME[] = {
    "vms_data",
    "le_data",
    "be_data",
    NULL
};

#define DATASETNAME   "Array"
#define DATASETNAME2  "Scale_offset_float_data_le"
#define DATASETNAME3  "Scale_offset_float_data_be"
#define DATASETNAME4  "Scale_offset_double_data_le"
#define DATASETNAME5  "Scale_offset_double_data_be"
#define DATASETNAME6  "Scale_offset_char_data_le"
#define DATASETNAME7  "Scale_offset_char_data_be"
#define DATASETNAME8  "Scale_offset_short_data_le"
#define DATASETNAME9  "Scale_offset_short_data_be"
#define DATASETNAME10 "Scale_offset_int_data_le"
#define DATASETNAME11 "Scale_offset_int_data_be"
#define DATASETNAME12 "Scale_offset_long_long_data_le"
#define DATASETNAME13 "Scale_offset_long_long_data_be"
#define NX 6
#define NY 6


/*-------------------------------------------------------------------------
 * Function:    read_data
 *
 * Purpose:     Read data from a data file.
 *
 * Return:      Success:    0
 *              Failure:    -1
 *
 * Programmer:  Raymond Lu
 *              21 January 2011
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static int
read_data(char *fname)
{
    const char *pathname = H5_get_srcdir_filename(fname); /* Corrected test file name */
    hid_t       file, dataset;              /* handles */
    double      data_in[NX + 1][NY];        /* input buffer */
    double      data_out[NX + 1][NY];       /* output buffer */
    long long   int_data_in[NX + 1][NY];    /* input buffer */
    long long   int_data_out[NX + 1][NY];   /* output buffer */
    int         i, j;
    unsigned    nerrors = 0;
    const char *not_supported = " Scaleoffset filter is not enabled.";

    /*
     * Open the file.
     */
    if((file = H5Fopen(pathname, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
        TEST_ERROR;

    TESTING("regular dataset");

    /*
     * Open the regular dataset.
     */
    if((dataset = H5Dopen2(file, DATASETNAME, H5P_DEFAULT)) < 0)
        TEST_ERROR;

    /*
     * Data and output buffer initialization.
     */
    for (j = 0; j < NX; j++) {
        for (i = 0; i < NY; i++) {
            data_in[j][i]  = i + j;
            data_out[j][i] = 0;
        }
    }
    for (i = 0; i < NY; i++) {
        data_in[NX][i]  = -2.2;
        data_out[NX][i] = 0;
    }
    /*
     *  0    1    2    3    4    5
     *  1    2    3    4    5    6
     *  2    3    4    5    6    7
     *  3    4    5    6    7    8
     *  4    5    6    7    8    9
     *  5    6    7    8    9   10
     * -2.2 -2.2 -2.2 -2.2 -2.2 -2.2
     */

    /*
     * Read data from hyperslab in the file into the hyperslab in
     * memory and display.
     */
    if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
               data_out) < 0)
        TEST_ERROR;

    /* Check results */
    for (j = 0; j < (NX + 1); j++) {
        for (i = 0; i