author      Binh-Minh Ribler <bmribler@hdfgroup.org>    2010-02-26 16:55:49 (GMT)
committer   Binh-Minh Ribler <bmribler@hdfgroup.org>    2010-02-26 16:55:49 (GMT)
commit      c90711e71362716cd9ab25d6b9167ca49edccfe8 (patch)
tree        ea7dc293ef04279362e74c014934fe0bc18c6c51 /c++/test/dsets.cpp
parent      9f514c2b7de0d07cfcb26d3e2355e070033f7a11 (diff)
[svn-r18335] Description:
Removed header file testhdf5.h from C++ tests to eliminate a non-standard problem on OpenVMS. It wasn't essential.

Platforms tested:
    Linux/32 2.6 (jam)
    FreeBSD/64 6.3 (liberty)
    Ray agreed to test on OpenVMS.
Diffstat (limited to 'c++/test/dsets.cpp')
-rw-r--r--  c++/test/dsets.cpp  44
1 file changed, 21 insertions, 23 deletions
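
For context on the main()-to-test_dset() conversion in the diff below: after this change the C++ dataset test no longer builds as a standalone program, but instead exposes a C-linkage entry point for a test driver to call. The sketch below is only illustrative and is not part of this commit; the driver shown here is an assumption, and it would have to be linked against the compiled dsets.cpp to run.

    // Illustrative driver sketch (not the project's actual test driver).
    // A driver compiled as C sees no C++ name mangling, which is why the
    // test entry point is declared and defined with C linkage.
    #ifdef __cplusplus
    extern "C"
    #endif
    void test_dset(void);      // defined in dsets.cpp by this commit

    int main(void)
    {
        test_dset();           // runs the dataset subtests
        return 0;
    }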
diff --git a/c++/test/dsets.cpp b/c++/test/dsets.cpp
index 6a5f223..e860f34 100644
--- a/c++/test/dsets.cpp
+++ b/c++/test/dsets.cpp
@@ -39,7 +39,6 @@
#endif // H5_NO_STD
#endif
-#include "testhdf5.h" // C test header file
#include "H5Cpp.h" // C++ API header file
#ifndef H5_NO_NAMESPACE
@@ -82,7 +81,7 @@ static size_t filter_bogus(unsigned int flags, size_t cd_nelmts,
static herr_t
test_create( H5File& file)
{
- TESTING("create, open, close");
+ SUBTEST("create, open, close");
// Setting this to NULL for cleaning up in failure situations
DataSet *dataset = NULL;
@@ -203,7 +202,7 @@ static herr_t
test_simple_io( H5File& file)
{
- TESTING("simple I/O");
+ SUBTEST("simple I/O");
int points[100][200];
int check[100][200];
@@ -294,7 +293,7 @@ test_tconv( H5File& file)
in = new char [4*1000000];
//assert (in);
- TESTING("data type conversion");
+ SUBTEST("data type conversion");
// Initialize the dataset
for (int i = 0; i < 1000000; i++) {
@@ -449,7 +448,7 @@ test_compression(H5File& file)
dscreatplist.setDeflate (6);
#ifdef H5_HAVE_FILTER_DEFLATE
- TESTING("compression (setup)");
+ SUBTEST("Compression (setup)");
// Create the dataset
dataset = new DataSet (file.createDataSet
@@ -461,7 +460,7 @@ test_compression(H5File& file)
* STEP 1: Read uninitialized data. It should be zero.
*----------------------------------------------------------------------
*/
- TESTING("compression (uninitialized read)");
+ SUBTEST("Compression (uninitialized read)");
dataset->read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
@@ -483,7 +482,7 @@ test_compression(H5File& file)
* to it.
*----------------------------------------------------------------------
*/
- TESTING("compression (write)");
+ SUBTEST("Compression (write)");
for (i=n=0; i<size[0]; i++)
{
@@ -501,7 +500,7 @@ test_compression(H5File& file)
* STEP 3: Try to read the data we just wrote.
*----------------------------------------------------------------------
*/
- TESTING("compression (read)");
+ SUBTEST("Compression (read)");
// Read the dataset back
dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
@@ -524,7 +523,7 @@ test_compression(H5File& file)
* dataset although we rewrite the whole thing.
*----------------------------------------------------------------------
*/
- TESTING("compression (modify)");
+ SUBTEST("Compression (modify)");
for (i=0; i<size[0]; i++)
{
@@ -555,7 +554,7 @@ test_compression(H5File& file)
* object header.
*----------------------------------------------------------------------
*/
- TESTING("compression (re-open)");
+ SUBTEST("Compression (re-open)");
// close this dataset to reuse the var
delete dataset;
@@ -581,7 +580,7 @@ test_compression(H5File& file)
* boundaries (we know that case already works from above tests).
*----------------------------------------------------------------------
*/
- TESTING("compression (partial I/O)");
+ SUBTEST("Compression (partial I/O)");
const hsize_t hs_size[2] = {4, 50};
const hsize_t hs_offset[2] = {7, 30};
@@ -617,7 +616,7 @@ test_compression(H5File& file)
PASSED();
#else
- TESTING("deflate filter");
+ SUBTEST("deflate filter");
SKIPPED();
cerr << not_supported << endl;
#endif
@@ -627,7 +626,7 @@ test_compression(H5File& file)
* to write and then read the dataset.
*----------------------------------------------------------------------
*/
- TESTING("compression (app-defined method)");
+ SUBTEST("Compression (app-defined method)");
if (H5Zregister (H5Z_BOGUS)<0)
throw Exception("test_compression", "Failed in app-defined method");
@@ -699,7 +698,7 @@ static herr_t
test_multiopen (H5File& file)
{
- TESTING("multi-open with extending");
+ SUBTEST("Multi-open with extending");
DataSpace* space = NULL;
try {
@@ -780,7 +779,7 @@ test_multiopen (H5File& file)
static herr_t
test_types(H5File& file)
{
- TESTING("various datatypes");
+ SUBTEST("Various datatypes");
size_t i;
DataSet* dset = NULL;
@@ -972,13 +971,16 @@ test_types(H5File& file)
*
*-------------------------------------------------------------------------
*/
-int
-main()
+#ifdef __cplusplus
+extern "C"
+#endif
+void test_dset()
{
hid_t fapl_id;
fapl_id = h5_fileaccess(); // in h5test.c, returns a file access template
int nerrors=0; // keep track of number of failures occurr
+
try
{
// Turn of the auto-printing when failure occurs so that we can
@@ -1005,16 +1007,12 @@ main()
}
catch (Exception E)
{
- return(test_report(nerrors, H5std_string(" Dataset")));
+ test_report(nerrors, H5std_string(" Dataset"));
}
// Clean up data file
cleanup_dsets();
-
- // Print out dsets test results
- cerr << endl << endl;
- return(test_report(nerrors, H5std_string(" Dataset")));
-} // main
+} // test_dset
/*-------------------------------------------------------------------------
* Function: cleanup_dsets
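
A note on the TESTING()-to-SUBTEST() renames that account for most of the hunks above: each check in this file now reports as a subtest of a single "Dataset" test rather than as a top-level test of its own. The macros sketched below are purely illustrative stand-ins, assumed for the sake of example; the real definitions come from the HDF5 test support headers and are not shown in this diff.

    // Illustrative stand-ins only; the real SUBTEST/PASSED/SKIPPED macros are
    // provided by the HDF5 test support code and may differ in detail.
    #include <cstdio>

    #define SUBTEST(WHAT)  std::printf("   Subtest: %-52s", (WHAT))
    #define PASSED()       std::puts(" PASSED")
    #define SKIPPED()      std::puts(" -SKIP-")

    int main()
    {
        SUBTEST("create, open, close");   // mirrors the first rename in the diff
        PASSED();
        return 0;
    }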