author     Binh-Minh Ribler <bmribler@hdfgroup.org>    2005-06-19 21:02:21 (GMT)
committer  Binh-Minh Ribler <bmribler@hdfgroup.org>    2005-06-19 21:02:21 (GMT)
commit     828c6646a1b2bf0c7ddcf551996d44e9fd876715
tree       8299eac5249581602001b1049dad7e64fd171c70 /c++/test
parent     d6410304369bea0644da9ab594f22af6201fa16e
[svn-r10953] Purpose: Updating C++ tests
Description:
Updated various comments/headers.
Platforms tested:
Linux 2.4 (heping)
AIX 5.1 (copper)
Diffstat (limited to 'c++/test')
-rw-r--r--   c++/test/dsets.cpp | 226
-rw-r--r--   c++/test/tfile.cpp | 133
-rw-r--r--   c++/test/th5s.cpp  | 106
3 files changed, 224 insertions, 241 deletions
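Most of the dsets.cpp changes below clean up comments and tighten the exception-based negative tests, in which an operation that must fail is expected to throw and falling through to the next statement is itself reported as an error. A minimal, self-contained sketch of that pattern is shown here; it is not part of the patch, and the file and dataset names are illustrative only.

```cpp
// Sketch of the negative-test pattern used throughout dsets.cpp:
// the second createDataSet must throw, and reaching the statement
// after it is reported as a test failure.
// Not part of the patch; file and dataset names are illustrative.
#include <iostream>
#include "H5Cpp.h"
using namespace H5;

static int test_create_twice()
{
    try {
        H5File file("example.h5", H5F_ACC_TRUNC);

        hsize_t dims[2] = {100, 200};
        DataSpace space(2, dims);
        file.createDataSet("dset", PredType::NATIVE_INT, space);

        try {
            // Creating the same dataset again must fail; if no exception
            // is thrown, report the missing failure ourselves.
            file.createDataSet("dset", PredType::NATIVE_INT, space);
            throw InvalidActionException("H5File::createDataSet",
                    "Library allowed overwrite of existing dataset");
        }
        catch (FileIException&) {}   // expected: dataset already exists

        return 0;
    }
    catch (Exception& e) {
        std::cerr << " FAILED: " << e.getDetailMsg() << std::endl;
        return -1;
    }
}

int main()
{
    return test_create_twice() < 0 ? 1 : 0;
}
```

The same expect-an-exception structure appears in the tfile.cpp changes below (H5F_ACC_EXCL on an existing file, H5F_ACC_TRUNC on a file that is still open).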
diff --git a/c++/test/dsets.cpp b/c++/test/dsets.cpp index 36cfaa7..f721ea1 100644 --- a/c++/test/dsets.cpp +++ b/c++/test/dsets.cpp @@ -15,14 +15,12 @@ /***************************************************************************** FILE dsets.cpp - HDF5 C++ testing the functionalities associated with the - C dataset interface (H5D) + C dataset interface (H5D) EXTERNAL ROUTINES/VARIABLES: These routines are in the test directory of the C library: - h5_reset() -- in h5test.c, resets the library by closing it - h5_fileaccess() -- in h5test.c, returns a file access template - h5_fixname() -- in h5test.c, create a file name from a file base name - h5_cleanup() -- in h5test.c, cleanup temporary test files + h5_reset() -- in h5test.c, resets the library by closing it + h5_fileaccess() -- in h5test.c, returns a file access template ***************************************************************************/ @@ -76,8 +74,9 @@ void cleanup_dsets(void); static herr_t test_create( H5File& file) { - TESTING("create, open, close"); + TESTING("create, open, close"); + // Setting this to NULL for cleaning up in failure situations DataSet *dataset = NULL; try { // Create a data space @@ -88,7 +87,7 @@ test_create( H5File& file) // Create a dataset using the default dataset creation properties. // We're not sure what they are, so we won't check. - dataset = new DataSet (file.createDataSet + dataset = new DataSet (file.createDataSet (DSET_DEFAULT_NAME, PredType::NATIVE_DOUBLE, space)); // Add a comment to the dataset @@ -96,6 +95,7 @@ test_create( H5File& file) // Close the dataset delete dataset; + dataset = NULL; // Try creating a dataset that already exists. This should fail since a // dataset can only be created once. If an exception is not thrown for @@ -106,9 +106,9 @@ test_create( H5File& file) // continuation here, that means no exception has been thrown throw InvalidActionException("H5File::createDataSet", "Library allowed overwrite of existing dataset"); - } - catch (FileIException E) // catching invalid creating dataset - {} // do nothing, exception expected + } + catch (FileIException E) // catching invalid creating dataset + {} // do nothing, exception expected // Open the dataset we created above and then close it. This is one // way to open an existing dataset for accessing. @@ -119,9 +119,9 @@ test_create( H5File& file) // This is another way to open an existing dataset for accessing. DataSet another_dataset(file.openDataSet (DSET_DEFAULT_NAME)); - + // Try opening a non-existent dataset. This should fail so if an - // exception is not thrown for this action by openDataSet, then + // exception is not thrown for this action by openDataSet, then // display failure information and throw an exception. try { dataset = new DataSet (file.openDataSet( "does_not_exist" )); @@ -130,12 +130,10 @@ test_create( H5File& file) throw InvalidActionException("H5File::openDataSet", "Attempted to open a non-existent dataset"); } catch (FileIException E ) // catching creating non-existent dataset - {} // do nothing, exception expected + {} // do nothing, exception expected - /* - * Create a new dataset that uses chunked storage instead of the default - * layout. - */ + // Create a new dataset that uses chunked storage instead of the default + // layout. 
DSetCreatPropList create_parms; hsize_t csize[2]; csize[0] = 5; @@ -165,25 +163,26 @@ test_create( H5File& file) } // catch all other exceptions catch (Exception E) - { - cerr << " FAILED" << endl; - cerr << " <<< " << E.getDetailMsg() << " >>>" << endl << endl; + { + issue_fail_msg(E.getCFuncName(), __LINE__, __FILE__); // clean up and return with failure if (dataset != NULL) delete dataset; return -1; } -} +} // test_create /*------------------------------------------------------------------------- * Function: check_values * * Purpose: Checks a read value against the written value. If they are - * different, the function will - * print out a message and the different values. This function - * is made to reuse the code segment that is used in various - * places throughout test_compression and in test_simple_io. + * different, the function will print out a message and the + * different values. This function is made to reuse the code + * segment that is used in various places throughout + * test_compression and in test_simple_io. Where the C version + * of this code segment "goto error," this function will + * return -1. * * Return: Success: 0 * @@ -237,7 +236,7 @@ test_simple_io( H5File& file) int check[100][200]; int i, j, n; - /* Initialize the dataset */ + // Initialize the dataset for (i = n = 0; i < 100; i++) { for (j = 0; j < 200; j++) { @@ -248,27 +247,27 @@ test_simple_io( H5File& file) char* tconv_buf = new char [1000]; try { - /* Create the data space */ + // Create the data space hsize_t dims[2]; dims[0] = 100; dims[1] = 200; DataSpace space (2, dims, NULL); - /* Create a small conversion buffer to test strip mining */ + // Create a small conversion buffer to test strip mining DSetMemXferPropList xfer; xfer.setBuffer (1000, tconv_buf, NULL); - /* Create the dataset */ + // Create the dataset DataSet dataset (file.createDataSet (DSET_SIMPLE_IO_NAME, PredType::NATIVE_INT, space)); - /* Write the data to the dataset */ + // Write the data to the dataset dataset.write ((void*) points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); - /* Read the dataset back */ + // Read the dataset back dataset.read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); - /* Check that the values read are the same as the values written */ + // Check that the values read are the same as the values written for (i = 0; i < 100; i++) for (j = 0; j < 200; j++) { @@ -282,9 +281,10 @@ test_simple_io( H5File& file) PASSED(); return 0; } // end try + // catch all dataset, space, plist exceptions catch (Exception E) - { + { cerr << " FAILED" << endl; cerr << " <<< " << E.getDetailMsg() << " >>>" << endl << endl; @@ -293,7 +293,7 @@ test_simple_io( H5File& file) delete [] tconv_buf; return -1; } -} +} // test_simple_io /*------------------------------------------------------------------------- * Function: test_tconv @@ -323,7 +323,7 @@ test_tconv( H5File& file) TESTING("data type conversion"); - /* Initialize the dataset */ + // Initialize the dataset for (int i = 0; i < 1000000; i++) { out[i*4+0] = 0x11; out[i*4+1] = 0x22; @@ -333,21 +333,21 @@ test_tconv( H5File& file) try { - /* Create the data space */ + // Create the data space hsize_t dims[1]; dims[0] = 1000000; DataSpace space (1, dims, NULL); - /* Create the data set */ + // Create the data set DataSet dataset (file.createDataSet (DSET_TCONV_NAME, PredType::STD_I32LE, space)); - /* Write the data to the dataset */ + // Write the data to the dataset dataset.write ((void*) out, PredType::STD_I32LE); - /* Read data with 
byte order conversion */ + // Read data with byte order conversion dataset.read ((void*) in, PredType::STD_I32BE); - /* Check */ + // Check for (int i = 0; i < 1000000; i++) { if (in[4*i+0]!=out[4*i+3] || in[4*i+1]!=out[4*i+2] || @@ -367,7 +367,7 @@ test_tconv( H5File& file) // catch all dataset and space exceptions catch (Exception E) - { + { cerr << " FAILED" << endl; cerr << " <<< " << E.getDetailMsg() << " >>>" << endl << endl; @@ -376,7 +376,7 @@ test_tconv( H5File& file) delete [] in; return -1; } -} +} // test_tconv /* This message derives from H5Z */ const H5Z_class_t H5Z_BOGUS[1] = {{ @@ -399,7 +399,7 @@ const H5Z_class_t H5Z_BOGUS[1] = {{ * Failure: 0 * * Programmer: Robb Matzke - * Tuesday, April 21, 1998 + * Tuesday, April 21, 1998 * * Modifications: * @@ -437,7 +437,6 @@ bogus(unsigned int flags, size_t cd_nelmts, * *------------------------------------------------------------------------- */ - static herr_t test_compression(H5File& file) { @@ -447,7 +446,7 @@ test_compression(H5File& file) int check[100][200]; hsize_t i, j, n; - /* Initialize the dataset */ + // Initialize the dataset for (i = n = 0; i < 100; i++) { for (j = 0; j < 200; j++) { @@ -456,22 +455,18 @@ test_compression(H5File& file) } char* tconv_buf = new char [1000]; DataSet* dataset = NULL; - try { const hsize_t size[2] = {100, 200}; - /* Create the data space */ + // Create the data space DataSpace space1(2, size, NULL); - /* - * Create a small conversion buffer to test strip mining. We - * might as well test all we can! - */ + // Create a small conversion buffer to test strip mining DSetMemXferPropList xfer; xfer.setBuffer (1000, tconv_buf, NULL); - /* Use chunked storage with compression */ + // Use chunked storage with compression DSetCreatPropList dscreatplist; const hsize_t chunk_size[2] = {2, 25}; @@ -479,9 +474,9 @@ test_compression(H5File& file) dscreatplist.setDeflate (6); #ifdef H5_HAVE_FILTER_DEFLATE - TESTING("compression (setup)"); + TESTING("compression (setup)"); - /* Create the dataset */ + // Create the dataset dataset = new DataSet (file.createDataSet (DSET_COMPRESS_NAME, PredType::NATIVE_INT, space1, dscreatplist)); @@ -491,7 +486,7 @@ test_compression(H5File& file) * STEP 1: Read uninitialized data. It should be zero. *---------------------------------------------------------------------- */ - TESTING("compression (uninitialized read)"); + TESTING("compression (uninitialized read)"); dataset->read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); @@ -506,14 +501,14 @@ test_compression(H5File& file) } } } - PASSED(); + PASSED(); /*---------------------------------------------------------------------- * STEP 2: Test compression by setting up a chunked dataset and writing * to it. *---------------------------------------------------------------------- */ - TESTING("compression (write)"); + TESTING("compression (write)"); for (i=n=0; i<size[0]; i++) { @@ -525,18 +520,18 @@ test_compression(H5File& file) dataset->write ((void*) points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); - PASSED(); + PASSED(); /*---------------------------------------------------------------------- * STEP 3: Try to read the data we just wrote. 
*---------------------------------------------------------------------- */ - TESTING("compression (read)"); + TESTING("compression (read)"); - /* Read the dataset back */ + // Read the dataset back dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); - /* Check that the values read are the same as the values written */ + // Check that the values read are the same as the values written for (i = 0; i < size[0]; i++) for (j = 0; j < size[1]; j++) { @@ -545,7 +540,7 @@ test_compression(H5File& file) throw Exception("test_compression", "Failed in read"); } - PASSED(); + PASSED(); /*---------------------------------------------------------------------- * STEP 4: Write new data over the top of the old data. The new data is @@ -554,7 +549,7 @@ test_compression(H5File& file) * dataset although we rewrite the whole thing. *---------------------------------------------------------------------- */ - TESTING("compression (modify)"); + TESTING("compression (modify)"); for (i=0; i<size[0]; i++) { @@ -565,10 +560,10 @@ test_compression(H5File& file) } dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); - /* Read the dataset back and check it */ + // Read the dataset back and check it dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); - /* Check that the values read are the same as the values written */ + // Check that the values read are the same as the values written for (i = 0; i < size[0]; i++) for (j = 0; j < size[1]; j++) { @@ -577,7 +572,7 @@ test_compression(H5File& file) throw Exception("test_compression", "Failed in modify"); } - PASSED(); + PASSED(); /*---------------------------------------------------------------------- * STEP 5: Close the dataset and then open it and read it again. This @@ -585,14 +580,15 @@ test_compression(H5File& file) * object header. *---------------------------------------------------------------------- */ - TESTING("compression (re-open)"); - // close this dataset + TESTING("compression (re-open)"); + + // close this dataset to reuse the var delete dataset; dataset = new DataSet (file.openDataSet (DSET_COMPRESS_NAME)); dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); - /* Check that the values read are the same as the values written */ + // Check that the values read are the same as the values written for (i = 0; i < size[0]; i++) for (j = 0; j < size[1]; j++) { @@ -601,7 +597,7 @@ test_compression(H5File& file) throw Exception("test_compression", "Failed in re-open"); } - PASSED(); + PASSED(); /*---------------------------------------------------------------------- @@ -610,7 +606,7 @@ test_compression(H5File& file) * boundaries (we know that case already works from above tests). 
*---------------------------------------------------------------------- */ - TESTING("compression (partial I/O)"); + TESTING("compression (partial I/O)"); const hsize_t hs_size[2] = {4, 50}; const hsize_t hs_offset[2] = {7, 30}; @@ -623,7 +619,7 @@ test_compression(H5File& file) dataset->write ((void*)points, PredType::NATIVE_INT, space1, space1, xfer); dataset->read ((void*)check, PredType::NATIVE_INT, space1, space1, xfer); - /* Check that the values read are the same as the values written */ + // Check that the values read are the same as the values written for (i=0; i<hs_size[0]; i++) { for (j=0; j<hs_size[1]; j++) { if (points[hs_offset[0]+i][hs_offset[1]+j] != @@ -643,12 +639,12 @@ test_compression(H5File& file) delete dataset; dataset = NULL; - PASSED(); + PASSED(); #else - TESTING("deflate filter"); - SKIPPED(); - cerr << not_supported << endl; + TESTING("deflate filter"); + SKIPPED(); + cerr << not_supported << endl; #endif /*---------------------------------------------------------------------- @@ -656,12 +652,12 @@ test_compression(H5File& file) * to write and then read the dataset. *---------------------------------------------------------------------- */ - TESTING("compression (app-defined method)"); + TESTING("compression (app-defined method)"); if (H5Zregister (H5Z_BOGUS)<0) throw Exception("test_compression", "Failed in app-defined method"); if (H5Pset_filter (dscreatplist.getId(), H5Z_FILTER_BOGUS, 0, 0, NULL)<0) - throw Exception("test_compression", "Failed in app-defined method"); + throw Exception("test_compression", "Failed in app-defined method"); dscreatplist.setFilter (H5Z_FILTER_BOGUS, 0, 0, NULL); DataSpace space2 (2, size, NULL); @@ -670,7 +666,7 @@ test_compression(H5File& file) dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); - /* Check that the values read are the same as the values written */ + // Check that the values read are the same as the values written for (i = 0; i < size[0]; i++) for (j = 0; j < size[1]; j++) { @@ -691,8 +687,8 @@ test_compression(H5File& file) } // end try // catch all dataset, file, space, and plist exceptions - catch (Exception E) - { + catch (Exception E) + { cerr << " FAILED" << endl; cerr << " <<< " << E.getDetailMsg() << " >>>" << endl << endl; @@ -703,7 +699,7 @@ test_compression(H5File& file) delete [] tconv_buf; return -1; } -} +} // test_compression /*------------------------------------------------------------------------- * Function: test_multiopen @@ -758,7 +754,7 @@ test_multiopen (H5File& file) cur_size[0] = 20; dset1.extend (cur_size); - /* Get the size from the second handle */ + // Get the size from the second handle space = new DataSpace (dset2.getSpace()); hsize_t tmp_size[1]; @@ -777,16 +773,17 @@ test_multiopen (H5File& file) } // end try block // catch all dataset, file, space, and plist exceptions - catch (Exception E) + catch (Exception E) { cerr << " FAILED" << endl; cerr << " <<< " << E.getDetailMsg() << " >>>" << endl << endl; + // clean up and return with failure if (space != NULL) delete space; return -1; } -} +} // test_multiopen /*------------------------------------------------------------------------- @@ -799,7 +796,7 @@ test_multiopen (H5File& file) * Failure: -1 * * Programmer: Binh-Minh Ribler (using C version) - * February 17, 2001 + * February 17, 2001 * * Modifications: * @@ -852,8 +849,7 @@ test_types(H5File& file) for (i=0; i<sizeof buf; i++) buf[i] = (unsigned 
char)0xff ^ (unsigned char)i; - // Write data from buf using all default dataspaces and property - // list + // Write data from buf using all default dataspaces and property list dset->write (buf, type); // no failure in bitfield_1, close this dataset @@ -862,9 +858,9 @@ test_types(H5File& file) // catch exceptions thrown in try block of bitfield_1 catch (Exception E) - { + { cerr << " FAILED" << endl; - cerr << " <<< " << "bitfield_1: " << E.getFuncName() + cerr << " <<< " << "bitfield_1: " << E.getFuncName() << " - " << E.getDetailMsg() << " >>>" << endl << endl; if (dset != NULL) delete dset; @@ -882,7 +878,7 @@ test_types(H5File& file) for (i=0; i<sizeof(buf); i++) buf[i] = (unsigned char)0xff ^ (unsigned char)i; - // Write data from buf using all default dataspaces and property + // Write data from buf using all default dataspaces and property // list; if writing fails, deallocate dset and return. dset->write (buf, type); @@ -895,14 +891,14 @@ test_types(H5File& file) // catch exceptions thrown in try block of bitfield_2 catch (Exception E) { cerr << " FAILED" << endl; - cerr << " <<< " << "bitfield_2: " << E.getFuncName() + cerr << " <<< " << "bitfield_2: " << E.getFuncName() << " - " << E.getDetailMsg() << " >>>" << endl << endl; if (dset != NULL) - delete dset; + delete dset; throw E; // propagate the exception } - /* opaque_1 */ + /* opaque_1 */ DataType* optype = NULL; try { // opaque_1 block optype = new DataType(H5T_OPAQUE, 1); @@ -915,7 +911,7 @@ test_types(H5File& file) for (i=0; i<sizeof buf; i++) buf[i] = (unsigned char)0xff ^ (unsigned char)i; - // Write data from buf using all default dataspaces and property + // Write data from buf using all default dataspaces and property // list; if writing fails, deallocate dset and return. dset->write (buf, *optype); @@ -925,14 +921,14 @@ test_types(H5File& file) } // end try block of opaque_1 // catch exceptions thrown in try block of opaque_1 - catch (Exception E) { + catch (Exception E) { cerr << " FAILED" << endl; - cerr << " <<< " << "opaque_1: " << E.getFuncName() + cerr << " <<< " << "opaque_1: " << E.getFuncName() << " - " << E.getDetailMsg() << " >>>" << endl << endl; if (dset != NULL) - delete dset; + delete dset; if (optype != NULL) - delete optype; + delete optype; throw E; // propagate the exception } @@ -946,9 +942,9 @@ test_types(H5File& file) // Fill buffer for (i=0; i<sizeof(buf); i++) - buf[i] = (unsigned char)0xff ^ (unsigned char)i; + buf[i] = (unsigned char)0xff ^ (unsigned char)i; - // Write data from buf using all default dataspaces and property + // Write data from buf using all default dataspaces and property // list; if writing fails, deallocate dset and return. 
dset->write (buf, *optype); @@ -957,26 +953,27 @@ test_types(H5File& file) delete optype; optype = NULL; } //end try block of opaque_2 + // catch exceptions thrown in try block of opaque_2 catch (Exception E) { cerr << " FAILED" << endl; - cerr << " <<< " << "opaque_2: " << E.getFuncName() + cerr << " <<< " << "opaque_2: " << E.getFuncName() << " - " << E.getDetailMsg() << " >>>" << endl << endl; if (dset != NULL) - delete dset; + delete dset; if (optype != NULL) - delete optype; + delete optype; throw E; // propagate the exception } - PASSED(); - return 0; + PASSED(); + return 0; } // end top try block - catch (Exception E) + catch (Exception E) { - return -1; + return -1; } -} +} // test_types /*------------------------------------------------------------------------- * Function: main @@ -995,6 +992,8 @@ test_types(H5File& file) * - moved h5_cleanup to outside of try block because * dataset.h5 cannot be removed until "file" is out of * scope and dataset.h5 is closed. + * Feb 20, 05: + * - cleanup_dsets took care of the cleanup now. * *------------------------------------------------------------------------- */ @@ -1030,11 +1029,6 @@ main(void) nerrors += test_compression(file)<0 ?1:0; nerrors += test_multiopen (file)<0 ?1:0; nerrors += test_types(file)<0 ?1:0; - - // increment the ref count of this property list so that the - // property list id won't be closed when fapl goes out of scope. - // This is a bad hack, but I want to use existing routine h5_cleanup! - fapl.incRefCount(); } catch (Exception E) { @@ -1047,7 +1041,7 @@ main(void) // Print out dsets test results cerr << endl << endl; return(test_report(nerrors, string(" Dataset"))); -} +} // main /*------------------------------------------------------------------------- * Function: cleanup_dsets @@ -1066,5 +1060,5 @@ void cleanup_dsets(void) { remove(FILE1.c_str()); -} /* cleanup_dsets */ +} // cleanup_dsets diff --git a/c++/test/tfile.cpp b/c++/test/tfile.cpp index 794bda0..5314741 100644 --- a/c++/test/tfile.cpp +++ b/c++/test/tfile.cpp @@ -79,7 +79,7 @@ const string FILE4("tfile4.h5"); * cases. Since there are no operator<< for 'long long' * or int64 in VS C++ ostream, I casted the hsize_t values * passed to verify_val to 'long' as well. If problems - * arises later, this will have to be specificly handled + * arises later, this will have to be specificly handled * with a special routine. 
* *------------------------------------------------------------------------- @@ -87,14 +87,14 @@ const string FILE4("tfile4.h5"); static void test_file_create(void) { - /* Output message about test being performed */ + // Output message about test being performed MESSAGE(5, ("Testing File Creation I/O\n")); - /* Test create with various sequences of H5F_ACC_EXCL and */ - /* H5F_ACC_TRUNC flags */ + // Test create with various sequences of H5F_ACC_EXCL and + // H5F_ACC_TRUNC flags - /* Create with H5F_ACC_EXCL */ - /* First ensure the file does not exist */ + // Create with H5F_ACC_EXCL + // First ensure the file does not exist remove(FILE1.c_str()); // Setting this to NULL for cleaning up in failure situations @@ -109,21 +109,22 @@ test_file_create(void) H5File file2 (FILE1, H5F_ACC_TRUNC); // should throw E // Should FAIL but didn't, so throw an invalid action exception - throw InvalidActionException("H5File constructor", "Attempted to create an existing file."); + throw InvalidActionException("H5File constructor", "Attempted to create an existing file."); } - catch( FileIException E ) {} // do nothing, FAIL expected + catch( FileIException E ) // catch truncating existing file + {} // do nothing, FAIL expected // Close file1 delete file1; file1 = NULL; - // Try again with H5F_ACC_EXCL. This should fail because the file already - // exists from the previous steps. + // Try again with H5F_ACC_EXCL. This should fail because the file + // already exists from the previous steps. try { H5File file2(FILE1, H5F_ACC_EXCL); // should throw E // Should FAIL but didn't, so throw an invalid action exception - throw InvalidActionException("H5File constructor", "File already exists."); + throw InvalidActionException("H5File constructor", "File already exists."); } catch( FileIException E ) // catching creating existing file {} // do nothing, FAIL expected @@ -131,15 +132,13 @@ test_file_create(void) // Test create with H5F_ACC_TRUNC. This will truncate the existing file. file1 = new H5File (FILE1, H5F_ACC_TRUNC); - /* - * Try to truncate first file again. This should fail because file1 is the - * same file and is currently open. - */ + // Try to truncate first file again. This should fail because file1 + // is the same file and is currently open. 
try { H5File file2 (FILE1, H5F_ACC_TRUNC); // should throw E // Should FAIL but didn't, so throw an invalid action exception - throw InvalidActionException("H5File constructor", "H5F_ACC_TRUNC attempt on an opened file."); + throw InvalidActionException("H5File constructor", "H5F_ACC_TRUNC attempt on an opened file."); } catch( FileIException E ) // catching truncating opened file {} // do nothing, FAIL expected @@ -150,7 +149,7 @@ test_file_create(void) H5File file3 (FILE1, H5F_ACC_EXCL); // should throw E // Should FAIL but didn't, so throw an invalid action exception - throw InvalidActionException("H5File constructor", "H5F_ACC_EXCL attempt on an existing file."); + throw InvalidActionException("H5File constructor", "H5F_ACC_EXCL attempt on an existing file."); } catch( FileIException E ) // catching H5F_ACC_EXCL on existing file {} // do nothing, FAIL expected @@ -196,10 +195,10 @@ test_file_create(void) FileCreatPropList* tmpl1 = NULL; try { - /* Create a new file with a non-standard file-creation template */ + // Create a new file with a non-standard file-creation template tmpl1 = new FileCreatPropList; - /* Set the new file-creation parameters */ + // Set the new file-creation parameters tmpl1->setUserblock (F2_USERBLOCK_SIZE); tmpl1->setSizes( F2_OFFSET_SIZE, F2_LENGTH_SIZE ); tmpl1->setSymk( F2_SYM_INTERN_K, F2_SYM_LEAF_K ); @@ -208,48 +207,46 @@ test_file_create(void) // params. H5File file2( FILE2, H5F_ACC_TRUNC, *tmpl1 ); - /* Release file-creation template */ + // Release file-creation template delete tmpl1; tmpl1 = NULL; - /* Get the file-creation template */ + // Get the file-creation template tmpl1 = new FileCreatPropList (file2.getCreatePlist()); - /* Get the file-creation parameters */ + // Get the file-creation parameters hsize_t ublock = tmpl1->getUserblock(); verify_val((long)ublock, (long)F2_USERBLOCK_SIZE, "FileCreatPropList::getUserblock", __LINE__, __FILE__); - size_t parm1, parm2; /*file-creation parameters */ + size_t parm1, parm2; // file-creation parameters tmpl1->getSizes( parm1, parm2); verify_val(parm1, F2_OFFSET_SIZE, "FileCreatPropList::getSizes", __LINE__, __FILE__); verify_val(parm2, F2_LENGTH_SIZE, "FileCreatPropList::getSizes", __LINE__, __FILE__); - unsigned iparm1,iparm2; /*file-creation parameters */ + unsigned iparm1,iparm2; // file-creation parameters tmpl1->getSymk( iparm1, iparm2); verify_val(iparm1, F2_SYM_INTERN_K, "FileCreatPropList::getSymk", __LINE__, __FILE__); verify_val(iparm2, F2_SYM_LEAF_K, "FileCreatPropList::getSymk", __LINE__, __FILE__); - /* Clone the file-creation template */ + // Clone the file-creation template FileCreatPropList tmpl2; tmpl2.copy (*tmpl1); - /* Dynamically release file-creation template */ + // Release file-creation template delete tmpl1; tmpl1 = NULL; - /* Set the new file-creation parameter */ + // Set the new file-creation parameter tmpl2.setUserblock( F3_USERBLOCK_SIZE ); - /* - * Try to create second file, with non-standard file-creation template - * params - */ + // Try to create second file, with non-standard file-creation template + // params H5File file3( FILE3, H5F_ACC_TRUNC, tmpl2 ); - /* Get the file-creation template */ + // Get the file-creation template tmpl1 = new FileCreatPropList (file3.getCreatePlist()); - /* Get the file-creation parameters */ + // Get the file-creation parameters ublock = tmpl1->getUserblock(); verify_val((long)ublock, (long)F3_USERBLOCK_SIZE, "FileCreatPropList::getUserblock", __LINE__, __FILE__); @@ -261,17 +258,17 @@ test_file_create(void) verify_val(iparm1, 
F3_SYM_INTERN_K, "FileCreatPropList::getSymk", __LINE__, __FILE__); verify_val(iparm2, F3_SYM_LEAF_K, "FileCreatPropList::getSymk", __LINE__, __FILE__); - /* Dynamically release file-creation template */ + // Release file-creation template delete tmpl1; } // catch all exceptions catch (Exception E) { issue_fail_msg(E.getCFuncName(), __LINE__, __FILE__, E.getCDetailMsg()); - if (tmpl1 != NULL) // clean up + if (tmpl1 != NULL) // clean up delete tmpl1; } -} /* test_file_create() */ +} // test_file_create() /*------------------------------------------------------------------------- @@ -289,7 +286,7 @@ test_file_create(void) * cases. Since there are no operator<< for 'long long' * or int64 in VS C++ ostream, I casted the hsize_t values * passed to verify_val to 'long' as well. If problems - * arises later, this will have to be specificly handled + * arises later, this will have to be specificly handled * with a special routine. * *------------------------------------------------------------------------- @@ -297,27 +294,27 @@ test_file_create(void) static void test_file_open(void) { - /* Output message about test being performed */ + // Output message about test being performed MESSAGE(5, ("Testing File Opening I/O\n")); try { - /* Open first file */ + // Open first file H5File file1 (FILE2, H5F_ACC_RDWR ); - /* Get the file-creation template */ + // Get the file-creation template FileCreatPropList tmpl1 = file1.getCreatePlist(); - /* Get the file-creation parameters */ + // Get the file-creation parameters hsize_t ublock = tmpl1.getUserblock(); verify_val((long)ublock, (long)F2_USERBLOCK_SIZE, "FileCreatPropList::getUserblock", __LINE__, __FILE__); - size_t parm1, parm2; /*file-creation parameters */ + size_t parm1, parm2; // file-creation parameters tmpl1.getSizes( parm1, parm2); verify_val(parm1, F2_OFFSET_SIZE, "FileCreatPropList::getSizes", __LINE__, __FILE__); verify_val(parm2, F2_LENGTH_SIZE, "FileCreatPropList::getSizes", __LINE__, __FILE__); - unsigned iparm1,iparm2; /*file-creation parameters */ + unsigned iparm1,iparm2; // file-creation parameters tmpl1.getSymk( iparm1, iparm2); verify_val(iparm1, F2_SYM_INTERN_K, "FileCreatPropList::getSymk", __LINE__, __FILE__); verify_val(iparm2, F2_SYM_LEAF_K, "FileCreatPropList::getSymk", __LINE__, __FILE__); @@ -326,7 +323,7 @@ test_file_open(void) catch( Exception E ) { issue_fail_msg(E.getCFuncName(), __LINE__, __FILE__, E.getCDetailMsg()); } -} /* test_file_open() */ +} // test_file_open() /*------------------------------------------------------------------------- @@ -346,7 +343,7 @@ test_file_open(void) static void test_file_size(void) { - /* Output message about test being performed */ + // Output message about test being performed MESSAGE(5, ("Testing File Size\n")); hid_t fapl_id; @@ -360,7 +357,7 @@ test_file_size(void) // Set to sec2 driver. Do we want to test other file drivers? // They're not tested in C++. // File drivers seem not implemented. - //fapl.setSec2(); + // fapl.setSec2(); // Create a file H5File file4( FILE4, H5F_ACC_TRUNC, FileCreatPropList::DEFAULT, fapl); @@ -380,7 +377,7 @@ test_file_size(void) // use C test utility routine to close property list. 
H5Pclose(fapl_id); -} /* test_file_size() */ +} // test_file_size() /*------------------------------------------------------------------------- @@ -405,7 +402,7 @@ const string DSETNAME ("dataset"); const string ATTRNAME ("attribute"); const string DTYPENAME ("compound"); -/* Compound datatype */ +// Compound datatype typedef struct s1_t { unsigned int a; float b; @@ -414,7 +411,7 @@ typedef struct s1_t { static void test_file_name() { - /* Output message about test being performed */ + // Output message about test being performed MESSAGE(5, ("Testing File Name\n")); string file_name; @@ -426,42 +423,42 @@ test_file_name() file_name = file4.getFileName(); verify_val(file_name, FILE4, "H5File::getFileName", __LINE__, __FILE__); - /* Create a group in the root group */ + // Create a group in the root group Group group(file4.createGroup(GROUPNAME, 0)); - /* Get and verify file name */ + // Get and verify file name file_name = group.getFileName(); verify_val(file_name, FILE4, "Group::getFileName", __LINE__, __FILE__); - /* Create the data space */ + // Create the data space hsize_t dims[RANK] = {NX, NY}; DataSpace space(RANK, dims); - /* Create a new dataset */ + // Create a new dataset DataSet dataset(file4.createDataSet (DSETNAME, PredType::NATIVE_INT, space)); - /* Get and verify file name */ + // Get and verify file name file_name = dataset.getFileName(); verify_val(file_name, FILE4, "DataSet::getFileName", __LINE__, __FILE__); - /* Create an attribute for the dataset */ + // Create an attribute for the dataset Attribute attr(dataset.createAttribute(ATTRNAME, PredType::NATIVE_INT, space)); - /* Get and verify file name */ + // Get and verify file name file_name = attr.getFileName(); verify_val(file_name, FILE4, "Attribute::getFileName", __LINE__, __FILE__); - /* Create a compound datatype */ + // Create a compound datatype CompType comp_type (sizeof(s1_t)); - /* Insert fields */ + // Insert fields comp_type.insertMember("a", HOFFSET(s1_t, a), PredType::NATIVE_INT); comp_type.insertMember("b", HOFFSET(s1_t, b), PredType::NATIVE_FLOAT); - /* Save it on file */ + // Save it on file comp_type.commit(file4, DTYPENAME); - /* Get and verify file name */ + // Get and verify file name comp_type.getFileName(); verify_val(file_name, FILE4, "CompType::getFileName", __LINE__, __FILE__); } // end of try block @@ -470,7 +467,7 @@ test_file_name() issue_fail_msg(E.getCFuncName(), __LINE__, __FILE__, E.getCDetailMsg()); } -} /* test_file_name() */ +} // test_file_name() /*------------------------------------------------------------------------- @@ -490,14 +487,14 @@ test_file_name() void test_file(void) { - /* Output message about test being performed */ + // Output message about test being performed MESSAGE(5, ("Testing File I/O operations\n")); - test_file_create(); /* Test file creation (also creation templates) */ - test_file_open(); /* Test file opening */ - test_file_size(); /* Test file size */ - test_file_name(); /* Test getting file's name */ -} /* test_file() */ + test_file_create(); // Test file creation (also creation templates) + test_file_open(); // Test file opening + test_file_size(); // Test file size + test_file_name(); // Test getting file's name +} // test_file() /*------------------------------------------------------------------------- @@ -520,4 +517,4 @@ cleanup_file(void) remove(FILE2.c_str()); remove(FILE3.c_str()); remove(FILE4.c_str()); -} /* cleanup_file */ +} // cleanup_file diff --git a/c++/test/th5s.cpp b/c++/test/th5s.cpp index cccb674..5c53963 100644 --- a/c++/test/th5s.cpp 
+++ b/c++/test/th5s.cpp @@ -113,32 +113,29 @@ test_h5s_basic(void) hsize_t dims3[H5S_MAX_RANK+1]; hsize_t tmax[4]; - /* Output message about test being performed */ + // Output message about test being performed MESSAGE(5, ("Testing Dataspace Manipulation\n")); try { // beginning of first try block - /* Create file - removed this since the following operations don't - need the file to be opened */ - // Create simple dataspace sid1 DataSpace sid1 (SPACE1_RANK, dims1 ); // Get simple extent npoints of the dataspace sid1 and verify it - hssize_t n; /* Number of dataspace elements */ + hssize_t n; // Number of dataspace elements n = sid1.getSimpleExtentNpoints(); verify_val((long)n, (long)(SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3), "DataSpace::getSimpleExtentNpoints", __LINE__, __FILE__); // Get the logical rank of dataspace sid1 and verify it - int rank; /* Logical rank of dataspace */ + int rank; // Logical rank of dataspace rank = sid1.getSimpleExtentNdims(); verify_val(rank, SPACE1_RANK, "DataSpace::getSimpleExtentNdims", __LINE__, __FILE__); // Retrieves dimension size of dataspace sid1 and verify it - int ndims; /* Number of dimensions */ - hsize_t tdims[4]; /* Dimension array to test with */ + int ndims; // Number of dimensions + hsize_t tdims[4]; // Dimension array to test with ndims = sid1.getSimpleExtentDims( tdims ); verify_val(HDmemcmp(tdims, dims1, SPACE1_RANK * sizeof(unsigned)), 0, "DataSpace::getSimpleExtentDims", __LINE__, __FILE__); @@ -240,7 +237,7 @@ test_h5s_basic(void) { issue_fail_msg(E.getCFuncName(), __LINE__, __FILE__, E.getCDetailMsg()); } -} /* test_h5s_basic() */ +} // test_h5s_basic() /*------------------------------------------------------------------------- * @@ -265,8 +262,7 @@ test_h5s_basic(void) static void test_h5s_scalar_write(void) { - - /* Output message about test being performed */ + // Output message about test being performed MESSAGE(5, ("Testing Scalar Dataspace Writing\n")); try @@ -274,30 +270,30 @@ test_h5s_scalar_write(void) // Create file H5File fid1(DATAFILE, H5F_ACC_TRUNC); - /* Create scalar dataspace */ + // Create scalar dataspace DataSpace sid1(SPACE3_RANK, NULL); //n = H5Sget_simple_extent_npoints(sid1); - hssize_t n; /* Number of dataspace elements */ + hssize_t n; // Number of dataspace elements n = sid1.getSimpleExtentNpoints(); verify_val((long)n, 1, "DataSpace::getSimpleExtentNpoints", __LINE__, __FILE__); - int rank; /* Logical rank of dataspace */ + int rank; // Logical rank of dataspace rank = sid1.getSimpleExtentNdims(); verify_val(rank, SPACE3_RANK, "DataSpace::getSimpleExtentNdims", __LINE__, __FILE__); // Retrieves dimension size of dataspace sid1 and verify it - int ndims; /* Number of dimensions */ - hsize_t tdims[4]; /* Dimension array to test with */ + int ndims; // Number of dimensions + hsize_t tdims[4]; // Dimension array to test with ndims = sid1.getSimpleExtentDims( tdims ); verify_val(ndims, 0, "DataSpace::getSimpleExtentDims", __LINE__, __FILE__); - /* Verify extent type */ - H5S_class_t ext_type; /* Extent type */ + // Verify extent type + H5S_class_t ext_type; // Extent type ext_type = sid1.getSimpleExtentType(); verify_val(ext_type, H5S_SCALAR, "DataSpace::getSimpleExtentType", __LINE__, __FILE__); - /* Create a dataset */ + // Create a dataset DataSet dataset = fid1.createDataSet("Dataset1", PredType::NATIVE_UINT,sid1); dataset.write(&space3_data, PredType::NATIVE_UINT); @@ -306,7 +302,7 @@ test_h5s_scalar_write(void) { issue_fail_msg(E.getCFuncName(), __LINE__, __FILE__, E.getCDetailMsg()); } -} /* 
test_h5s_scalar_write() */ +} // test_h5s_scalar_write() /*------------------------------------------------------------------------- * @@ -331,17 +327,17 @@ test_h5s_scalar_write(void) static void test_h5s_scalar_read(void) { - hsize_t tdims[4]; /* Dimension array to test with */ + hsize_t tdims[4]; // Dimension array to test with - /* Output message about test being performed */ + // Output message about test being performed MESSAGE(5, ("Testing Scalar Dataspace Reading\n")); try { - /* Create file */ + // Create file H5File fid1(DATAFILE, H5F_ACC_RDWR); - /* Create a dataset */ + // Create a dataset DataSet dataset = fid1.openDataSet("Dataset1"); DataSpace sid1 = dataset.getSpace(); @@ -357,7 +353,7 @@ test_h5s_scalar_read(void) ndims = sid1.getSimpleExtentDims(tdims); verify_val(ndims, 0, "DataSpace::getSimpleExtentDims", __LINE__, __FILE__); - unsigned rdata; /* Scalar data read in */ + unsigned rdata; // Scalar data read in dataset.read(&rdata, PredType::NATIVE_UINT); verify_val(rdata, space3_data, "DataSet::read", __LINE__, __FILE__); } // end of try block @@ -367,7 +363,7 @@ test_h5s_scalar_read(void) issue_fail_msg(E.getCFuncName(), __LINE__, __FILE__, E.getCDetailMsg()); } -} /* test_h5s_scalar_read() */ +} // test_h5s_scalar_read() /*------------------------------------------------------------------------- * @@ -392,8 +388,7 @@ test_h5s_scalar_read(void) static void test_h5s_null(void) { - - /* Output message about test being performed */ + // Output message about test being performed MESSAGE(5, ("Testing Null Dataspace Writing\n")); try @@ -401,11 +396,10 @@ test_h5s_null(void) // Create file H5File fid1(DATAFILE, H5F_ACC_TRUNC); - /* Create scalar dataspace */ + // Create scalar dataspace DataSpace sid1(H5S_NULL); - //n = H5Sget_simple_extent_npoints(sid1); - hssize_t n; /* Number of dataspace elements */ + hssize_t n; // Number of dataspace elements n = sid1.getSimpleExtentNpoints(); verify_val((long)n, 0, "DataSpace::getSimpleExtentNpoints", __LINE__, __FILE__); @@ -423,7 +417,7 @@ test_h5s_null(void) { issue_fail_msg(E.getCFuncName(), __LINE__, __FILE__, E.getCDetailMsg()); } -} /* test_h5s_null() */ +} // test_h5s_null() /*------------------------------------------------------------------------- * @@ -449,16 +443,15 @@ test_h5s_null(void) static void test_h5s_compound_scalar_write(void) { - - /* Output message about test being performed */ + // Output message about test being performed MESSAGE(5, ("Testing Compound Dataspace Writing\n")); try { - /* Create file */ + // Create file H5File fid1(DATAFILE, H5F_ACC_TRUNC); - /* Create the compound datatype. */ + // Create the compound datatype. 
CompType tid1(sizeof(struct space4_struct)); space4_field1_off=HOFFSET(struct space4_struct, c1); tid1.insertMember(SPACE4_FIELDNAME1, space4_field1_off, @@ -473,7 +466,7 @@ test_h5s_compound_scalar_write(void) tid1.insertMember(SPACE4_FIELDNAME4, space4_field4_off, PredType::NATIVE_SCHAR); - /* Create scalar dataspace */ + // Create scalar dataspace DataSpace sid1(SPACE3_RANK, NULL); // Get the number of dataspace elements @@ -484,11 +477,11 @@ test_h5s_compound_scalar_write(void) int ndims = sid1.getSimpleExtentNdims(); verify_val(ndims, SPACE3_RANK, "DataSpace::getSimpleExtentNdims", __LINE__, __FILE__); - hsize_t tdims[4]; /* Dimension array to test with */ + hsize_t tdims[4]; // Dimension array to test with ndims = sid1.getSimpleExtentDims(tdims); verify_val(ndims, 0, "DataSpace::getSimpleExtentDims", __LINE__, __FILE__); - /* Create a dataset */ + // Create a dataset DataSet dataset = fid1.createDataSet("Dataset1", tid1, sid1); dataset.write(&space4_data, tid1); @@ -498,8 +491,7 @@ test_h5s_compound_scalar_write(void) // all the exceptions caused by negative returned values by C APIs issue_fail_msg(E.getCFuncName(), __LINE__, __FILE__, E.getCDetailMsg()); } - -} /* test_h5s_compound_scalar_write() */ +} // test_h5s_compound_scalar_write() /*------------------------------------------------------------------------- * @@ -525,16 +517,16 @@ test_h5s_compound_scalar_write(void) static void test_h5s_compound_scalar_read(void) { - hsize_t tdims[4]; /* Dimension array to test with */ + hsize_t tdims[4]; // Dimension array to test with - /* Output message about test being performed */ + // Output message about test being performed MESSAGE(5, ("Testing Compound Dataspace Reading\n")); try { - /* Create file */ + // Create file H5File fid1(DATAFILE, H5F_ACC_RDWR); - /* Create a dataset */ + // Create a dataset DataSet dataset = fid1.openDataSet("Dataset1"); DataSpace sid1 = dataset.getSpace(); @@ -553,7 +545,7 @@ test_h5s_compound_scalar_read(void) // Get the datatype of this dataset. 
CompType type(dataset); - struct space4_struct rdata; /* Scalar data read in */ + struct space4_struct rdata; // Scalar data read in dataset.read(&rdata, type); // Verify read data @@ -567,14 +559,14 @@ test_h5s_compound_scalar_read(void) << space4_data.f << ", read_data4.f=" << rdata.f << endl; TestErrPrintf("scalar data different: space4_data.c1=%c, read_data4.c1=%c\n", space4_data.c1, rdata.c2); - } /* end if */ + } // end if } // end of try block catch (Exception E) { // all the exceptions caused by negative returned values by C APIs issue_fail_msg(E.getCFuncName(), __LINE__, __FILE__, E.getCDetailMsg()); } -} /* test_h5s_compound_scalar_read() */ +} // test_h5s_compound_scalar_read() /*------------------------------------------------------------------------- * @@ -593,16 +585,16 @@ test_h5s_compound_scalar_read(void) void test_h5s(void) { - /* Output message about test being performed */ + // Output message about test being performed MESSAGE(5, ("Testing Dataspaces\n")); - test_h5s_basic(); /* Test basic H5S code */ - test_h5s_scalar_write(); /* Test scalar H5S writing code */ - test_h5s_scalar_read(); /* Test scalar H5S reading code */ - test_h5s_null(); /* Test null H5S code */ - test_h5s_compound_scalar_write(); /* Test compound datatype scalar H5S writing code */ - test_h5s_compound_scalar_read(); /* Test compound datatype scalar H5S reading code */ -} /* test_h5s() */ + test_h5s_basic(); // Test basic H5S code + test_h5s_scalar_write(); // Test scalar H5S writing code + test_h5s_scalar_read(); // Test scalar H5S reading code + test_h5s_null(); // Test null H5S code + test_h5s_compound_scalar_write(); // Test compound datatype scalar H5S writing code + test_h5s_compound_scalar_read(); // Test compound datatype scalar H5S reading code +} // test_h5s() /*------------------------------------------------------------------------- @@ -623,5 +615,5 @@ void cleanup_h5s(void) { remove(DATAFILE.c_str()); -} +} // cleanup_h5s |
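For reference, the scalar-dataspace round trip exercised by test_h5s_scalar_write() and test_h5s_scalar_read() in th5s.cpp reduces to the following standalone sketch. It is not part of the patch; the file name and the value written are illustrative assumptions.

```cpp
// Sketch of the scalar-dataspace write/read round trip from th5s.cpp.
// Not part of the patch; file name and data value are illustrative.
#include <iostream>
#include "H5Cpp.h"
using namespace H5;

int main()
{
    try {
        unsigned wdata = 65, rdata = 0;

        H5File file("th5s_sketch.h5", H5F_ACC_TRUNC);

        // A scalar (rank-0) dataspace holds exactly one element.
        DataSpace scalar(H5S_SCALAR);

        DataSet dset = file.createDataSet("Dataset1",
                                          PredType::NATIVE_UINT, scalar);
        dset.write(&wdata, PredType::NATIVE_UINT);
        dset.read(&rdata, PredType::NATIVE_UINT);

        std::cout << (rdata == wdata ? "match" : "mismatch") << std::endl;
        return 0;
    }
    catch (Exception& e) {
        std::cerr << e.getDetailMsg() << std::endl;
        return 1;
    }
}
```

The compound scalar tests follow the same shape, substituting a CompType built with insertMember() for NATIVE_UINT.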