author      Raymond Lu <songyulu@hdfgroup.org>   2007-02-12 16:15:13 (GMT)
committer   Raymond Lu <songyulu@hdfgroup.org>   2007-02-12 16:15:13 (GMT)
commit      aaa8bba35443dd426d281f757eac3aae2e01b0e0 (patch)
tree        5abe146427beca3daf7e7ba80d6339d4b39b8941 /test/unlink.c
parent      146dc710d8cb4b52e6854b5ef974ad2dc8c36bba (diff)
[svn-r13278] The "complex group hierarchy" test case takes too long on copper when run
with the direct driver. Skip this test if the direct driver is used and express test mode is enabled.
Diffstat (limited to 'test/unlink.c')
-rw-r--r--   test/unlink.c   18
1 file changed, 18 insertions, 0 deletions
diff --git a/test/unlink.c b/test/unlink.c
index 16624b3..6b39ab0 100644
--- a/test/unlink.c
+++ b/test/unlink.c
@@ -522,7 +522,16 @@ test_filespace(hid_t fapl)
size_t rdcc_nelmts;
size_t rdcc_nbytes;
double rdcc_w0;
+ const char *envval = NULL;
+ int ExpressMode;
+ /* Don't run some tests for some drivers */
+ envval = HDgetenv("HDF5_DRIVER");
+ if(envval == NULL)
+ envval = "nomatch";
+
+ /* See if some tests can be skipped */
+ ExpressMode = GetTestExpress();
puts("Testing file space gets reused:");
@@ -957,6 +966,14 @@ test_filespace(hid_t fapl)
/* Create complex group hierarchy, remove it & verify file size */
TESTING(" complex group hierarchy");
+ if (ExpressMode > 1 && !HDstrcmp(envval, "direct")) {
+ /* This test case with the Direct driver has poor performance on
+ * NCSA copper, though it works. Skip it for now and worry
+ * about the performance later.
+ */
+ SKIPPED();
+ } else {
+
/* Create file */
if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) TEST_ERROR
@@ -1034,6 +1051,7 @@ test_filespace(hid_t fapl)
if(file_size!=empty_size) TEST_ERROR
PASSED();
+ }
/* Create dataset and duplicate dataset, remove original & verify file size */
TESTING(" duplicate dataset");