author    Quincey Koziol <koziol@hdfgroup.org>    2003-04-14 11:22:10 (GMT)
committer Quincey Koziol <koziol@hdfgroup.org>    2003-04-14 11:22:10 (GMT)
commit    ba28043239b3646a2c05e782cdc2b716e3700307 (patch)
tree      619610e4aef250859ecd6b97404d0ba4674e7afd /test
parent    eb3e5b8144979c653d12c56790e4efb3cafdd11b (diff)
download  hdf5-ba28043239b3646a2c05e782cdc2b716e3700307.zip
          hdf5-ba28043239b3646a2c05e782cdc2b716e3700307.tar.gz
          hdf5-ba28043239b3646a2c05e782cdc2b716e3700307.tar.bz2
[svn-r6665] Purpose:
    New feature.

Description:
    Added tests to verify correct operation of re-using file space.

Platforms tested:
    FreeBSD 4.8 (sleipnir) w/C++
    Linux 2.4 (burrwhite) w/FORTRAN
    Solaris 2.7 (arabica) w/FORTRAN
    IRIX64 6.5 (modi4) w/parallel & FORTRAN
    (h5committest not run due to my ongoing difficulties with C++ on burrwhite)
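In outline, each new test in test_filespace() records the size of an empty file, creates one or more objects, unlinks them, closes the file, and checks that the file has shrunk back to the empty size. A condensed sketch of that pattern (assuming the get_file_size() helper added in this patch, the TEST_ERROR macro from h5test.h, and filename/fapl set up as in test_filespace()):

    hsize_t dims[1] = {100};            /* Illustrative dataset dimensions */
    off_t   empty_size, file_size;
    hid_t   file, space, dset;

    /* Record the size of a freshly created, empty file */
    if((file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) TEST_ERROR;
    if(H5Fclose(file) < 0) TEST_ERROR;
    if((empty_size = get_file_size(filename)) == 0) TEST_ERROR;

    /* Re-create the file, add an object, then unlink it */
    if((file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) TEST_ERROR;
    if((space = H5Screate_simple(1, dims, NULL)) < 0) TEST_ERROR;
    if((dset = H5Dcreate(file, "dataset", H5T_NATIVE_INT, space, H5P_DEFAULT)) < 0) TEST_ERROR;
    if(H5Dclose(dset) < 0) TEST_ERROR;
    if(H5Gunlink(file, "dataset") < 0) TEST_ERROR;
    if(H5Sclose(space) < 0) TEST_ERROR;
    if(H5Fclose(file) < 0) TEST_ERROR;

    /* If the unlinked object's space was released, the file is back to its empty size */
    if((file_size = get_file_size(filename)) == 0) TEST_ERROR;
    if(file_size != empty_size) TEST_ERROR;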
Diffstat (limited to 'test')
-rw-r--r--   test/unlink.c   653
1 file changed, 647 insertions, 6 deletions
diff --git a/test/unlink.c b/test/unlink.c
index 72b64a13..b6939dc 100644
--- a/test/unlink.c
+++ b/test/unlink.c
@@ -18,6 +18,8 @@
*
* Purpose: Test H5Gunlink().
*/
+
+#include <time.h>
#include "h5test.h"
const char *FILENAME[] = {
@@ -25,14 +27,64 @@ const char *FILENAME[] = {
"new_move_a",
"new_move_b",
"lunlink",
+ "filespace",
NULL
};
#define THE_OBJECT "/foo"
-/* Macros for test_create_unlink() */
-#define GROUPNAME "Group"
+/* Macros for test_create_unlink() & test_filespace */
+#define GROUPNAME "group"
+#define GROUP2NAME "group2"
#define NGROUPS 1000
+#define DATASETNAME "dataset"
+#define DATASET2NAME "dataset2"
+#define ATTRNAME "attribute"
+#define TYPENAME "datatype"
+#define FILESPACE_NDIMS 3
+#define FILESPACE_DIM0 20
+#define FILESPACE_DIM1 20
+#define FILESPACE_DIM2 20
+#define FILESPACE_CHUNK0 10
+#define FILESPACE_CHUNK1 10
+#define FILESPACE_CHUNK2 10
+#define FILESPACE_DEFLATE_LEVEL 6
+#define FILESPACE_REWRITE 10
+#define FILESPACE_NATTR 100
+#define FILESPACE_ATTR_NDIMS 2
+#define FILESPACE_ATTR_DIM0 5
+#define FILESPACE_ATTR_DIM1 5
+#define FILESPACE_TOP_GROUPS 10
+#define FILESPACE_NESTED_GROUPS 50
+#define FILESPACE_NDATASETS 50
+
+
+/*-------------------------------------------------------------------------
+ * Function: get_file_size
+ *
+ * Purpose: Get the current size of a file, so callers can verify that
+ *          a file is a particular size.
+ *
+ * Return: Success: Size of the file, in bytes
+ *         Failure: 0
+ *
+ * Programmer: Quincey Koziol
+ * Saturday, March 22, 2003
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static off_t
+get_file_size(const char *filename)
+{
+ h5_stat_t sb;
+
+ /* Get the file's statistics */
+ if (HDstat(filename, &sb)>=0)
+ return(sb.st_size);
+
+ return(0);
+} /* end get_file_size() */
/*-------------------------------------------------------------------------
@@ -453,11 +505,597 @@ check_new_move(void)
return 0;
error:
- return -1;
+ return 1;
}
/*-------------------------------------------------------------------------
+ * Function: test_filespace
+ *
+ * Purpose: Test proper reuse of space in the file when objects are unlinked
+ *
+ * Return: Success: 0
+ * Failure: 1
+ *
+ * Programmer: Quincey Koziol
+ * Saturday, March 22, 2003
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+test_filespace(void)
+{
+ hid_t fapl; /* File access property list */
+ hid_t fapl_nocache; /* File access property list with raw data cache turned off */
+ hid_t contig_dcpl; /* Dataset creation property list for contiguous dataset */
+ hid_t early_chunk_dcpl; /* Dataset creation property list for chunked dataset & early allocation */
+ hid_t late_chunk_dcpl; /* Dataset creation property list for chunked dataset & late allocation */
+ hid_t comp_dcpl; /* Dataset creation property list for compressed, chunked dataset */
+ hid_t compact_dcpl; /* Dataset creation property list for compact dataset */
+ hid_t file; /* File ID */
+ hid_t group, group2; /* Group IDs */
+ hid_t dataset, dataset2; /* Dataset IDs */
+ hid_t space; /* Dataspace ID */
+ hid_t type; /* Datatype ID */
+ hid_t attr_space; /* Dataspace ID for attributes */
+ hid_t attr; /* Attribute ID */
+ char filename[1024]; /* Name of file to create */
+ char objname[128]; /* Name of object to create */
+ hsize_t dims[FILESPACE_NDIMS]= {FILESPACE_DIM0, FILESPACE_DIM1, FILESPACE_DIM2}; /* Dataset dimensions */
+ hsize_t chunk_dims[FILESPACE_NDIMS]= {FILESPACE_CHUNK0, FILESPACE_CHUNK1, FILESPACE_CHUNK2}; /* Chunk dimensions */
+ hsize_t attr_dims[FILESPACE_ATTR_NDIMS]= {FILESPACE_ATTR_DIM0, FILESPACE_ATTR_DIM1}; /* Attribute dimensions */
+ int *data; /* Pointer to dataset buffer */
+ int *tmp_data; /* Temporary pointer to dataset buffer */
+ off_t empty_size; /* Size of an empty file */
+ off_t file_size; /* Size of each file created */
+ unsigned u,v,w; /* Local index variables */
+
+ /* Metadata cache parameters */
+ int mdc_nelmts;
+ size_t rdcc_nelmts;
+ size_t rdcc_nbytes;
+ double rdcc_w0;
+
+
+ puts("Testing that file space gets reused");
+
+ /* Open file */
+ fapl = h5_fileaccess();
+ h5_fixname(FILENAME[4], fapl, filename, sizeof filename);
+
+/* Create FAPL with raw data cache disabled */
+
+ /* Copy the default file access property list */
+ if ((fapl_nocache=H5Pcopy(fapl))<0) TEST_ERROR;
+
+ /* Get the cache settings */
+ if(H5Pget_cache(fapl_nocache,&mdc_nelmts,&rdcc_nelmts,&rdcc_nbytes,&rdcc_w0)<0) TEST_ERROR;
+
+ /* Disable the raw data cache */
+ rdcc_nelmts=0;
+ rdcc_nbytes=0;
+ if(H5Pset_cache(fapl_nocache,mdc_nelmts,rdcc_nelmts,rdcc_nbytes,rdcc_w0)<0) TEST_ERROR;
+
+/* Create empty file for size comparisons later */
+
+ /* Create file */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of an empty file */
+ if((empty_size=get_file_size(filename))==0) TEST_ERROR;
+
+/* Create common objects for datasets */
+
+ /* Create dataset creation property list for contiguous storage */
+ if ((contig_dcpl=H5Pcreate(H5P_DATASET_CREATE))<0) TEST_ERROR;
+
+ /* Make certain that space is allocated early */
+ if(H5Pset_alloc_time(contig_dcpl, H5D_ALLOC_TIME_EARLY) < 0) TEST_ERROR;
+
+ /* Create dataset creation property list for chunked storage & early allocation */
+ if ((early_chunk_dcpl=H5Pcopy(contig_dcpl))<0) TEST_ERROR;
+
+ /* Set chunk dimensions */
+ if(H5Pset_chunk(early_chunk_dcpl, FILESPACE_NDIMS, chunk_dims) < 0) TEST_ERROR;
+
+ /* Create dataset creation property list for chunked storage & late allocation */
+ if ((late_chunk_dcpl=H5Pcreate(H5P_DATASET_CREATE))<0) TEST_ERROR;
+
+ /* Set chunk dimensions */
+ if(H5Pset_chunk(late_chunk_dcpl, FILESPACE_NDIMS, chunk_dims) < 0) TEST_ERROR;
+
+ /* Create dataset creation property list for compressed, chunked storage & early allocation */
+ if ((comp_dcpl=H5Pcopy(early_chunk_dcpl))<0) TEST_ERROR;
+
+ /* Set the deflate compression level */
+ if(H5Pset_deflate(comp_dcpl, FILESPACE_DEFLATE_LEVEL) < 0) TEST_ERROR;
+
+ /* Create dataset creation property list for compact storage */
+ if ((compact_dcpl=H5Pcreate(H5P_DATASET_CREATE))<0) TEST_ERROR;
+
+ /* Set to compact storage */
+ if(H5Pset_layout(compact_dcpl, H5D_COMPACT) < 0) TEST_ERROR;
+
+ /* Create dataspace for datasets */
+ if((space = H5Screate_simple(FILESPACE_NDIMS, dims, NULL))<0) TEST_ERROR;
+
+ /* Create buffer for writing dataset */
+ if((data = HDmalloc(sizeof(int)*FILESPACE_DIM0*FILESPACE_DIM1*FILESPACE_DIM2))==NULL) TEST_ERROR;
+
+/* Create single dataset (with contiguous storage & late allocation), remove it & verify file size */
+ TESTING(" contiguous dataset with late allocation");
+
+ /* Create file */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
+
+ /* Create a single dataset to remove */
+ if((dataset = H5Dcreate (file, DATASETNAME, H5T_NATIVE_INT, space, H5P_DEFAULT))<0) TEST_ERROR;
+ if(H5Dclose (dataset)<0) TEST_ERROR;
+
+ /* Remove the dataset */
+ if(H5Gunlink (file, DATASETNAME)<0) TEST_ERROR;
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of the file */
+ if((file_size=get_file_size(filename))==0) TEST_ERROR;
+
+ /* Verify the file is correct size */
+ if(file_size!=empty_size) TEST_ERROR;
+
+ PASSED();
+
+/* Create single dataset (with contiguous storage & early allocation), remove it & verify file size */
+ TESTING(" contiguous dataset with early allocation");
+
+ /* Create file */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
+
+ /* Create a single dataset to remove */
+ if((dataset = H5Dcreate (file, DATASETNAME, H5T_NATIVE_INT, space, contig_dcpl))<0) TEST_ERROR;
+ if(H5Dclose (dataset)<0) TEST_ERROR;
+
+ /* Remove the dataset */
+ if(H5Gunlink (file, DATASETNAME)<0) TEST_ERROR;
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of the file */
+ if((file_size=get_file_size(filename))==0) TEST_ERROR;
+
+ /* Verify the file is correct size */
+ if(file_size!=empty_size) TEST_ERROR;
+
+ PASSED();
+
+/* Create single dataset (with chunked storage & late allocation), remove it & verify file size */
+ TESTING(" chunked dataset with late allocation");
+
+ /* Create file */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
+
+ /* Create a single dataset to remove */
+ if((dataset = H5Dcreate (file, DATASETNAME, H5T_NATIVE_INT, space, late_chunk_dcpl))<0) TEST_ERROR;
+ if(H5Dclose (dataset)<0) TEST_ERROR;
+
+ /* Remove the dataset */
+ if(H5Gunlink (file, DATASETNAME)<0) TEST_ERROR;
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of the file */
+ if((file_size=get_file_size(filename))==0) TEST_ERROR;
+
+ /* Verify the file is correct size */
+ if(file_size!=empty_size) TEST_ERROR;
+
+ PASSED();
+
+/* Create single dataset (with chunked storage & early allocation), remove it & verify file size */
+ TESTING(" chunked dataset with early allocation");
+
+ /* Create file */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
+
+ /* Create a single dataset to remove */
+ if((dataset = H5Dcreate (file, DATASETNAME, H5T_NATIVE_INT, space, early_chunk_dcpl))<0) TEST_ERROR;
+ if(H5Dclose (dataset)<0) TEST_ERROR;
+
+ /* Remove the dataset */
+ if(H5Gunlink (file, DATASETNAME)<0) TEST_ERROR;
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of the file */
+ if((file_size=get_file_size(filename))==0) TEST_ERROR;
+
+ /* Verify the file is correct size */
+ if(file_size!=empty_size) TEST_ERROR;
+
+ PASSED();
+
+/* Create single dataset (with compressed storage & early allocation), remove it & verify file size */
+ TESTING(" compressed, chunked dataset");
+
+ /* Create file */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
+
+ /* Create a single dataset to remove */
+ if((dataset = H5Dcreate (file, DATASETNAME, H5T_NATIVE_INT, space, comp_dcpl))<0) TEST_ERROR;
+ if(H5Dclose (dataset)<0) TEST_ERROR;
+
+ /* Remove the dataset */
+ if(H5Gunlink (file, DATASETNAME)<0) TEST_ERROR;
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of the file */
+ if((file_size=get_file_size(filename))==0) TEST_ERROR;
+
+ /* Verify the file is correct size */
+ if(file_size!=empty_size) TEST_ERROR;
+
+ PASSED();
+
+/* Create single dataset (with compressed storage & early allocation), re-write it
+ * several times (which should cause its chunks to be re-allocated repeatedly),
+ * then remove it & verify the file size.
+ */
+ TESTING(" re-writing compressed, chunked dataset");
+
+ /* Create file (using FAPL with disabled raw data cache) */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_nocache))<0) TEST_ERROR;
+
+ /* Create a single dataset to remove */
+ if((dataset = H5Dcreate (file, DATASETNAME, H5T_NATIVE_INT, space, comp_dcpl))<0) TEST_ERROR;
+
+ /* Alternate re-writing dataset with compressible & random data */
+ for(u=0; u<FILESPACE_REWRITE; u++) {
+ /* Set buffer to some compressible values */
+ for (v=0, tmp_data=data; v<(FILESPACE_DIM0*FILESPACE_DIM1*FILESPACE_DIM2); v++)
+ *tmp_data++ = v*u;
+
+ /* Write the buffer to the dataset */
+ if (H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data)<0) TEST_ERROR;
+
+ /* Set buffer to different random numbers each time */
+ for (v=0, tmp_data=data; v<(FILESPACE_DIM0*FILESPACE_DIM1*FILESPACE_DIM2); v++)
+ *tmp_data++ = HDrandom();
+
+ /* Write the buffer to the dataset */
+ if (H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data)<0) TEST_ERROR;
+ } /* end for */
+
+ /* Close dataset */
+ if(H5Dclose (dataset)<0) TEST_ERROR;
+
+ /* Remove the dataset */
+ if(H5Gunlink (file, DATASETNAME)<0) TEST_ERROR;
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of the file */
+ if((file_size=get_file_size(filename))==0) TEST_ERROR;
+
+ /* Verify the file is correct size */
+ if(file_size!=empty_size) TEST_ERROR;
+
+ PASSED();
+
+/* Create single dataset (with compact storage), remove it & verify file size */
+ TESTING(" compact dataset");
+
+ /* Create file */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
+
+ /* Create a single dataset to remove */
+ if((dataset = H5Dcreate (file, DATASETNAME, H5T_NATIVE_INT, space, compact_dcpl))<0) TEST_ERROR;
+ if(H5Dclose (dataset)<0) TEST_ERROR;
+
+ /* Remove the dataset */
+ if(H5Gunlink (file, DATASETNAME)<0) TEST_ERROR;
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of the file */
+ if((file_size=get_file_size(filename))==0) TEST_ERROR;
+
+ /* Verify the file is correct size */
+ if(file_size!=empty_size) TEST_ERROR;
+
+ PASSED();
+
+/* Create two datasets (with contiguous storage), alternate adding attributes
+ * to each one (which creates many object header continuations),
+ * remove both & verify file size.
+ */
+ TESTING(" object header continuations");
+
+ /* Create file */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
+
+ /* Create datasets to remove */
+ if((dataset = H5Dcreate (file, DATASETNAME, H5T_NATIVE_INT, space, contig_dcpl))<0) TEST_ERROR;
+ if((dataset2 = H5Dcreate (file, DATASET2NAME, H5T_NATIVE_INT, space, contig_dcpl))<0) TEST_ERROR;
+
+ /* Create a dataspace for the attributes */
+ if((attr_space = H5Screate_simple(FILESPACE_ATTR_NDIMS, attr_dims, NULL))<0) TEST_ERROR;
+
+ /* Alternate adding attributes to each one */
+ for(u=0; u<FILESPACE_NATTR; u++) {
+ /* Set the name of the attribute to create */
+ sprintf(objname,"%s %u",ATTRNAME,u);
+
+ /* Create an attribute on the first dataset */
+ if((attr = H5Acreate (dataset, objname, H5T_NATIVE_INT, attr_space, H5P_DEFAULT))<0) TEST_ERROR;
+
+ /* Don't worry about writing the attribute - it will have a fill value */
+
+ /* Close the attribute on the first dataset */
+ if(H5Aclose (attr)<0) TEST_ERROR;
+
+ /* Create an attribute on the second dataset */
+ if((attr = H5Acreate (dataset2, objname, H5T_NATIVE_INT, attr_space, H5P_DEFAULT))<0) TEST_ERROR;
+
+ /* Don't worry about writing the attribute - it will have a fill value */
+
+ /* Close the attribute on the second dataset */
+ if(H5Aclose (attr)<0) TEST_ERROR;
+
+ /* Flush the file (to fix the sizes of object header buffers, etc) */
+ if(H5Fflush(file,H5F_SCOPE_GLOBAL)<0) TEST_ERROR;
+ } /* end for */
+
+ /* Close the dataspace for the attributes */
+ if(H5Sclose (attr_space)<0) TEST_ERROR;
+
+ /* Close datasets */
+ if(H5Dclose (dataset)<0) TEST_ERROR;
+ if(H5Dclose (dataset2)<0) TEST_ERROR;
+
+ /* Remove the datasets */
+ if(H5Gunlink (file, DATASETNAME)<0) TEST_ERROR;
+ if(H5Gunlink (file, DATASET2NAME)<0) TEST_ERROR;
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of the file */
+ if((file_size=get_file_size(filename))==0) TEST_ERROR;
+
+ /* Verify the file is correct size */
+ if(file_size!=empty_size) TEST_ERROR;
+
+ PASSED();
+
+/* Create single named datatype, remove it & verify file size */
+ TESTING(" named datatype");
+
+ /* Create file */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
+
+ /* Create datatype to commit */
+ if((type = H5Tcopy (H5T_NATIVE_INT))<0) TEST_ERROR;
+
+ /* Create a single named datatype to remove */
+ if(H5Tcommit (file, TYPENAME, type)<0) TEST_ERROR;
+ if(H5Tclose (type)<0) TEST_ERROR;
+
+ /* Remove the named datatype */
+ if(H5Gunlink (file, TYPENAME)<0) TEST_ERROR;
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of the file */
+ if((file_size=get_file_size(filename))==0) TEST_ERROR;
+
+ /* Verify the file is correct size */
+ if(file_size!=empty_size) TEST_ERROR;
+
+ PASSED();
+
+/* Create single group, remove it & verify file size */
+ TESTING(" single group");
+
+ /* Create file */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
+
+ /* Create a single group to remove */
+ if((group = H5Gcreate (file, GROUPNAME, 0))<0) TEST_ERROR;
+ if(H5Gclose (group)<0) TEST_ERROR;
+
+ /* Remove the group */
+ if(H5Gunlink (file, GROUPNAME)<0) TEST_ERROR;
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of the file */
+ if((file_size=get_file_size(filename))==0) TEST_ERROR;
+
+ /* Verify the file is correct size */
+ if(file_size!=empty_size) TEST_ERROR;
+
+ PASSED();
+
+/* Create many groups, remove them & verify file size */
+ TESTING(" multiple groups");
+
+ /* Create file */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
+
+ /* Create many groups to remove */
+ for(u=0; u<NGROUPS; u++) {
+ sprintf(objname,"%s %u",GROUPNAME,u);
+ if((group = H5Gcreate (file, objname, 0))<0) TEST_ERROR;
+ if(H5Gclose (group)<0) TEST_ERROR;
+ } /* end for */
+
+ /* Remove all the groups */
+ for(u=0; u<NGROUPS; u++) {
+ sprintf(objname,"%s %u",GROUPNAME,u);
+ if(H5Gunlink (file, objname)<0) TEST_ERROR;
+ } /* end for */
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of the file */
+ if((file_size=get_file_size(filename))==0) TEST_ERROR;
+
+ /* Verify the file is correct size */
+ if(file_size!=empty_size) TEST_ERROR;
+
+ PASSED();
+
+/* Create simple group hierarchy, remove it & verify file size */
+ TESTING(" simple group hierarchy");
+
+ /* Create file */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
+
+ /* Create a small group hierarchy to remove */
+ if((group = H5Gcreate (file, GROUPNAME, 0))<0) TEST_ERROR;
+ if((group2 = H5Gcreate (group, GROUP2NAME, 0))<0) TEST_ERROR;
+ if(H5Gclose (group2)<0) TEST_ERROR;
+ if(H5Gclose (group)<0) TEST_ERROR;
+
+ /* Remove the second group */
+ if(H5Gunlink (file, GROUPNAME "/" GROUP2NAME)<0) TEST_ERROR;
+
+ /* Remove the first group */
+ if(H5Gunlink (file, GROUPNAME)<0) TEST_ERROR;
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of the file */
+ if((file_size=get_file_size(filename))==0) TEST_ERROR;
+
+ /* Verify the file is correct size */
+ if(file_size!=empty_size) TEST_ERROR;
+
+ PASSED();
+
+/* Create complex group hierarchy, remove it & verify file size */
+ TESTING(" complex group hierarchy");
+
+ /* Create file */
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
+
+ /* Create a complex group hierarchy to remove */
+ for(u=0; u<FILESPACE_TOP_GROUPS; u++) {
+ /* Create group */
+ sprintf(objname,"%s %u",GROUPNAME,u);
+ if((group = H5Gcreate (file, objname, 0))<0) TEST_ERROR;
+
+ /* Create nested groups inside top groups */
+ for(v=0; v<FILESPACE_NESTED_GROUPS; v++) {
+ /* Create group */
+ sprintf(objname,"%s %u",GROUP2NAME,v);
+ if((group2 = H5Gcreate (group, objname, 0))<0) TEST_ERROR;
+
+ /* Create datasets inside nested groups */
+ for(w=0; w<FILESPACE_NDATASETS; w++) {
+ /* Create & close a dataset */
+ sprintf(objname,"%s %u",DATASETNAME,w);
+ if((dataset = H5Dcreate (group2, objname, H5T_NATIVE_INT, space, H5P_DEFAULT))<0) TEST_ERROR;
+ if(H5Dclose (dataset)<0) TEST_ERROR;
+ } /* end for */
+
+ /* Close nested group */
+ if(H5Gclose (group2)<0) TEST_ERROR;
+ } /* end for */
+
+ /* Close top group */
+ if(H5Gclose (group)<0) TEST_ERROR;
+ } /* end for */
+
+ /* Remove complex group hierarchy */
+ for(u=0; u<FILESPACE_TOP_GROUPS; u++) {
+ /* Open group */
+ sprintf(objname,"%s %u",GROUPNAME,u);
+ if((group = H5Gopen (file, objname))<0) TEST_ERROR;
+
+ /* Open nested groups inside top groups */
+ for(v=0; v<FILESPACE_NESTED_GROUPS; v++) {
+ /* Open nested group */
+ sprintf(objname,"%s %u",GROUP2NAME,v);
+ if((group2 = H5Gopen (group, objname))<0) TEST_ERROR;
+
+ /* Remove datasets inside nested groups */
+ for(w=0; w<FILESPACE_NDATASETS; w++) {
+ /* Remove dataset */
+ sprintf(objname,"%s %u",DATASETNAME,w);
+ if(H5Gunlink (group2, objname)<0) TEST_ERROR;
+ } /* end for */
+
+ /* Close nested group */
+ if(H5Gclose (group2)<0) TEST_ERROR;
+
+ /* Remove nested group */
+ sprintf(objname,"%s %u",GROUP2NAME,v);
+ if(H5Gunlink (group, objname)<0) TEST_ERROR;
+ } /* end for */
+
+ /* Close top group */
+ if(H5Gclose (group)<0) TEST_ERROR;
+
+ /* Remove top group */
+ sprintf(objname,"%s %u",GROUPNAME,u);
+ if(H5Gunlink (file, objname)<0) TEST_ERROR;
+ } /* end for */
+
+ /* Close file */
+ if(H5Fclose(file)<0) TEST_ERROR;
+
+ /* Get the size of the file */
+ if((file_size=get_file_size(filename))==0) TEST_ERROR;
+
+ /* Verify the file is correct size */
+ if(file_size!=empty_size) TEST_ERROR;
+
+ PASSED();
+
+ /* Cleanup common objects */
+
+ /* Release dataset buffer */
+ HDfree(data);
+
+ /* Close property lists */
+ if(H5Pclose(fapl)<0) TEST_ERROR;
+ if(H5Pclose(fapl_nocache)<0) TEST_ERROR;
+ if(H5Pclose(contig_dcpl)<0) TEST_ERROR;
+ if(H5Pclose(early_chunk_dcpl)<0) TEST_ERROR;
+ if(H5Pclose(late_chunk_dcpl)<0) TEST_ERROR;
+ if(H5Pclose(comp_dcpl)<0) TEST_ERROR;
+ if(H5Pclose(compact_dcpl)<0) TEST_ERROR;
+
+ /* Close dataspace */
+ if(H5Sclose(space)<0) TEST_ERROR;
+
+ /* Indicate success */
+ /* Don't print final "PASSED", since we aren't on the correct line anymore */
+ return 0;
+
+error:
+ return 1;
+} /* end test_filespace() */
+
+
+/*-------------------------------------------------------------------------
* Function: test_create_unlink
*
* Purpose: Creates and then unlinks a large number of objects
@@ -563,12 +1201,14 @@ main(void)
size_t rdcc_nbytes;
double rdcc_w0;
+ /* Set the random # seed */
+ HDsrandom((unsigned long)time(NULL));
+
/* Open */
h5_reset();
fapl = h5_fileaccess();
h5_fixname(FILENAME[0], fapl, filename, sizeof filename);
- if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0)
- goto error;
+ if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) TEST_ERROR;
/* Make copy of regular fapl, to turn down the elements in the metadata cache */
if((fapl2=H5Pcopy(fapl))<0)
@@ -590,6 +1230,7 @@ main(void)
nerrors += test_rename(file);
nerrors += test_new_move();
nerrors += check_new_move();
+ nerrors += test_filespace();
/* Test creating & unlinking lots of objects with default FAPL */
nerrors += test_create_unlink("create and unlink large number of objects",fapl);
@@ -597,7 +1238,7 @@ main(void)
nerrors += test_create_unlink("create and unlink large number of objects with small cache",fapl2);
/* Close */
- if (H5Fclose(file)<0) goto error;
+ if (H5Fclose(file)<0) TEST_ERROR;
if (nerrors) {
printf("***** %d FAILURE%s! *****\n", nerrors, 1==nerrors?"":"S");
exit(1);