Hello.
I've run into the following problem while using HDF5: I can't create an
attribute for an empty dataset when the application is executed in parallel.
I have a dataset with a total number of elements N, which I write to a
file as a 1-D dataset.
When the application is executed in parallel, each process writes its own
piece of the data.
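For example, with N = 10 elements and 3 processes, the pieces are 4, 3 and 3
elements at offsets 0, 4 and 7 for ranks 0, 1 and 2 (see
distribute_elements_between_processes() in the attached code).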
After saving the dataset to the file, I want to attach some attributes
to it.
And here the problem emerges: if the total number of elements N is zero,
i.e. the dataset is empty,
my attempt to create an attribute results in an error and program
termination.
It doesn't happen if the dataset is nonempty.
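In essence, the attribute-creation part boils down to the following
sequence, executed by every rank after the collective dataset write
(complete listings below):

    dataset_id   = H5Dopen2( file_id, DSET, H5P_DEFAULT );
    dataspace_id = H5Screate_simple( 1, &single_element, NULL );
    /* the failure occurs here when the dataset has zero elements: */
    attribute_id = H5Acreate2( dataset_id, "attr", H5T_IEEE_F64BE,
                               dataspace_id, H5P_DEFAULT, H5P_DEFAULT );
    status = H5Awrite( attribute_id, H5T_NATIVE_DOUBLE, &attr );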
Besides, the serial version of the program works fine in both cases.
I must be doing something wrong, but I can't figure out what exactly.
What is the correct way to create an attribute for a dataset in parallel?
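As far as I understand, in parallel HDF5 all metadata-modifying calls
(H5Dcreate2, H5Acreate2, H5Awrite and so on) must be made collectively by
every process, and I believe the code below does exactly that.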
Attached is the source code for the parallel and the serial versions of
the program. I'm using Debian with libhdf5-1.8.13.

The parallel version:
#include <hdf5.h>
#include <stdlib.h>
#include <mpi.h>
#define FILE "empty_dset.h5"
#define DSET "/dset"
void distribute_elements_between_processes( hsize_t *subset_dims,
                                            hsize_t *subset_offset,
                                            int total_elements );
int main(){
    MPI_Init(NULL, NULL);

    int mpi_process_rank;
    MPI_Comm_rank( MPI_COMM_WORLD, &mpi_process_rank );

    hid_t file_id, dataset_id, dataspace_id, attribute_id; /* identifiers */
    hid_t plist_id, memspace, filespace;
    herr_t status;

    int dset_rank = 1;
    int total_elements = 0;
    hsize_t dims[dset_rank];
    dims[0] = total_elements;

    /* Create a new file collectively. */
    plist_id = H5Pcreate( H5P_FILE_ACCESS );
    H5Pset_fapl_mpio( plist_id, MPI_COMM_WORLD, MPI_INFO_NULL );
    file_id = H5Fcreate( FILE, H5F_ACC_TRUNC, H5P_DEFAULT, plist_id );
    H5Pclose(plist_id);

    /* Each process fills its share of the data with its rank. */
    hsize_t subset_dims[dset_rank], subset_offset[dset_rank];
    distribute_elements_between_processes( subset_dims, subset_offset, total_elements );
    int *data = (int *) malloc( subset_dims[0] * sizeof(int) );
    for( int i = 0; i < subset_dims[0]; i++ ){
        data[i] = mpi_process_rank;
    }
    /* Write each process's piece using a collective transfer and
       a hyperslab selection in the file. */
    plist_id = H5Pcreate( H5P_DATASET_XFER );
    status = H5Pset_dxpl_mpio( plist_id, H5FD_MPIO_COLLECTIVE );
    memspace = H5Screate_simple( dset_rank, subset_dims, NULL );
    filespace = H5Screate_simple( dset_rank, dims, NULL );
    status = H5Sselect_hyperslab( filespace, H5S_SELECT_SET,
                                  subset_offset, NULL, subset_dims, NULL );

    /* Create a dataset. */
    dataset_id = H5Dcreate2( file_id, DSET, H5T_STD_I32BE, filespace,
                             H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT );
    status = H5Dwrite( dataset_id, H5T_NATIVE_INT,
                       memspace, filespace, plist_id, data );
    H5Dclose( dataset_id );
    H5Sclose( filespace );
    H5Sclose( memspace );
    H5Pclose( plist_id );
    free(data);
    /* Create an attribute. This is where the program fails
       when total_elements == 0. */
    int attr_dspace_rank = 1;
    hsize_t single_element = 1;
    double attr = 3.0;
    dataset_id = H5Dopen2( file_id, DSET, H5P_DEFAULT );
    dataspace_id = H5Screate_simple( attr_dspace_rank, &single_element, NULL );
    attribute_id = H5Acreate2( dataset_id, "attr", H5T_IEEE_F64BE, dataspace_id,
                               H5P_DEFAULT, H5P_DEFAULT );
    status = H5Awrite( attribute_id, H5T_NATIVE_DOUBLE, &attr );
    status = H5Aclose( attribute_id );
    status = H5Sclose( dataspace_id );
    status = H5Dclose( dataset_id );

    /* Close the file. */
    status = H5Fclose( file_id );

    MPI_Finalize();
    return 0;
}
void distribute_elements_between_processes( hsize_t *subset_dims,
                                            hsize_t *subset_offset,
                                            int total_elements )
{
    int rest;
    int mpi_n_of_proc, mpi_process_rank;
    MPI_Comm_size( MPI_COMM_WORLD, &mpi_n_of_proc );
    MPI_Comm_rank( MPI_COMM_WORLD, &mpi_process_rank );

    /* Split the elements as evenly as possible: the first `rest`
       processes get one extra element. */
    subset_dims[0] = total_elements / mpi_n_of_proc;
    rest = total_elements % mpi_n_of_proc;
    if( mpi_process_rank < rest )
        subset_dims[0]++;
    subset_offset[0] = subset_dims[0] * mpi_process_rank;
    /* ">=" rather than ">": rank == rest also needs the extra offset. */
    if( mpi_process_rank >= rest )
        subset_offset[0] += rest;
    /* printf( "proc = %d, dims = %llu, offset = %llu \n", */
    /*         mpi_process_rank, subset_dims[0], subset_offset[0] ); */
    return;
}
The serial version:

#include <hdf5.h>
#include <stdlib.h>
#define FILE "empty_dset.h5"
#define DSET "/dset"
int main(){
    hid_t file_id, dataset_id, dataspace_id, attribute_id; /* identifiers */
    herr_t status;

    int dset_rank = 1;
    int total_elements = 10;
    hsize_t dims[dset_rank];
    dims[0] = total_elements;

    /* Fill the data with consecutive integers. */
    int *data = (int *) malloc( dims[0] * sizeof(int) );
    for( int i = 0; i < dims[0]; i++ ){
        data[i] = i;
    }
    /* Create a new file. */
    file_id = H5Fcreate( FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );

    /* Create a dataset. */
    dataspace_id = H5Screate_simple( dset_rank, dims, NULL );
    dataset_id = H5Dcreate2( file_id, DSET, H5T_STD_I32BE, dataspace_id,
                             H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT );
    status = H5Dwrite( dataset_id, H5T_NATIVE_INT,
                       H5S_ALL, H5S_ALL, H5P_DEFAULT, data );
    H5Dclose( dataset_id );
    H5Sclose( dataspace_id );
    free(data);
    /* Create an attribute. In the serial version this works for
       empty and nonempty datasets alike. */
    int attr_dim = 1;
    hsize_t single_element = 1;
    double attr = 3.0;
    dataset_id = H5Dopen2( file_id, DSET, H5P_DEFAULT );
    dataspace_id = H5Screate_simple( attr_dim, &single_element, NULL );
    attribute_id = H5Acreate2( dataset_id, "attr", H5T_IEEE_F64BE, dataspace_id,
                               H5P_DEFAULT, H5P_DEFAULT );
    status = H5Awrite( attribute_id, H5T_NATIVE_DOUBLE, &attr );
    status = H5Aclose( attribute_id );
    status = H5Sclose( dataspace_id );
    status = H5Dclose( dataset_id );

    /* Close the file. */
    status = H5Fclose( file_id );

    return 0;
}