Hi Hdf users,
I am trying to write a C program to read a large (39 GB) raw binary
data file and store it in a dataset in an HDF5 file. The code is attached.

The problem I am facing is this: whenever I run this code, I see a *lot*
of memory usage; I am guessing that this is because the dataset that I
write into is not being flushed to disk.
What can I do to alleviate this? I am using hyperslabs to select one
"height" of a 3D cube of structured data and write it to the dataset. Is
there any way I can ask the program to write the data to disk, thus
freeing up main memory? I have seen this article:
http://www.hdfgroup.org/HDF5/faq/linux-mem.html
(I am running Debian Squeeze with 4 GB of memory)
but I am willing to take the time hit for the disk write...

Thanks,
--Pratik


/*
 *   Creating and closing a dataset.
 */
#include "hdf5.h"
#include <stdio.h>
#include <stdlib.h>

#define FILEw "dset.h5"
#define FILEr "BR1003_resampled_norm_mask_BIP"
#define sx 3156
#define sy 3089
#define sz 814
/*
 * Stream a large raw binary file (sx * sy rows of sz floats) into a
 * 3-D HDF5 dataset, one row at a time, using a hyperslab selection so
 * that only one row's worth of data is buffered in memory.
 *
 * Returns 0 on success, 1 on an I/O or allocation failure.
 */
int main(void) {

   hid_t       file_id, dataset_id, dataspace_id, memspace_id;
   hsize_t     dims[3], dimsmem[1], offset[3], block[3],
               stride[3] = {1, 1, sz}, count[3] = {1, 1, 1};
   herr_t      status;
   float      *data;
   int         i, j;
   int         rc = 0;
   FILE       *fr;

   /* Open in binary mode ("rb"); plain "r" corrupts binary data on
    * platforms that translate line endings. */
   fr = fopen(FILEr, "rb");
   if (fr == NULL) {
       fprintf(stderr, "cannot open input file %s\n", FILEr);
       return 1;
   }

   /* Create a new file using default properties. */
   file_id = H5Fcreate(FILEw, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

   /* One row (sz floats) is the only application-side buffer needed. */
   data = malloc(sizeof *data * sz);
   if (data == NULL) {
       fprintf(stderr, "out of memory\n");
       fclose(fr);
       return 1;
   }

   /* Create the data space for the dataset.
    * The dimensions are hard-coded here, so be careful
    * when using this on other data. */
   dims[0] = sx;
   dims[1] = sy;
   dims[2] = sz;
   dimsmem[0] = sz;
   dataspace_id = H5Screate_simple(3, dims, NULL);

   /* Create the dataset. */
   dataset_id = H5Dcreate2(file_id, "dset", H5T_NATIVE_FLOAT, dataspace_id,
                           H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

   /* The memory dataspace (one row of sz floats) never changes, so
    * create it ONCE.  The original code created a fresh dataspace and
    * memspace on every inner-loop iteration without closing them --
    * ~9.7 million leaked HDF5 identifiers, which is what was eating
    * all the memory. */
   memspace_id = H5Screate_simple(1, dimsmem, NULL);

   /* Loop over the source file and transfer data to the HDF5 dataset. */
   offset[2] = 0;
   block[0] = 1;
   block[1] = 1;
   block[2] = sz;
   for (i = 0; i < sx; i++) {
       offset[0] = i;
       for (j = 0; j < sy; j++) {
           offset[1] = j;
           /* A short read means the source file is smaller than the
            * hard-coded dimensions claim; stop rather than write junk. */
           if (fread(data, sizeof(float), sz, fr) != (size_t)sz) {
               fprintf(stderr, "short read at row (%d,%d)\n", i, j);
               rc = 1;
               goto cleanup;
           }
           /* Re-select the destination row on the EXISTING file
            * dataspace; no new identifiers are created per iteration. */
           status = H5Sselect_hyperslab(dataspace_id, H5S_SELECT_SET,
                                        offset, stride, count, block);
           status = H5Dwrite(dataset_id, H5T_NATIVE_FLOAT, memspace_id,
                             dataspace_id, H5P_DEFAULT, data);
       }
   }

cleanup:
   /* Release all HDF5 resources; H5Fclose flushes the file to disk. */
   status = H5Sclose(memspace_id);
   status = H5Dclose(dataset_id);
   status = H5Sclose(dataspace_id);
   status = H5Fclose(file_id);
   (void)status;

   free(data);
   fclose(fr);
   return rc;
}



_______________________________________________
Hdf-forum is for HDF software users discussion.
[email protected]
http://mail.hdfgroup.org/mailman/listinfo/hdf-forum_hdfgroup.org

Reply via email to