Everyone,

I'm new to the HDF5 community, and I'm using the HDF5.net library to help 
with some data processing.

We have built a file format around HDF5 that specifies how recorded sensor 
channels should be stored. I'm trying to write some software for that 
file format in C#.  Right now all I'm trying to do is batch dump the file 
format to CSV as a proof of concept that I understand how to read the 
datasets and attributes.

The basic flow is I open the HDF5 file and recurse through the groups to 
find all of the datasets. I store the paths of the datasets in a class.

From there I open each of the datasets and read the data into an array. 
The array is then processed and stored in memory and then eventually 
dumped to CSV.

The problem I'm having is that the code is processing multiple HDF5 files 
in sequence, and I start to get errors on the 6th file in the sequence. So 
if I have 10 files, the first five files process just fine, but the last 
five all have errors. It doesn't matter which files they are; they just throw an 
error. If I restart the software and just have them process the file that 
failed before it works just fine.

The error that is thrown is H5G.getObjectNameByIndex: 
  Failed to find name of object in group 2000018 with index 0

Which is called in my findChildren function. The line of code that throws 
the exception is:
string obj_name = H5G.getObjectNameByIndex(curr_group, j);


Here is the code I'm currently using

// Make sure the dataset paths have been discovered before reading.
if (datasets.Count == 0)
        getDatasets();

//open the HDF5 library
H5.Open();

//open the file read-only
H5FileId gh5file = H5F.open(fname, H5F.OpenMode.ACC_RDONLY);

try
{
        // data[row][column]: one row per sample point, one column per dataset.
        double[][] data = new double[numElems][];
        for (int i = 0; i < numElems; i++)
                data[i] = new double[datasets.Count];

        for (int i = 0; i < datasets.Count; i++)
        {
                // 4-byte little-endian floats; the other type/endianness
                // combinations are handled the same way (omitted here).
                if (datasets[i].dtype_size == 4 && datasets[i].order == H5T.Order.LE)
                {
                        float[] dataArray = readArray<float>(gh5file, i);

                        for (int j = 0; j < dataArray.Length; j++)
                        {
                                data[j][i] = dataArray[j];
                        }
                }
                //repeat for Big Endian and 8 byte floats
        }

        using (StreamWriter sw = new StreamWriter(csv_filename))
        {
                //write data[j][i] to the csv file
        }
}
finally
{
        // BUG FIX: release the file handle even if a read throws.
        // Leaked HDF5 identifiers accumulate across files and eventually
        // make later H5G/H5D calls fail.
        H5F.close(gh5file);
        H5.Close();
}



readArray is implemented like so
/// <summary>
/// Reads the full contents of the dataset at index <paramref name="i"/> in
/// the datasets list from the open file into a newly allocated array of T.
/// </summary>
/// <param name="gh5file">An open, readable HDF5 file handle.</param>
/// <param name="i">Index into the datasets list.</param>
/// <returns>An array of datasets[i].numPoints elements of T.</returns>
private T[] readArray<T>(H5FileId gh5file, int i)
{
        H5DataSetId dId = H5D.open(gh5file, datasets[i].name);
        try
        {
                T[] dataArray = new T[datasets[i].numPoints];
                H5Array<T> TArray = new H5Array<T>(dataArray);
                H5DataTypeId dtypeID = H5D.getType(dId);
                try
                {
                        H5D.read(dId, dtypeID, TArray);
                }
                finally
                {
                        // BUG FIX: H5D.getType returns a NEW datatype handle
                        // that must be released with H5T.close.  Leaking one
                        // per dataset per file exhausts the library's open
                        // identifier table after a few files and is the likely
                        // cause of "Failed to find name of object" errors.
                        H5T.close(dtypeID);
                }
                return dataArray;
        }
        finally
        {
                // Close the dataset even when the read throws.
                // (Also removed the unused my_order local.)
                H5D.close(dId);
        }
}

getGroups recurses down and all object path information I care about
/// <summary>
/// Opens the file, seeds the group list with the "/DYNAMIC DATA" group,
/// then recursively discovers every child group/dataset underneath each
/// entry via findChildren.
/// </summary>
public void getGroups()
{
        //open the HDF5 library
        H5.Open();

        //open the file read-only
        H5FileId gh5file = H5F.open(fname, H5F.OpenMode.ACC_RDONLY);
        try
        {
                //read in the root group
                H5GroupId rootGroup = H5G.open(gh5file, "/");
                try
                {
                        //store the names at this level
                        ObjectInfo info = H5G.getObjectInfo(rootGroup, "/DYNAMIC DATA", false);
                        groups.Add(new Group("/DYNAMIC DATA", info.objectType));
                }
                finally
                {
                        // Close the root group as soon as we are done with it;
                        // the recursion below only needs the file handle.
                        H5G.close(rootGroup);
                }

                //find all children of roots children recursively
                for (int i = 0; i < groups.Count; i++)
                {
                        findChildren(groups[i], gh5file);
                }
        }
        finally
        {
                // BUG FIX: release handles even on failure so later files
                // still open cleanly.  (Also removed the unused 'count'
                // local that called H5G.getNumObjects on the root group.)
                H5F.close(gh5file);
                H5.Close();
        }
}

getDatasets just pulls the datasets out of the groups list
/// <summary>
/// Builds the group tree (via getGroups) and then flattens every dataset
/// it contains into the datasets list.
/// </summary>
public void getDatasets()
{
        getGroups();

        foreach (Group group in groups)
        {
                extractDatasets(group, datasets);
        }
}


findChildren is how I read the file info to find the datasets 
/// <summary>
/// Recursively walks the group <paramref name="s"/> in file
/// <paramref name="t"/>, adding every child object to s.children.  For
/// dataset children it records their datatype class, size, point count,
/// byte order and UNITS attribute.
/// </summary>
public static void findChildren(Group s, H5FileId t)
{
        //Get the number of children that belong to current group
        H5GroupId curr_group = H5G.open(t, s.name);
        try
        {
                long count = H5G.getNumObjects(curr_group);

                //for each of the children add them to the parent list
                for (ulong j = 0; j < (ulong)count; j++)
                {
                        //read it by index
                        string obj_name = H5G.getObjectNameByIndex(curr_group, j);
                        ObjectInfo info = H5G.getObjectInfo(curr_group, obj_name, false);

                        s.children.Add(new Group(s.name + "/" + obj_name, info.objectType));
                }

                //For each child we are going to recurse down
                for (int i = 0; i < s.children.Count; i++)
                {
                        //in the GH5 file format there is nothing below dataset
                        //if it is not a dataset, recurse on its children
                        if (s.children[i].type != H5GType.DATASET)
                        {
                                findChildren(s.children[i], t);
                        }
                        else //it is a dataset so log the information
                        {
                                s.children[i].dsetID = H5D.open(t, s.children[i].name);
                                try
                                {
                                        H5DataTypeId dID = H5D.getType(s.children[i].dsetID);
                                        s.children[i].dtypeID = dID;
                                        s.children[i].dtype = H5T.getClass(dID);
                                        s.children[i].dtype_size = H5T.getSize(dID);

                                        // BUG FIX: H5D.getSpace returns a dataspace
                                        // handle that must be released with H5S.close;
                                        // the original leaked one per dataset per file.
                                        H5DataSpaceId spaceID = H5D.getSpace(s.children[i].dsetID);
                                        try
                                        {
                                                s.children[i].numPoints = H5S.getSimpleExtentNPoints(spaceID);
                                        }
                                        finally
                                        {
                                                H5S.close(spaceID);
                                        }

                                        s.children[i].order = H5T.get_order(dID);
                                        s.children[i].unit = Helpers.ReadAttributeString(s.children[i].dsetID, s.children[i].name, "UNITS");

                                        // NOTE(review): dID (kept in s.children[i].dtypeID)
                                        // is still never closed because the field is stored
                                        // on the child.  If no caller uses dtypeID after
                                        // this point — confirm — close it here with
                                        // H5T.close(dID); these leaked datatype handles
                                        // accumulate across files and are the likely cause
                                        // of the "Failed to find name of object" failure.
                                }
                                finally
                                {
                                        // Close the dataset even if an attribute read throws.
                                        H5D.close(s.children[i].dsetID);
                                }
                        }
                }
        }
        finally
        {
                // Close the group handle on every exit path.
                H5G.close(curr_group);
        }
}
_______________________________________________
Hdf-forum is for HDF software users discussion.
[email protected]
http://mail.lists.hdfgroup.org/mailman/listinfo/hdf-forum_lists.hdfgroup.org

Reply via email to