On Mon, Jun 9, 2014 at 10:16 PM, Blaise A Bourdin <bour...@lsu.edu> wrote:
> Hi,
>
> I agree with Barry that since this will not mean any interface
> changes, it can be done with patch releases.

Sounds like a plan.

>> I still can't figure out how to deal with cell sets of different types
>> (say quads and tri).
>
> I have not tried this. I had been focusing on getting higher order (Q2) and
> either quads or tris to work. This all seems to be working correctly now.
> I have been using it to visualize/restart a magma dynamics app with 3
> fields, all with different discretizations.
>
> Nice.

>> All cell and vertex sets seem to be contained in the hdf5 file, but not
>> in a way that is usable by post-processing tools (visit, paraview, ensight).
>
> For restarting, mixed meshes should work fine. They are stored in
>
>   /topology/cells
>            /cones
>            /order
>            /orientation
>
> and the field values are in
>
>   /fields/<name>
>
> For visualization and most post-processing, there are separate arrays
>
>   /viz/topology
>   /geometry
>
> which are exactly what I needed to make Paraview understand the xdmf.
> The fields sampled down to cells and vertices are in
>
>   /vertex_fields/<name>
>   /cell_fields/<name>
>
> I saw that. Does it mean that all heavy data is duplicated in the hdf5
> file, i.e. that /fields contains values that petsc understands and /viz is
> for visualization?

Yes, this is controlled by the viewer format. I saw no way to get xmf to
look at scattered data, so duplication was inevitable.

>> The xdmf generation script bin/pythonscripts/petsc_gen_xdmf.py is quite
>> fragile.
>
> I have not had it fail for me, but would be happy to look at the failure
> you are getting.
>
> Cool.
>
> I am attaching a very simple code that reads an exo file and saves it,
> and two simple exodus meshes.

I have attached a cleaned up version.

> The xmf generation script fails on it, most likely because I don't have a
> /time section in the file. My workaround is to replace line 209 with
> time = [0,1].

The xmf script works on it now.

> When I read the xmf file in visit, I see only one cell set. In paraview, I
> see two blocks "domain" and "domain[1]", both of which contain the entire
> mesh.

The labels marking these sets are in the .h5 file, but I am not sure what
you want in the xmf. Can you show me a simple example? Right now, I am
using the xmf generation from PyLith.

> If I do not interpolate the mesh in DMPlexCreateExodusFromFile,
> petsc_gen_xdmf.py fails.

I just ran it and it worked here. See below.

> DMView fails in parallel. I must be doing something wrong.

There was a bug (overaggressive check) that caused this, but it has been
fixed. Are you using 'next'?

> [0]PETSC ERROR: --------------------- Error Message --------------------------------------------------------------
> [0]PETSC ERROR: Argument out of range
> [0]PETSC ERROR: Point 8 has 0 constraints > -3 dof
> [0]PETSC ERROR: See http://www.mcs.anl.gov/petsc/documentation/faq.html for trouble shooting.
> [0]PETSC ERROR: Petsc Development GIT revision: v3.4.4-4444-g8c25fe2  GIT Date: 2014-06-07 16:01:56 -0500
> [0]PETSC ERROR: ./testHDF5Plex on a Darwin-intel14.0-g named iMac.local by blaise Mon Jun 9 22:14:29 2014
> [0]PETSC ERROR: Configure options CFLAGS= CXXFLAGS= LDFLAGS=-Wl,-no_pie --download-chaco=1 --download-exodusii=1 --download-hdf5=1 --download-metis=1 --download-netcdf=1 --download-parmetis=1 --download-sowing=1 --download-triangle=1 --download-yaml=1 --with-blas-lapack-dir=/opt/intel/composerxe/mkl --with-cmake=cmake --with-debugging=1 --with-mpi-dir=/opt/HPC/mpich-3.0.4-intel14.0 --with-pic --with-shared-libraries=1 --with-vendor-compilers=intel --with-x11=1
> [0]PETSC ERROR: #1 DMCreateDefaultSF() line 3065 in /opt/HPC/petsc-dev/src/dm/interface/dm.c
> [0]PETSC ERROR: #2 DMGetDefaultSF() line 2985 in /opt/HPC/petsc-dev/src/dm/interface/dm.c
> [0]PETSC ERROR: #3 DMLocalToGlobalBegin() line 1737 in /opt/HPC/petsc-dev/src/dm/interface/dm.c
> [0]PETSC ERROR: #4 VecView_Plex_Local_HDF5() line 122 in /opt/HPC/petsc-dev/src/dm/impls/plex/plexhdf5.c
> [0]PETSC ERROR: #5 VecView_Plex_Local() line 86 in /opt/HPC/petsc-dev/src/dm/impls/plex/plex.c
> [0]PETSC ERROR: #6 VecView() line 601 in /opt/HPC/petsc-dev/src/vec/vec/interface/vector.c
> [0]PETSC ERROR: #7 DMPlexWriteCoordinates_HDF5_Static() line 396 in /opt/HPC/petsc-dev/src/dm/impls/plex/plexhdf5.c
> [0]PETSC ERROR: #8 DMPlexView_HDF5() line 485 in /opt/HPC/petsc-dev/src/dm/impls/plex/plexhdf5.c
> [0]PETSC ERROR: #9 DMView_Plex() line 450 in /opt/HPC/petsc-dev/src/dm/impls/plex/plex.c
> [0]PETSC ERROR: #10 DMView() line 648 in /opt/HPC/petsc-dev/src/dm/interface/dm.c
> [0]PETSC ERROR: #11 main() line 79 in /Users/blaise/Development/DMComplex/testHDF5Plex.c
> [0]PETSC ERROR: ----------------End of Error Message -------send entire error message to petsc-ma...@mcs.anl.gov----------
>
> Finally, DMView fails with the mixed element type mesh with the following error message:
>
> Writing to TwoSquaresMixed_seq.h5
> [0]PETSC ERROR: --------------------- Error Message --------------------------------------------------------------
> [0]PETSC ERROR: No support for this operation for this object type
> [0]PETSC ERROR: Visualization topology currently only supports identical cell shapes
> [0]PETSC ERROR: See http://www.mcs.anl.gov/petsc/documentation/faq.html for trouble shooting.
> [0]PETSC ERROR: Petsc Development GIT revision: v3.4.4-4444-g8c25fe2  GIT Date: 2014-06-07 16:01:56 -0500
> [0]PETSC ERROR: ./testHDF5Plex on a Darwin-intel14.0-g named iMac.local by blaise Mon Jun 9 22:11:48 2014
> [0]PETSC ERROR: Configure options CFLAGS= CXXFLAGS= LDFLAGS=-Wl,-no_pie --download-chaco=1 --download-exodusii=1 --download-hdf5=1 --download-metis=1 --download-netcdf=1 --download-parmetis=1 --download-sowing=1 --download-triangle=1 --download-yaml=1 --with-blas-lapack-dir=/opt/intel/composerxe/mkl --with-cmake=cmake --with-debugging=1 --with-mpi-dir=/opt/HPC/mpich-3.0.4-intel14.0 --with-pic --with-shared-libraries=1 --with-vendor-compilers=intel --with-x11=1
> [0]PETSC ERROR: #1 DMPlexWriteTopology_Vertices_HDF5_Static() line 322 in /opt/HPC/petsc-dev/src/dm/impls/plex/plexhdf5.c
> [0]PETSC ERROR: #2 DMPlexView_HDF5() line 488 in /opt/HPC/petsc-dev/src/dm/impls/plex/plexhdf5.c
> [0]PETSC ERROR: #3 DMView_Plex() line 450 in /opt/HPC/petsc-dev/src/dm/impls/plex/plex.c
> [0]PETSC ERROR: #4 main() line 59 in /Users/blaise/Development/DMComplex/testHDF5Plex.c
> [0]PETSC ERROR: ----------------End of Error Message -------send entire error message to petsc-ma...@mcs.anl.gov----------

I need to understand how xmf treats these mixed meshes. This is where help
would be useful. The checkpointing works with them, however.

  Thanks,

     Matt

--
What most experimenters take for granted before they begin their
experiments is infinitely more interesting than any results to which their
experiments lead.
-- Norbert Wiener
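For readers following the /fields vs. /viz discussion above, here is a minimal
sketch of what switching between the two output flavors through the viewer
format could look like. The helper name WriteCheckpointAndViz is invented for
illustration, and treating PETSC_VIEWER_HDF5_VIZ as the format that produces
the duplicated /viz and /vertex_fields//cell_fields arrays is an assumption
based on Matt's description, not code taken from the thread:

#include <petscdmplex.h>
#include <petscviewerhdf5.h>

/* Hypothetical helper (not part of the attached test): write one HDF5 file
   containing both the restart data (/topology/..., /fields/<name>) and the
   visualization arrays, assuming PETSC_VIEWER_HDF5_VIZ is the switch
   referred to above. */
static PetscErrorCode WriteCheckpointAndViz(DM dm, Vec V, const char fname[])
{
  PetscViewer    viewer;
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  ierr = PetscViewerHDF5Open(PetscObjectComm((PetscObject) dm), fname, FILE_MODE_WRITE, &viewer);CHKERRQ(ierr);
  /* Default format: data PETSc can read back for restart */
  ierr = DMView(dm, viewer);CHKERRQ(ierr);
  ierr = VecView(V, viewer);CHKERRQ(ierr);
  /* Viz format: the duplicated, cell/vertex-sampled arrays used by the xdmf */
  ierr = PetscViewerPushFormat(viewer, PETSC_VIEWER_HDF5_VIZ);CHKERRQ(ierr);
  ierr = VecView(V, viewer);CHKERRQ(ierr);
  ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

Pushing and popping the format keeps the checkpoint data in the default layout
while adding the visualization arrays to the same file, which is the
duplication the thread describes.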
static char help[] = "\n";

#include <petscdmplex.h>
#include <petscsf.h>
#include <exodusII.h>
#include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/
#include <petscsf.h>
#include <petscviewerhdf5.h>
#include <petscdmplex.h>

#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **argv)
{
  DM             dm, pdm;
  PetscMPIInt    numproc, rank;
  PetscViewer    hdf5Viewer;
  PetscInt       numFields  = 2;
  PetscInt       numComp[2] = {1,1};
  PetscInt       numDof[6]  = {1, 0, 0, 0, 0, 1}; /* {Vertex,Edge,Cell} */
  PetscInt       bcFields[1] = {0}, numBC = 0;
  IS             bcPoints[1] = {NULL};
  PetscSection   section;
  Vec            V;
  PetscInt       dim;
  char           ifilename[PETSC_MAX_PATH_LEN], ofilename[PETSC_MAX_PATH_LEN], prefix[PETSC_MAX_PATH_LEN], *ext;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc, &argv, NULL, help);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD, &numproc);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
  ierr = PetscOptionsGetString(NULL, "-i", ifilename, sizeof(ifilename), NULL);CHKERRQ(ierr);
  ierr = PetscStrrchr(ifilename, '.', &ext);
  ierr = PetscStrncpy(prefix, ifilename, ext-ifilename);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD, "prefix is %s\n", prefix);CHKERRQ(ierr);

  /* Create DM from exo */
  ierr = DMPlexCreateExodusFromFile(PETSC_COMM_WORLD, ifilename, PETSC_FALSE, &dm);CHKERRQ(ierr);
  ierr = DMPlexDistribute(dm, NULL, 0, NULL, &pdm);CHKERRQ(ierr);
  if (pdm) {ierr = DMDestroy(&dm);CHKERRQ(ierr); dm = pdm;}

  /* Create section */
  ierr = DMPlexGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexCreateSection(dm, dim, numFields, numComp, numDof, numBC, bcFields, bcPoints, NULL, &section);CHKERRQ(ierr);
  ierr = DMSetDefaultSection(dm, section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);

  /* Write mesh */
  ierr = PetscSNPrintf(ofilename, FILENAME_MAX, "%s.h5", prefix);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD, "Writing to %s\n", ofilename);CHKERRQ(ierr);
  ierr = PetscViewerHDF5Open(PetscObjectComm((PetscObject) dm), ofilename, FILE_MODE_WRITE, &hdf5Viewer);CHKERRQ(ierr);
  ierr = DMView_Plex(dm, hdf5Viewer);CHKERRQ(ierr);

  /* Write vectors */
  ierr = DMSetOutputSequenceNumber(dm, 0);CHKERRQ(ierr);
  ierr = DMGetGlobalVector(dm, &V);CHKERRQ(ierr);
  ierr = VecSet(V, 0.0);CHKERRQ(ierr);
  ierr = VecView(V, hdf5Viewer);CHKERRQ(ierr);
  ierr = DMSetOutputSequenceNumber(dm, 1);CHKERRQ(ierr);
  ierr = VecSet(V, 1.0);CHKERRQ(ierr);
  ierr = VecView(V, hdf5Viewer);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(dm, &V);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&hdf5Viewer);CHKERRQ(ierr);

  /* Cleanup */
  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}
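For completeness, a possible way to exercise the attached program together
with the xdmf script discussed above (the mesh file name is a placeholder,
not taken from the thread, and petsc_gen_xdmf.py is assumed to take the .h5
file as its only argument and to write the matching .xmf next to it):

  ./testHDF5Plex -i mesh.exo
  $PETSC_DIR/bin/pythonscripts/petsc_gen_xdmf.py mesh.h5

The program writes mesh.h5 (the prefix is taken from the -i argument), and
the generated mesh.xmf can then be opened in Paraview or Visit.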