Dear developers,
I wrote a code that extracts subDMs corresponding to the various strata
in the Face Sets.
I run into trouble when I view a subDM, or a Vec attached to a subDM,
using the VTK format.
More precisely, the problem only occurs on more than one processor, when
the rank=0 processor has no points on a given subDM.
For instance, when the attached reproducer is run on 2 procs, the
u_01.vtu file (global u Vec mapped to the subDM corresponding to stratum=1)
only includes the header, but no data. All other u_0?.vtu files can
successfully be loaded and viewed in paraview.
The problem does NOT arise when I view the same objects in HDF5 format.
However, my problem with using HDF5 is the following:
while the hdf5 file obtained with DMView can be post-processed with
"petsc/lib/petsc/bin/petsc_gen_xdmf.py" to create a xmf file readable by
paraview,
I do not know how to view the field(s) associated with the DM when the
hdf5 file is obtained from VecView.
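
For reference, what I would have expected to work (a minimal sketch,
untested, assuming the same HDF5 viewer can simply be reused for both
objects) is writing the DM and the Vec into one and the same file:

   PetscCallA(PetscViewerHDF5Open(PETSC_COMM_WORLD, "u.h5", FILE_MODE_WRITE, viewer, ierr))
   PetscCallA(DMView(dm, viewer, ierr))  ! mesh goes into u.h5
   PetscCallA(VecView(u, viewer, ierr))  ! field goes into the same file
   PetscCallA(PetscViewerDestroy(viewer, ierr))

followed by: $PETSC_DIR/lib/petsc/bin/petsc_gen_xdmf.py u.h5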
The reproducer compiles with the latest petsc release.
Thanks,
Aldo
--
Dr. Aldo Bonfiglioli
Associate professor of Fluid Mechanics
Dipartimento di Ingegneria
Universita' della Basilicata
V.le dell'Ateneo Lucano, 10 85100 Potenza ITALY
tel:+39.0971.205203 fax:+39.0971.205215
web: http://docenti.unibas.it/site/home/docente.html?m=002423
program read_gmsh_dmplex
!
! This program meshes the unit cube with tetgen, then extracts as many subDMs
! as the number of strata in the Face Sets (six for the cube)
! each subDM is viewed in a patch_??.vtu or patch_??.h5 file inside ExtractStratumSubmesh
! a global vec is then initialized and mapped into a (smaller) vec defined on each subDM
! each of these smaller vecs is dumped into a u_??.vtu file to be viewed by paraview
!
#include "petsc/finclude/petscdmplex.h"
#include "petsc/finclude/petscdmlabel.h"
#include "petsc/finclude/petscds.h"
use petscdm
implicit none
type(tDM) :: dm
type(tDM), allocatable :: subdm(:)
DMLabel :: label
IS :: values
integer :: ierr
Vec :: u, ureduced
character(len=256) :: filename
character(len=8) :: stringa = "patch_nn"
character(len=7) :: objname
integer :: i, j
integer :: my_pe, numprocs
PetscInt :: ival, nstrata, total_n, bs, nloc, nglo
PetscInt, allocatable :: all_data(:)
PetscInt, pointer :: idx(:)
PetscInt :: ndim, ndofs
PetscViewer :: viewer
PetscBool :: lflag
character(len=PETSC_MAX_PATH_LEN) :: IOBuffer
logical, parameter :: verbose = .false.
! Initialize PETSc
PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER, ierr))
if (ierr /= 0) stop 'Error initializing PETSc'
call MPI_Comm_rank(PETSC_COMM_WORLD, my_pe, ierr)
call MPI_Comm_size(PETSC_COMM_WORLD, numprocs, ierr)
!
! Create DM from command-line options
!
PetscCallA(DMPlexCreate(PETSC_COMM_WORLD, dm, ierr))
PetscCallA(DMSetFromOptions(dm, ierr))
PetscCallA(DMGetDimension(dm, ndim, ierr))
objname = "0D plex"
write (objname(1:1), FMT="(I1.1)") ndim
PetscCallA(PetscObjectSetName(dm, trim(objname), ierr))
PetscCallA(DMSetup(dm, ierr))
!
! ndofs can be either 1 or ndim
!
PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, "-ndofs", ndofs, lflag, ierr))
if(.not.lflag)then
ndofs = 1
endif
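!
! (example invocation, assuming the executable is called read_gmsh_dmplex
!  and the attached options are stored in a file named petsc.rc:
!     mpiexec -n 2 ./read_gmsh_dmplex -ndofs 3 -options_file petsc.rc )
!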
! create a section
call createsectionalternate(dm,ndofs)
PetscCallA(DMCreateDS(dm, ierr)) ! this is needed when writing an HDF5 file
!
! View the mesh
!
PetscCallA(DMView(dm, PETSC_VIEWER_STDOUT_WORLD, ierr))
!
! use: -dm_view hdf5:mesh.h5 for HDF5
!
! use: -dm_view vtk:mesh.vtu for VTK
!
PetscCallA(DMViewFromOptions(dm, PETSC_NULL_OBJECT, '-dm_view', ierr))
!
PetscCallA(DMGetLabel(dm, "Face Sets", label, ierr))
if (PetscObjectIsNull(label)) then
write (IOBuffer, *) "Face Sets returned a NULL label\n"
PetscCallA(PetscPrintf(PETSC_COMM_WORLD, IOBuffer, ierr)) ! print BEFORE stopping
stop
else
write (IOBuffer, *) "Face Sets returned a GOOD label\n"
end if
PetscCallA(PetscPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
!
! create a global vector
!
PetscCallA(DMCreateGlobalVector(dm, u, ierr))
PetscCallA(PetscObjectSetName(u, "Unknown_", ierr))
PetscCallA(VecSetBlockSize(u, ndofs, ierr))
PetscCallA(VecGetBlockSize(u, bs, ierr))
!
! sets the solution equal to the distance from the origin when ndofs = 1
! or x,y,z when ndofs = ndim
!
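! (i.e. u(v) = sqrt(x**2+y**2+z**2) at each vertex v when ndofs = 1,
!  or u(v) = (x,y,z) componentwise when ndofs = ndim; see FormExactSolution)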
call FormExactSolution(dm,u,ierr)
!
! save the solution to a file
!
! use, for instance, -vec_view hdf5:u.h5 -vec_view vtk:u.vtu
!
! the h5 format cannot be loaded into paraview
!
PetscCallA(VecViewFromOptions(u, PETSC_NULL_OBJECT, '-vec_view', ierr)) ! use PetscCallA (not PetscCall) in the main program
!
if (verbose) PetscCallA(DMLabelView(label, PETSC_VIEWER_STDOUT_SELF, ierr))
!
! retrieve info on Face Sets and related strata
!
PetscCallA(DMLabelGetValueIS(label, values, ierr))
if (verbose) PetscCallA(ISView(values, PETSC_VIEWER_STDOUT_SELF, ierr))
PetscCallA(ISGetSize(values, nstrata, ierr))
!
PetscCallA(ISGetIndices(values, idx, ierr))
write (IOBuffer, FMT=*) "[", my_pe, "] found these ", nstrata, " strata in Face Sets: ", (idx(j), j=1, nstrata), "\n"
PetscCallA(PetscSynchronizedPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
!
! collect all total_n strata from all procs
! each proc needs to be aware of ALL the strata in the global mesh,
! not just those seen by the proc (which might be none)
!
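! (an untested alternative might be to let PETSc gather the label values,
!  e.g. something along the lines of
!     PetscCallA(ISAllGather(values, allvalues, ierr))
!  followed by duplicate removal; the hand-rolled MPI gather in
!  query_strata is kept here for clarity)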
call query_strata(idx, all_data, total_n)
PetscCallA(ISRestoreIndices(values, idx, ierr))
allocate (subdm(total_n))
!
write (IOBuffer, FMT=*) "[", my_pe, "] found these ", total_n, " strata in Face Sets: ", (all_data(j), j=1, total_n), "\n\n\n"
PetscCallA(PetscSynchronizedPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
PetscCallA(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT, ierr))
!
! loop over ALL strata (not only those known to my_pe) to extract the SubDM
!
do i = 1, total_n
ival = all_data(i)
write (stringa(7:8), fmt="(I2.2)") ival
call ExtractStratumSubmesh(dm, trim(stringa), ival, subdm(i))
call createsectionalternate(subdm(i),ndofs) ! attach a section to each subdm
PetscCallA(DMCreateDS(subdm(i), ierr))
end do
PetscCallA(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT, ierr))
do i = 1, total_n
PetscCallA(DMView(subdm(i), PETSC_VIEWER_STDOUT_WORLD, ierr))
end do
!
! copy data from vector u, which is attached to the DM, to (smaller) vectors attached to the subDMs
!
do i = 1, total_n
ival = all_data(i)
write (stringa(7:8), fmt="(I2.2)") ival
PetscCallA(DMCreateGlobalVector(subdm(i), ureduced, ierr)) ! we do not need to access ghosts to make a copy
PetscCallA(PetscObjectSetName(ureduced, "Unknown_", ierr))
PetscCallA(VecGetSize(ureduced, nglo, ierr))
PetscCallA(VecGetLocalSize(ureduced, nloc, ierr))
write (IOBuffer, FMT=*) "[", my_pe, "] Global reduced Vec has global/local size: ", nglo, nloc, " in stratum : ", ival, "\n"
PetscCallA(PetscSynchronizedPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
!
! copy Vec u (which is attached to the DM) to a "reduced" Vec attached to the subDM
!
call copy_data(dm, subdm(i), u, ureduced)
!
! view the reduced vector to a VTK file: this is where trouble occurs whenever
! the rank=0 proc has no data for a given subDM
!
! filename = "u_00.h5" ! (toggle for HDF5 output)
filename = "u_00.vtu"
write (filename(3:4), FMT="(I2.2)") ival
write (IOBuffer, FMT=*) "Backing up the solution to ", trim(filename), "\n"
PetscCallA(PetscSynchronizedPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
! PetscCall(PetscViewerHDF5Open(PETSC_COMM_WORLD, trim(filename), FILE_MODE_WRITE, viewer, ierr))
#if 1
PetscCallA(PetscViewerCreate(PETSC_COMM_WORLD, viewer, ierr))
! PetscCallA(PetscViewerSetType(viewer, PETSCVIEWERHDF5, ierr)) ! HDF5 viewer
PetscCallA(PetscViewerSetType(viewer, PETSCVIEWERVTK, ierr))
PetscCallA(PetscViewerFileSetMode(viewer, FILE_MODE_WRITE, ierr))
PetscCallA(PetscViewerFileSetName(viewer, trim(filename), ierr))
#endif
PetscCallA(VecView(ureduced, viewer, ierr))
PetscCallA(PetscViewerDestroy(viewer, ierr))
PetscCallA(VecDestroy(ureduced, ierr))
end do
PetscCallA(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT, ierr))
do i = 1, total_n
if (PetscObjectIsNull(subdm(i))) then
write (6, *) "SubDM # ", i, " is NULL"
else
PetscCallA(DMDestroy(subdm(i), ierr))
end if
end do
deallocate (subdm)
! Destroy DMPlex object
PetscCallA(DMDestroy(dm, ierr))
! Finalize PETSc
PetscCallA(PetscFinalize(ierr))
contains
!
subroutine CreateSectionAlternate(dm,ndofs)
! this subroutine is taken from https://petsc.org/release/src/dm/impls/plex/tutorials/ex1f90.F90.html
#include "petsc/finclude/petscdmplex.h"
#include "petsc/finclude/petscdmlabel.h"
use petscdmplex
use petscsys
implicit none
DM :: dm
PetscInt, intent(in) :: ndofs
PetscSection :: section
PetscInt :: ndim, numFields, numBC
PetscInt :: i
PetscInt, target, dimension(3) :: numComp
PetscInt, pointer :: pNumComp(:)
PetscInt, target, dimension(12) :: numDof
PetscInt, pointer :: pNumDof(:)
PetscInt, parameter :: zero = 0, one = 1, two = 2
PetscInt :: f, d
PetscErrorCode :: ierr
character(len=PETSC_MAX_PATH_LEN) :: IOBuffer
PetscMPIInt :: my_pe
logical, parameter :: verbose = .false.
character(len=3) :: varname = "u_0"
call MPI_Comm_rank(PETSC_COMM_WORLD, my_pe, ierr)
PetscCall(DMGetDimension(dm, ndim, ierr))
!
! when this routine is called passing a subdm, ndim will be different (lower)
! from the value returned by calling DMGetCoordinateDimension
!
! Create ndofs scalar fields
numFields = ndofs
do i = 1, numFields
numComp(i) = 1
enddo
pNumComp => numComp
do i = 1, numFields*(ndim + 1)
numDof(i) = 0
end do
! numDof[f*(dim+1)+d] gives the number of dof for field f on points of dimension d. For instance, numDof[1] is the number of dof for field 0 on each edge.
! a point of dimension d=0 is a vertex
! a point of dimension d=1 is an edge
! points of dimension d=ndim-1 are the faces; in 2D they coincide with the edges
! points of dimension d=ndim are the cells; in 2D they coincide with the faces
!
!     1D      |     2D      |     3D      |
!-------------+-------------+-------------+
!   vertex    |   vertex    |   vertex    |  d=0
!-------------+-------------+-------------+
!   edge      |   edge      |   edge      |  d=1
!-------------+-------------+-------------+
!   vertex    |   edge      |   triangle  |  d=dim-1
!-------------+-------------+-------------+
!   edge      |   triangle  |   tetra     |  d=dim
!-------------+-------------+-------------+
!
!
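! worked example (assuming ndim = 3, numFields = 2, dofs on vertices only):
! numDof holds numFields*(ndim+1) = 8 entries, laid out field-major:
!   field 0: numDof(1:4) = (1, 0, 0, 0)  -> one dof per vertex
!   field 1: numDof(5:8) = (1, 0, 0, 0)  -> one dof per vertex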
! f = numFields - 1 ! field #
d = 0 ! vertices
do f = 0, numFields - 1
!
! numDof[f*(dim+1)+d] gives the number of dof for field f on points of dimension d. For instance, numDof[1] is the number of dof for field 0 on each edge.
!
! Let u be defined on vertices
numDof(f*(ndim + 1) + d + 1) = 1
! ^
! |
! |
! fortran 1-based
!
! Let v be defined on cells
! numDof(1*(ndim+1)+ndim+1) = ndim
! Let v be defined on faces
! numDof(2*(ndim+1)+ndim) = ndim-1
!
enddo
#if 1
do i = 1, numFields*(ndim + 1)
write (IOBuffer, *) "numDof(", i, ")= ", numDof(i), "\n"
PetscCall(PetscPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
end do
#endif
pNumDof => numDof
!
! which stratum should I select?
!
! Get the nof points in depth=1 that are marked; should be 2 in 1D
!
! numBC - The number of boundary conditions
!
numBC = 0 ! Bcs are handled by zeroing selected rows
!
! bcField - An array of size numBC giving the field number for each boundary condition
!
! bcComps - [Optional] An array of size numBC giving an IS holding the field components to which each boundary condition applies
!
! bcPoints - An array of size numBC giving an IS holding the DMPLEX points to which each boundary condition applies
!
! Create a PetscSection with this data layout
PetscCall(DMSetNumFields(dm, numFields, ierr))
! PetscErrorCode DMPlexCreateSection(DM dm, DMLabel label[], const PetscInt numComp[], const PetscInt numDof[], PetscInt numBC, const PetscInt bcField[], const IS bcComps[], const IS bcPoints[], IS perm, PetscSection *section)
! PetscCall(DMPlexCreateSection(dm,nolabel,pNumComp,pNumDof,numBC,pBcField,pBcCompIS,pBcPointIS,PETSC_NULL_IS,section,ierr))
PetscCall(DMPlexCreateSection(dm,PETSC_NULL_DMLABEL_ARRAY,numComp,numDof,numBC,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_IS_ARRAY,PETSC_NULL_IS_ARRAY,PETSC_NULL_IS,section, ierr))
! PetscCall(DMPlexCreateSection(dm,nolabel,pNumComp,pNumDof,numBC,pBCField,PETSC_NULL_IS,PETSC_NULL_IS,PETSC_NULL_IS,section,ierr))
! Name the Field variables
do f = 0, numFields-1
write(varname(3:3),FMT="(I1.1)")f
PetscCall(PetscSectionSetFieldName(section, f, varname, ierr))
enddo
if (verbose) PetscCall(PetscSectionView(section, PETSC_VIEWER_STDOUT_WORLD, ierr))
! Tell the DM to use this data layout
PetscCall(DMSetLocalSection(dm, section, ierr))
! Cleanup
PetscCall(PetscSectionDestroy(section, ierr))
return
end subroutine CreateSectionAlternate
subroutine query_strata(strata, all_data, total_n)
!
! strata contains the strata known to the calling processor (there might be none)
! all_data contains ALL total_n strata in the DM
!
#include "petsc/finclude/petscsys.h"
use mpi
use petscsys
! note: MPI_INTEGER in the calls below assumes PetscInt has the default
! integer kind, i.e. PETSc built without --with-64-bit-indices
PetscInt, intent(in) :: strata(:)
PetscInt, intent(out), allocatable :: all_data(:)
PetscInt, intent(out) :: total_n
integer :: ierr, rank, nprocs
integer, allocatable :: recvcounts(:), displs(:)
integer :: strata_n
integer :: i
call MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr)
call MPI_Comm_size(PETSC_COMM_WORLD, nprocs, ierr)
strata_n = size(strata)
! Step 1: gather all lengths
allocate (recvcounts(nprocs))
call MPI_Allgather(strata_n, 1, MPI_INTEGER, recvcounts, 1, MPI_INTEGER, PETSC_COMM_WORLD, ierr)
! Step 2: compute displacements
allocate (displs(nprocs))
displs(1) = 0
do i = 2, nprocs
displs(i) = displs(i - 1) + recvcounts(i - 1)
end do
total_n = sum(recvcounts)
allocate (all_data(total_n))
! Step 3: gather all data
call MPI_Allgatherv(strata, strata_n, MPI_INTEGER, &
all_data, recvcounts, displs, MPI_INTEGER, PETSC_COMM_WORLD, ierr)
! Step 4: remove duplicates locally
if (total_n > 0) then
call sortsp(all_data, total_n)
end if
end subroutine query_strata
!
subroutine ExtractStratumSubmesh(dm, bctype, stratumValue, subdm)
!
! extracts subMeshes corresponding to a given stratumValue of the Face Sets
!
DM :: dm, subdm
PetscInt, intent(in) :: stratumValue
character(len=*), intent(in) :: bctype
DMLabel :: fslabel, sublabel
IS :: points
integer :: ierr
PetscBool :: markedFaces = .true.
integer :: nitems, height
PetscMPIInt :: my_pe, numprocs
PetscInt :: ibgn, iend, i
PetscInt, pointer :: x_vv(:)
! PetscViewer :: viewer
character(len=PETSC_MAX_PATH_LEN) :: IOBuffer
call MPI_Comm_rank(PETSC_COMM_WORLD, my_pe, ierr)
call MPI_Comm_size(PETSC_COMM_WORLD, numprocs, ierr)
!
write (IOBuffer, *) "Working on ", bctype, "\n"
PetscCall(PetscPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
!
! Get the face set label
!
PetscCall(DMGetLabel(dm, "Face Sets", fslabel, ierr))
if (PetscObjectIsNull(fslabel)) then
!
! this should never occur, unless the mesh has no Face Sets
!
write (IOBuffer, *) "[", my_pe, "] Face Sets returned a NULL label when stratum = ", stratumValue, "\n"
PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
error stop
end if
PetscCall(DMLabelGetStratumIS(fslabel, stratumValue, points, ierr))
if (PetscObjectIsNull(points)) then
write (IOBuffer, *) "[", my_pe, "] DMLabelGetStratumIS returned a NULL IS when stratum = ", stratumValue, "\n"
PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
nitems = 0
else
PetscCall(ISGetSize(points, nitems, ierr)) ! Is the IS local ?
end if
PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT, ierr))
write (IOBuffer, *) "[", my_pe, "] There are ", nitems, " points in ", trim(bctype), " with stratum ", stratumValue, "\n"
PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
PetscCallA(DMLabelCreate(PETSC_COMM_WORLD, trim(bctype), sublabel, ierr))
if (nitems > 0) then
PetscCall(ISGetIndices(points, x_vv, ierr))
do i = 1, nitems
PetscCall(DMLabelSetValue(sublabel, x_vv(i), stratumValue, ierr))
end do
PetscCall(ISRestoreIndices(points, x_vv, ierr))
PetscCall(ISDestroy(points, ierr))
end if
PetscCallA(DMPlexLabelComplete(dm, sublabel, ierr))
!
PetscCallA(DMPlexCreateSubmesh(dm, subLabel, stratumValue, markedFaces, subdm, ierr))
PetscCall(DMLabelDestroy(sublabel, ierr))
#if 1
do height = 0, 3
PetscCall(DMPlexGetHeightStratum(subdm, height, ibgn, iend, ierr))
nitems = iend - ibgn
write (IOBuffer, *) "[",my_pe,"] There are ", nitems, " points @ height ",height," in ", trim(bctype), " with stratum ", stratumValue, "\n"
PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
end do
#endif
PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT, ierr))
!
! options prefix will be -patch_nn_dm_view
!
! we use the options db to choose between the vtk or hdf5 format
!
PetscCall(PetscObjectSetName(subdm, trim(bctype), ierr))
PetscCall(DMSetOptionsPrefix(subdm, trim(bctype)//"_", ierr))
PetscCall(DMViewFromOptions(subdm, PETSC_NULL_OBJECT, '-dm_view', ierr))
return
end subroutine ExtractStratumSubmesh
!
subroutine copy_data(dm, subdm, vfull, vreduced)
!
! transfer data from the full vector attached to the dm
! to the reduced vector attached to the subdm
!
DM :: dm, subdm
IS :: subpointIS, vtxIS
PetscSection :: FullDataSection
Vec :: vfull, vreduced
PetscInt :: ibgn, iend, height, nitems, point, i, ifull, offset, ndofs, nsize, depth, ni, bs
PetscInt, pointer :: ptr(:)
PetscMPIInt :: my_pe, numprocs
PetscErrorCode :: errorcode, ierr
character(len=PETSC_MAX_PATH_LEN) :: IOBuffer
logical, parameter :: verbose = .false.
call MPI_Comm_rank(PETSC_COMM_WORLD, my_pe, ierr)
call MPI_Comm_size(PETSC_COMM_WORLD, numprocs, ierr)
if( ndim .ne. 3 )then
write (IOBuffer, *) "[", my_pe, "] Subr copy_data currently works with 3D datasets only \n"
PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT, ierr))
return
endif
!
! Returns an IS covering the entire subdm chart with the original points as data
!
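! (for illustration: if on this rank subpointIS = [2, 7, 11, ...], then
!  point 0 of the subdm corresponds to point 2 of the dm, point 1 to
!  point 7, and so on)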
PetscCall(DMPlexGetSubpointIS(subdm, subpointIS, ierr))
PetscCall(PetscObjectSetName(subpointIS, "subpoint IS", ierr))
if (verbose) PetscCall(ISView(subpointIS, PETSC_VIEWER_STDOUT_WORLD, ierr))
!
! subpointIS has ALL points, not just vertices
!
PetscCall(VecGetBlockSize(vfull, bs, ierr))
PetscCall(ISGetLocalSize(subpointIS, nsize, ierr))
!
height = 3 ! vertices
PetscCall(DMPlexGetHeightStratum(subdm, height, ibgn, iend, ierr))
nitems = iend - ibgn ! includes off-processor points
!
!
! height = 3 --> vertices
! height = 2 --> edges
! height = 1 --> faces
! height = 0 --> cells
!
!
PetscCall(ISGetIndices(subpointIS, ptr, ierr)) ! subpointIS MUST NOT be destroyed
!
PetscCall(DMGetGlobalSection(dm, FullDataSection, ierr))
!
allocate (idx(nitems*bs)) ! idx is the host program's pointer, reused here as scratch
i = 0
do point = ibgn, iend - 1 ! loop over points of the subdm
ifull = ptr(point + 1) ! corresponding point in the dm; should be a vertex
! let us check if it is a vertex
PetscCall(DMPlexGetPointDepth(dm, ifull, depth, ierr))
if (depth .ne. 0) then
write (IOBuffer, *) "[", my_pe, "] Unrecoverable error: point ", ifull, &
" in the DM has depth ", depth, "\n"
PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
errorcode = 13
call MPI_Abort(PETSC_COMM_WORLD, errorcode, ierr)
end if
!
PetscCall(PetscSectionGetDof(FulldataSection, ifull, ndofs, ierr))
if (ndofs < 0) then
! off-processor: do nothing
if (numprocs == 1) then
write (IOBuffer, *) "When run on 1 proc I should NOT encounter negative ndofs: ", ndofs, "\n"
PetscCallA(PetscPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
errorcode = 10
call MPI_Abort(PETSC_COMM_WORLD, errorcode, ierr)
else
#if 0
write (IOBuffer, *) "[",my_pe,"] point ", point, " in the subdm returned Ndofs = ", ndofs, "\n"
PetscCallA(PetscSynchronizedPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
#endif
end if
elseif (ndofs == bs) then
do j = 0, ndofs - 1
i = i + 1
PetscCall(PetscSectionGetOffset(FulldataSection, ifull, offset, ierr))
idx(i) = offset + j
end do
else ! ndofs is 0 or does not match the block size
write (IOBuffer, *) "Ndofs = ", ndofs, " while I expect it to be ", bs, "\n"
PetscCallA(PetscPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT, ierr))
errorcode = 10
call MPI_Abort(PETSC_COMM_WORLD, errorcode, ierr)
end if
end do
ni = i
PetscCall(ISRestoreIndices(subpointIS, ptr, ierr))
!
PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, ni, idx, PETSC_COPY_VALUES, vtxis, ierr))
PetscCallA(VecGetLocalSize(vreduced, nsize, ierr))
if (nsize .ne. ni) then
write (IOBuffer, *) "[", my_pe, "] There are ", nsize, " LOCAL entries in the reduced array, ",ni," in the IS and ", nitems, " in the subdm\n"
PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT, ierr))
errorcode = 12
call MPI_Abort(PETSC_COMM_WORLD, errorcode, ierr)
end if
PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT, ierr))
!
! mode == SCATTER_FORWARD: vfull[is[i]] = vreduced[i]
! mode == SCATTER_REVERSE: vreduced[i] = vfull[is[i]]
!
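! (illustration with bs = 1: if idx = (/5, 9/), SCATTER_REVERSE copies the
!  global entries 5 and 9 of vfull (0-based) into vreduced(1) and
!  vreduced(2), respectively)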
PetscCall(VecISCopy(vfull, vtxis, SCATTER_REVERSE, vreduced, ierr))
deallocate (idx)
PetscCall(ISDestroy(vtxis, ierr))
if (verbose) PetscCall(VecView(vreduced, PETSC_VIEWER_STDOUT_WORLD, ierr))
!
!
end subroutine copy_data
!
subroutine sortsp(iarr, n2)
!-----------------------------------------------------------------------
! sorts integers in ascending order and suppresses multiple occurrences
! input/output
! iarr = array containing the integers; on exit the first n2 entries
!        hold the sorted, distinct values
! output
! n2 = number of distinct integers
!-----------------------------------------------------------------------
PetscInt, intent(inout) :: iarr(:)
PetscInt, intent(out) :: n2
PetscInt :: n1, ind, j, k
n1 = size(iarr, DIM=1)
10 continue
ind = 0
do j = 2, n1
if (iarr(j) .lt. iarr(j - 1)) then
k = iarr(j)
iarr(j) = iarr(j - 1)
iarr(j - 1) = k
ind = 1
end if
end do
if (ind .ne. 0) goto 10
n2 = min(n1, 1)
do j = 2, n1
if (iarr(j) .gt. iarr(j - 1)) then
n2 = n2 + 1
iarr(n2) = iarr(j)
end if
end do
end subroutine sortsp
!
subroutine FormExactSolution(dm,u,ierr)
!
! build the initial solution: we only need to initialize processor-owned vertices, I believe
!
#include <petsc/finclude/petsc.h>
implicit none
DM, intent(in) :: dm
Vec, intent(inout) :: u
PetscErrorCode, intent(out) :: ierr
PetscInt :: size,depth,vbgn,vend
PetscReal :: radius
PetscReal, allocatable :: xyz(:)
PetscInt :: i,j,k,ioff,joff,nitems,ifail,ndim
PetscScalar, pointer :: xx_g(:),xx_u(:)
Vec :: coordVec
Vec :: localu
character(len=PETSC_MAX_PATH_LEN) :: IOBuffer
logical, parameter :: verbose = .false.
PetscMPIInt :: my_pe,errorcode
character(len=25) :: subname = "FormExactSolutionSteadyXd"
call MPI_Comm_rank(PETSC_COMM_WORLD, my_pe, ierr)
PetscCall(DMGetDimension(dm, ndim, ierr))
write(subname(24:24),FMT="(I1.1)")ndim
!
allocate(xyz(ndim))
write(IOBuffer,FMT=*)"Sub ",subname,"\n"
PetscCall(PetscPrintf(PETSC_COMM_WORLD, IOBuffer, ierr))
!
! use DMGetCoordinatesLocal or it will NOT work in the parallel case;
! is this true? It should work also without accessing the ghost vertices
PetscCall(DMGetCoordinatesLocal(dm,coordVec,ierr))
PetscCall(VecGetSize(coordVec,size,ierr))
! call VecView(coordVec,PETSC_VIEWER_STDOUT_WORLD, ierr)
!
! do I need the CoordinateSection ?
!
! PetscCall(DMGetCoordinateSection(dm, coordSection,ierr))
!
! I need to retrieve the LocalVector or Dirichlet nodes will be missing
PetscCall(DMGetLocalVector(dm, localu, ierr ))
!
! Depth indexing is related to topological dimension. Depth stratum 0 contains the lowest topological dimension points, often "vertices".
! If the mesh is "interpolated" (see DMPlexInterpolate()), then depth stratum 1 contains the next higher dimension, e.g., "edges".
!
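! (for this 3D interpolated mesh: depth 0 = vertices, depth 1 = edges,
!  depth 2 = faces, depth 3 = cells)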
depth = 0 ! pick up the vertices
PetscCall(DMPlexGetDepthStratum(dm, depth, vbgn, vend, ierr))
nitems = vend-vbgn
if(nitems*ndim.NE.size)then
write(IOBuffer,FMT=*)"Sub ",subname," : there is a mismatch ",nitems*ndim,size," on PE# ",my_pe,"\n"
PetscCallA(PetscPrintf(PETSC_COMM_SELF, IOBuffer, ierr))
ifail = nitems*ndim-size
else
ifail = 0
endif
if(ifail.NE.0)call PetscError(PETSC_COMM_WORLD,1,PETSC_ERROR_INITIAL,'There is a mismatch in initialize')
!
PetscCall(VecGetArray(localu,xx_u,ierr))
PetscCall(VecGetArrayRead(coordVec,xx_g,ierr))
do j = 1,nitems ! loop over the vertices
ioff = (j-1)*ndim
joff = (j-1)*ndofs
do i = 1, ndim
xyz(i) = xx_g(ioff+i)
enddo
if(verbose)then
write(IOBuffer,FMT=*)"Sub ",subname," : Coords of vertex ",j," on PE# ",my_pe," is ",(xyz(k),k=1,ndim)," \n"
PetscCallA(PetscPrintf(PETSC_COMM_SELF, IOBuffer, ierr))
endif
radius = norm2(xyz(1:ndim))
if( ndofs == 1 )then
xx_u(j) = radius
elseif( ndofs == ndim )then
do k = 1, ndofs
xx_u(joff+k) = xyz(k)
enddo
else
write(IOBuffer,FMT=*)"Sub ",subname," : ndofs = ",ndofs," should be either 1 or ",ndim," on PE#",my_pe,"\n"
PetscCallA(PetscPrintf(PETSC_COMM_SELF, IOBuffer, ierr))
errorcode = -1
Call MPI_abort(PETSC_COMM_WORLD,errorcode,ierr)
endif
enddo
PetscCall(VecRestoreArrayRead(coordVec,xx_g,ierr))
PetscCall(VecRestoreArray(localu,xx_u,ierr))
! call VecView(localu,PETSC_VIEWER_STDOUT_WORLD, ierr)
PetscCall(DMLocalToGlobal(dm, localu, INSERT_VALUES, u, ierr))
PetscCall(DMRestoreLocalVector(dm, localu, ierr ))
return
end subroutine FormExactSolution
!
end program read_gmsh_dmplex
-dm_plex_dim 3
-dm_plex_shape box
-dm_plex_box_faces 20,20,20
-dm_plex_box_lower 0.,0.,0.
-dm_plex_box_upper 1.,1.,1.
##-dm_plex_filename cube6.msh
##-dm_plex_simplex false
-dm_plex_simplex true
-dm_plex_interpolate
##-dm_plex_check_all
##-dm_plex_filename /home/abonfi/grids/3D/MASA_ns3d/unnested/cube1/cube1.msh
#
# read a solution from an existing file
#
#-viewer_type hdf5
#-viewer_format vtk
#-viewer_filename /home/abonfi/testcases/3D/scalar/advdiff/hiro/DMPlex/Re100/cube0/u.h5
#
##-viewer_binary_filename /home/abonfi/testcases/3D/scalar/advdiff/hiro/DMPlex/Re100/cube1/sol.bin
#
# and write it to u.*
#
###-vec_view hdf5:u.h5
-vec_view vtk:u.vtu
##
## I can read both mesh.h5 and mesh.vtu in paraview
##
-dm_view hdf5:mesh.h5
##-dm_view vtk:mesh.vtu
#
# uncomment the following to write each boundary patch separately in an HDF5 file
# works both serial and parallel
#
-patch_01_dm_view hdf5:patch_01.h5
-patch_02_dm_view hdf5:patch_02.h5
-patch_03_dm_view hdf5:patch_03.h5
-patch_04_dm_view hdf5:patch_04.h5
-patch_05_dm_view hdf5:patch_05.h5
-patch_06_dm_view hdf5:patch_06.h5
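#
# the resulting patch_0?.h5 files can then be post-processed with, e.g.:
#    $PETSC_DIR/lib/petsc/bin/petsc_gen_xdmf.py patch_01.h5
# to produce an xmf file readable by paraview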
#
# uncomment the following to write each boundary patch separately in a VTK file
# it works on one processor, but fails whenever the rank=0 processor has no points
# on a given submesh
#
#
#-patch_01_dm_view vtk:patch_01.vtu
#-patch_02_dm_view vtk:patch_02.vtu
#-patch_03_dm_view vtk:patch_03.vtu
#-patch_04_dm_view vtk:patch_04.vtu
#-patch_05_dm_view vtk:patch_05.vtu
#-patch_06_dm_view vtk:patch_06.vtu
#
#
#
##-dm_plex_view_labels "marker"
##-dm_plex_view_labels "Face Sets"
-petscpartitioner_view
####-dm_petscsection_view
-options_left