Dear PETSc Team!

I ran into an issue when writing out DMPlex objects through HDF5.


The issue comes from a DMLabel that has entries only on non-local mesh points.
The DMLabel write only includes the local part of the label, which leads to
a zero-sized write for the index set.
That in itself seems to be fine, except that the HDF5 chunk size is then set
to zero, which is not allowed.
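
For reference, here is a tiny standalone sketch (not part of the attached
patch, just to illustrate the HDF5 behavior I am hitting): H5Pset_chunk()
rejects a chunk dimension of 0, while clamping it to at least 1, as the quick
fix below does, is accepted.

#include <hdf5.h>
#include <stdio.h>

int main(void)
{
  hid_t   dcpl = H5Pcreate(H5P_DATASET_CREATE); /* dataset creation property list */
  hsize_t chunkDims[1] = {0};                   /* a chunk dim of 0, as produced by the zero-sized IS write */
  herr_t  status;

  status = H5Pset_chunk(dcpl, 1, chunkDims);    /* fails: "all chunk dimensions must be positive" */
  printf("chunk dim 0 -> status %d\n", (int)status);

  chunkDims[0] = 1;                             /* clamp to 1, as in the patch below */
  status = H5Pset_chunk(dcpl, 1, chunkDims);    /* succeeds */
  printf("chunk dim 1 -> status %d\n", (int)status);

  H5Pclose(dcpl);
  return 0;
}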

I added a minimal example to illustrate the error.
It creates a 2D DMPlex in serial, distributes it, labels the non-local
points in the mesh, and dumps it to HDF5 via PetscObjectViewFromOptions.
Run with:

   make plex.h5

I also attached a quick fix that clamps the chunk size to at least 1.

Please let me know if you need anything else, and also whether this is expected
behavior... I could certainly live with the fact that DMLabel is not supposed
to work this way.

Many thanks,

Fabian

From a8fb918b6f1ef49b8b14c5c492581ff84d484eb6 Mon Sep 17 00:00:00 2001
From: "Fabian.Jakub" <fab...@jakub.com>
Date: Thu, 14 Feb 2019 13:21:28 +0100
Subject: [PATCH] fix hdf5 chunksizes of 0

  The chunk size must not be 0;
  H5Pset_chunk(chunkspace, dim, chunkDims) will otherwise give an
  error.
  This happened, for example, when dumping a DMLabel inside a DMPlex that
  has entries only on non-local points.
---
 src/vec/is/is/impls/general/general.c | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/vec/is/is/impls/general/general.c b/src/vec/is/is/impls/general/general.c
index c6930cf..476a149 100644
--- a/src/vec/is/is/impls/general/general.c
+++ b/src/vec/is/is/impls/general/general.c
@@ -264,7 +264,7 @@ static PetscErrorCode ISView_General_HDF5(IS is, PetscViewer viewer)
   ierr = PetscHDF5IntCast(N/bs,dims + dim);CHKERRQ(ierr);
 
   maxDims[dim]   = dims[dim];
-  chunkDims[dim] = dims[dim];
+  chunkDims[dim] = PetscMax(1,dims[dim]);
   ++dim;
   if (bs >= 1) {
     dims[dim]      = bs;
-- 
2.7.4

program main
#include "petsc/finclude/petsc.h"

      use petsc
      implicit none

      PetscErrorCode :: ierr
      PetscInt, parameter :: petscint_dummy=0
      integer, parameter :: pi=kind(petscint_dummy)

      type(tDM) :: dm, dmdist

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr); CHKERRQ(ierr)

      call create_plex(PETSC_COMM_WORLD, dm)
      call PetscObjectViewFromOptions(dm, PETSC_NULL_VEC, "-show_serial_plex", ierr); CHKERRQ(ierr)

      call distribute_dmplex(dm, dmdist)
      call PetscObjectViewFromOptions(dmdist, PETSC_NULL_VEC, "-show_dist_plex", ierr); CHKERRQ(ierr)

      call label_non_local_points(dmdist)
      call PetscObjectViewFromOptions(dmdist, PETSC_NULL_VEC, "-show_labeled_plex", ierr); CHKERRQ(ierr)

      call DMDestroy(dmdist, ierr);CHKERRQ(ierr)
      call DMDestroy(dm, ierr);CHKERRQ(ierr)
      call PetscFinalize(ierr)
      contains

        subroutine create_plex(comm, dm)
          integer, intent(in) :: comm
          type(tDM), intent(out) :: dm
          integer :: myid

          PetscInt :: i, k, Nfaces, Nedges, Nverts, chartsize

          call mpi_comm_rank(comm, myid, ierr);CHKERRQ(ierr)
          call DMPlexCreate(comm, dm, ierr);CHKERRQ(ierr)
          call PetscObjectSetName(dm, 'testplex', ierr);CHKERRQ(ierr)
          call DMSetDimension(dm, 2_pi, ierr);CHKERRQ(ierr)

          if(myid.eq.0) then

!           16____11_____17_____12___18
!            |          /|\          |
!            |         / | \         |
!            |        /  |  \    3   |
!            |   0   /   |   \       |
!            |      /    |    \      |
!            6     7     |     9     10
!            |    /      |      \    |
!            |   /       8       \   |
!            |  /    1   |    2   \  |
!            | /         |         \ |
!            |/          |          \|
!            /           |           \
!           13-----4----14-----5-----15

            Nfaces = 4
            Nedges = 9
            Nverts = 6
            chartsize = 19
          else
            Nfaces = 0
            Nedges = 0
            Nverts = 0
            chartsize = 0
          endif

          call DMPlexSetChart(dm, 0_pi, chartsize, ierr); CHKERRQ(ierr)

          ! Preallocation
          k=0
          ! cell has 3 edges
          do i = 1, Nfaces
            call DMPlexSetConeSize(dm, k, 3_pi, ierr); CHKERRQ(ierr)
            k = k+1
          enddo

          ! Edges have 2 vertices
          do i = 1, Nedges
            call DMPlexSetConeSize(dm, k, 2_pi, ierr); CHKERRQ(ierr)
            k = k+1
          enddo

          call DMSetUp(dm, ierr); CHKERRQ(ierr) ! Allocate space for cones

          if(myid.eq.0) then
            ! Setup Connections
            call DMPlexSetCone(dm,  0_pi, [6_pi, 7_pi,11_pi], ierr); CHKERRQ(ierr)
            call DMPlexSetCone(dm,  1_pi, [4_pi, 8_pi, 7_pi], ierr); CHKERRQ(ierr)
            call DMPlexSetCone(dm,  2_pi, [5_pi, 9_pi, 8_pi], ierr); CHKERRQ(ierr)
            call DMPlexSetCone(dm,  3_pi, [9_pi,10_pi,12_pi], ierr); CHKERRQ(ierr)

            call DMPlexSetCone(dm,  4_pi, [13_pi,14_pi], ierr); CHKERRQ(ierr)
            call DMPlexSetCone(dm,  5_pi, [14_pi,15_pi], ierr); CHKERRQ(ierr)
            call DMPlexSetCone(dm,  6_pi, [13_pi,16_pi], ierr); CHKERRQ(ierr)
            call DMPlexSetCone(dm,  7_pi, [13_pi,17_pi], ierr); CHKERRQ(ierr)
            call DMPlexSetCone(dm,  8_pi, [14_pi,17_pi], ierr); CHKERRQ(ierr)
            call DMPlexSetCone(dm,  9_pi, [15_pi,17_pi], ierr); CHKERRQ(ierr)
            call DMPlexSetCone(dm, 10_pi, [15_pi,18_pi], ierr); CHKERRQ(ierr)
            call DMPlexSetCone(dm, 11_pi, [16_pi,17_pi], ierr); CHKERRQ(ierr)
            call DMPlexSetCone(dm, 12_pi, [17_pi,18_pi], ierr); CHKERRQ(ierr)
          endif

          call DMPlexSymmetrize(dm, ierr); CHKERRQ(ierr)
          call DMPlexStratify(dm, ierr); CHKERRQ(ierr)

          call set_coords(comm)
        end subroutine

        subroutine set_coords(comm)
          integer, intent(in) :: comm
          integer :: myid, ierr
          PetscScalar, pointer :: coords(:)
          Vec                  :: coordinates
          PetscInt             :: i, dimEmbed, coordSize, vStart, vEnd, pStart, pEnd, voff
          PetscSection         :: coordSection

          call mpi_comm_rank(comm, myid, ierr); CHKERRQ(ierr)

          call DMGetCoordinateDim(dm, dimEmbed, ierr); CHKERRQ(ierr)

          call DMGetCoordinateSection(dm, coordSection, ierr); CHKERRQ(ierr)

          ! Note: the section is set up further down, once the chart and dofs are set
          call PetscSectionSetNumFields(coordSection, 1_pi, ierr); CHKERRQ(ierr)
          call PetscSectionSetFieldComponents(coordSection, 0_pi, dimEmbed, ierr); CHKERRQ(ierr)

          call DMPlexGetChart(dm, pStart, pEnd, ierr); CHKERRQ(ierr)
          call DMPlexGetDepthStratum(dm, 0_pi, vStart, vEnd, ierr); CHKERRQ(ierr) ! vertices

          call PetscSectionSetChart(coordSection, pStart, pEnd, ierr);CHKERRQ(ierr)

          do i = vStart, vEnd-1
            call PetscSectionSetDof(coordSection, i, dimEmbed, ierr); CHKERRQ(ierr)
            call PetscSectionSetFieldDof(coordSection, i, 0_pi, dimEmbed, ierr); CHKERRQ(ierr)
          enddo

          call PetscSectionSetUp(coordSection, ierr); CHKERRQ(ierr)
          call PetscSectionGetStorageSize(coordSection, coordSize, ierr); CHKERRQ(ierr)

          call VecCreate(PETSC_COMM_SELF, coordinates, ierr); CHKERRQ(ierr)
          call VecSetSizes(coordinates, coordSize, PETSC_DETERMINE, ierr);CHKERRQ(ierr)
          call VecSetBlockSize(coordinates, dimEmbed, ierr);CHKERRQ(ierr)
          call VecSetType(coordinates, VECSTANDARD, ierr);CHKERRQ(ierr)

          call PetscObjectSetName(coordinates, "coordinates", ierr); CHKERRQ(ierr)

          call VecGetArrayF90(coordinates, coords, ierr); CHKERRQ(ierr)

          if(myid.eq.0) then
            i = 13; call PetscSectionGetOffset(coordSection, i, voff, ierr); coords(voff+1:voff+2) = [0,0]
            i = 14; call PetscSectionGetOffset(coordSection, i, voff, ierr); coords(voff+1:voff+2) = [1,0]
            i = 15; call PetscSectionGetOffset(coordSection, i, voff, ierr); coords(voff+1:voff+2) = [2,0]
            i = 16; call PetscSectionGetOffset(coordSection, i, voff, ierr); coords(voff+1:voff+2) = [0,1]
            i = 17; call PetscSectionGetOffset(coordSection, i, voff, ierr); coords(voff+1:voff+2) = [1,1]
            i = 18; call PetscSectionGetOffset(coordSection, i, voff, ierr); coords(voff+1:voff+2) = [2,1]
          endif

          call VecRestoreArrayF90(coordinates, coords, ierr); CHKERRQ(ierr)
          call DMSetCoordinatesLocal(dm, coordinates, ierr);CHKERRQ(ierr)
          call PetscObjectViewFromOptions(coordinates, PETSC_NULL_VEC, "-show_plex_coordinates", ierr); CHKERRQ(ierr)
          call VecDestroy(coordinates, ierr);CHKERRQ(ierr)
        end subroutine

        subroutine distribute_dmplex(dm, dmdist)
          type(tDM), intent(in) :: dm
          type(tDM), intent(out) :: dmdist
          type(tPetscSF) :: msf
          call DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE, ierr); CHKERRQ(ierr)
          call DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE, ierr); CHKERRQ(ierr)

          msf = PETSC_NULL_SF
          call DMPlexDistribute(dm, 0_pi, msf, dmdist, ierr); CHKERRQ(ierr)
          if(dmdist.eq.PETSC_NULL_DM) then
            print *,'Did not distribute the mesh'
            call DMClone(dm, dmdist, ierr); CHKERRQ(ierr)
          endif
        end subroutine

        subroutine label_non_local_points(dm)
          type(tDM), intent(inout) :: dm
          integer :: comm, myid, ierr
          integer(pi) :: i

          type(tDMLabel) :: label_nonlocal
          type(tPetscSF) :: sf
          integer(pi) :: nleaves, nroots
          integer(pi), pointer :: myidx(:) ! list of my indices that we do not own
          type(PetscSFNode), pointer  :: remote(:) ! rank and remote idx of those points

          call PetscObjectGetComm(dm, comm, ierr); CHKERRQ(ierr)
          call mpi_comm_rank(comm, myid, ierr); CHKERRQ(ierr)

          call DMCreateLabel(dm, "nonlocal", ierr); CHKERRQ(ierr)
          call DMGetLabel(dm, "nonlocal", label_nonlocal, ierr); CHKERRQ(ierr)

          call DMLabelSetDefaultValue(label_nonlocal, int(myid, pi), ierr); CHKERRQ(ierr)
          call DMGetPointSF(dm, sf, ierr); CHKERRQ(ierr)
          call PetscSFGetGraph(sf, nroots, nleaves, myidx, remote, ierr); CHKERRQ(ierr)

          do i = 1, nleaves
            call DMLabelSetValue(label_nonlocal, myidx(i), remote(i)%rank, ierr); CHKERRQ(ierr)
          enddo
          myidx=>NULL(); remote=>NULL()
        end subroutine
      end program
include ${PETSC_DIR}/lib/petsc/conf/variables
include ${PETSC_DIR}/lib/petsc/conf/rules

plex.xmf:: plex.h5
	${PETSC_DIR}/lib/petsc/bin/petsc_gen_xdmf.py plex.h5

plex.h5:: hdf5_issue_nonlocal_only_labels
	./hdf5_issue_nonlocal_only_labels -show_labeled_plex ::ascii_info_detail
	./hdf5_issue_nonlocal_only_labels -show_labeled_plex hdf5:plex.h5

hdf5_issue_nonlocal_only_labels:: hdf5_issue_nonlocal_only_labels.F90
	${PETSC_FCOMPILE} -c hdf5_issue_nonlocal_only_labels.F90
	${FLINKER} hdf5_issue_nonlocal_only_labels.o -o hdf5_issue_nonlocal_only_labels ${PETSC_LIB}

clean::
	rm -rf *.o plex.h5 plex.xmf hdf5_issue_nonlocal_only_labels
HDF5-DIAG: Error detected in HDF5 (1.8.18) MPI-process 0:
  #000: H5Pdcpl.c line 870 in H5Pset_chunk(): all chunk dimensions must be positive
    major: Invalid arguments to routine
    minor: Out of range
[0]PETSC ERROR: --------------------- Error Message --------------------------------------------------------------
[0]PETSC ERROR: Error in external library
[0]PETSC ERROR: Error in HDF5 call H5Pset_chunk() Status -1
[0]PETSC ERROR: See http://www.mcs.anl.gov/petsc/documentation/faq.html for trouble shooting.
[0]PETSC ERROR: Petsc Development GIT revision: v3.10.3-1549-g9b78045  GIT Date: 2019-02-13 11:10:12 -0600
[0]PETSC ERROR: plex_prism on a debug_gcc named met-ws-970r17 by Fabian.Jakub Thu Feb 14 14:39:13 2019
[0]PETSC ERROR: Configure options --with-cc=/usr/bin/mpicc --with-fc=/usr/bin/mpif90 --with-cxx=/usr/bin/mpicxx --with-fortran --with-fortran-interfaces --with-shared-libraries=1 --download-hdf5 --download-netcdf=netcdf-c-4.6.2.tar.gz --download-zlib --download-szlib --download-openblas --download-metis --download-parmetis --download-ptscotch --no-download-petsc4py --download-ml --download-hypre --with-debugging=1 COPTFLAGS="-O2 -g" FOPTFLAGS="-O2 -g"
[0]PETSC ERROR: #1 ISView_General_HDF5() line 288 in /software/meteo/xenial/x86_64/petsc/master/src/vec/is/is/impls/general/general.c
[0]PETSC ERROR: #2 ISView_General() line 513 in /software/meteo/xenial/x86_64/petsc/master/src/vec/is/is/impls/general/general.c
[0]PETSC ERROR: #3 ISView() line 1015 in /software/meteo/xenial/x86_64/petsc/master/src/vec/is/is/interface/index.c
[0]PETSC ERROR: #4 DMPlexWriteLabels_HDF5_Static() line 668 in /software/meteo/xenial/x86_64/petsc/master/src/dm/impls/plex/plexhdf5.c
[0]PETSC ERROR: #5 DMPlexView_HDF5_Internal() line 692 in /software/meteo/xenial/x86_64/petsc/master/src/dm/impls/plex/plexhdf5.c
[0]PETSC ERROR: #6 DMView_Plex() line 1103 in /software/meteo/xenial/x86_64/petsc/master/src/dm/impls/plex/plex.c
[0]PETSC ERROR: #7 DMView() line 904 in /software/meteo/xenial/x86_64/petsc/master/src/dm/interface/dm.c
[0]PETSC ERROR: #8 PetscObjectView() line 100 in /software/meteo/xenial/x86_64/petsc/master/src/sys/objects/destroy.c
[0]PETSC ERROR: #9 PetscObjectViewFromOptions() line 136 in /software/meteo/xenial/x86_64/petsc/master/src/sys/objects/destroy.c
[0]PETSC ERROR: #10 User provided function() line 0 in User file


HDF5-DIAG: Error detected in HDF5 (1.8.18) MPI-process 1:
  #000: H5Pdcpl.c line 870 in H5Pset_chunk(): all chunk dimensions must be positive
    major: Invalid arguments to routine
    minor: Out of range
[1]PETSC ERROR: --------------------- Error Message --------------------------------------------------------------
[1]PETSC ERROR: Error in external library
[1]PETSC ERROR: Error in HDF5 call H5Pset_chunk() Status -1
[1]PETSC ERROR: See http://www.mcs.anl.gov/petsc/documentation/faq.html for trouble shooting.
[1]PETSC ERROR: Petsc Development GIT revision: v3.10.3-1549-g9b78045  GIT Date: 2019-02-13 11:10:12 -0600
[1]PETSC ERROR: plex_prism on a debug_gcc named met-ws-970r17 by Fabian.Jakub Thu Feb 14 14:39:13 2019
[1]PETSC ERROR: Configure options --with-cc=/usr/bin/mpicc --with-fc=/usr/bin/mpif90 --with-cxx=/usr/bin/mpicxx --with-fortran --with-fortran-interfaces --with-shared-libraries=1 --download-hdf5 --download-netcdf=netcdf-c-4.6.2.tar.gz --download-zlib --download-szlib --download-openblas --download-metis --download-parmetis --download-ptscotch --no-download-petsc4py --download-ml --download-hypre --with-debugging=1 COPTFLAGS="-O2 -g" FOPTFLAGS="-O2 -g"
[1]PETSC ERROR: #1 ISView_General_HDF5() line 288 in /software/meteo/xenial/x86_64/petsc/master/src/vec/is/is/impls/general/general.c
[1]PETSC ERROR: #2 ISView_General() line 513 in /software/meteo/xenial/x86_64/petsc/master/src/vec/is/is/impls/general/general.c
[1]PETSC ERROR: #3 ISView() line 1015 in /software/meteo/xenial/x86_64/petsc/master/src/vec/is/is/interface/index.c
[1]PETSC ERROR: #4 DMPlexWriteLabels_HDF5_Static() line 668 in /software/meteo/xenial/x86_64/petsc/master/src/dm/impls/plex/plexhdf5.c
[1]PETSC ERROR: #5 DMPlexView_HDF5_Internal() line 692 in /software/meteo/xenial/x86_64/petsc/master/src/dm/impls/plex/plexhdf5.c
[1]PETSC ERROR: #6 DMView_Plex() line 1103 in /software/meteo/xenial/x86_64/petsc/master/src/dm/impls/plex/plex.c
[1]PETSC ERROR: #7 DMView() line 904 in /software/meteo/xenial/x86_64/petsc/master/src/dm/interface/dm.c
[1]PETSC ERROR: #8 PetscObjectView() line 100 in /software/meteo/xenial/x86_64/petsc/master/src/sys/objects/destroy.c
[1]PETSC ERROR: #9 PetscObjectViewFromOptions() line 136 in /software/meteo/xenial/x86_64/petsc/master/src/sys/objects/destroy.c
[1]PETSC ERROR: #10 User provided function() line 0 in User file
--------------------------------------------------------------------------
