xref: /petsc/src/vec/is/sf/tutorials/ex1f.F90 (revision 030f984af8d8bb4c203755d35bded3c05b3d83ce)
1
2!    Description: A star forest is a simple tree with one root and zero or more leaves.
3!    Many common communication patterns can be expressed as updates of rootdata using leafdata and vice-versa.
4!     This example creates a star forest, communicates values using the graph, views the graph, then destroys it.
5!
6!     This is a copy of ex1.c but currently only tests the broadcast operation
7
!     Create a star forest, broadcast root values to the leaves, view the
!     graph, verify that PetscSFGetGraph() returns the graph given to
!     PetscSFSetGraph(), and destroy the star forest.
      program main
#include <petsc/finclude/petscvec.h>
      use petscmpi  ! or mpi or mpi_f08
      use petscvec
      implicit none

      PetscErrorCode                ierr
      PetscInt                      i,nroots,nrootsalloc,nleaves,nleavesalloc,mine(6),stride
      type(PetscSFNode)             remote(6)
      PetscMPIInt                   rank,size
      PetscSF                       sf
      PetscInt                      rootdata(6),leafdata(6)

! used with PetscSFGetGraph()
      type(PetscSFNode), pointer :: gremote(:)
      PetscInt, pointer ::          gmine(:)
      PetscInt                      gnroots,gnleaves

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
!     Each root/leaf slot is padded to 'stride' entries; only the first entry of a slot carries data
      stride = 2
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr);CHKERRA(ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr);CHKERRA(ierr)

!     Rank 0 owns three roots, every other rank owns two
      if (rank == 0) then
         nroots = 3
      else
         nroots = 2
      endif
      nrootsalloc  = nroots * stride
!     Rank 0 has two leaves, every other rank has three (the extra leaf references rank 0 below)
      if (rank > 0) then
         nleaves = 3
      else
         nleaves = 2
      endif
      nleavesalloc = nleaves * stride
      if (stride > 1) then
!        With a stride the leaves are not contiguous in leafdata, so list their offsets explicitly
         do i=1,nleaves
            mine(i) = stride * (i-1)
         enddo
      endif

! Left periodic neighbor
      remote(1)%rank  = modulo(rank+size-1,size)
      remote(1)%index = 1 * stride
! Right periodic neighbor
      remote(2)%rank  = modulo(rank+1,size)
      remote(2)%index = 0 * stride
      if (rank > 0) then !               All processes other than rank 0 reference rank 0, root slot 2
         remote(3)%rank  = 0
         remote(3)%index = 2 * stride
      endif

!  Create a star forest for communication
      call PetscSFCreate(PETSC_COMM_WORLD,sf,ierr);CHKERRA(ierr)
      call PetscSFSetFromOptions(sf,ierr);CHKERRA(ierr)
      call PetscSFSetGraph(sf,nrootsalloc,nleaves,mine,PETSC_COPY_VALUES,remote,PETSC_COPY_VALUES,ierr);CHKERRA(ierr)
      call PetscSFSetUp(sf,ierr);CHKERRA(ierr)

!   View graph, mostly useful for debugging purposes.
      call PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL,ierr);CHKERRA(ierr)
      call PetscSFView(sf,PETSC_VIEWER_STDOUT_WORLD,ierr);CHKERRA(ierr)
      call PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD,ierr);CHKERRA(ierr)

!   Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
!     * user-defined structures, could also be used.
!     Set rootdata buffer to be broadcast; padding entries stay at -1 so they are visible in the output
      do i=1,nrootsalloc
         rootdata(i) = -1
      enddo
      do i=1,nroots
         rootdata(1 + (i-1)*stride) = 100*(rank+1) + i - 1
      enddo

!     Initialize local buffer, these values are never used.
      do i=1,nleavesalloc
         leafdata(i) = -1
      enddo

!     Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls.
      call PetscSFBcastBegin(sf,MPIU_INTEGER,rootdata,leafdata,MPI_REPLACE,ierr);CHKERRA(ierr)
      call PetscSFBcastEnd(sf,MPIU_INTEGER,rootdata,leafdata,MPI_REPLACE,ierr);CHKERRA(ierr)
      call PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Rootdata\n",ierr);CHKERRA(ierr)
      call PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD,ierr);CHKERRA(ierr)
      call PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Leafdata\n",ierr);CHKERRA(ierr)
      call PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD,ierr);CHKERRA(ierr)

!     Check that the graph handed back by PetscSFGetGraph() matches what was set with PetscSFSetGraph()
      call PetscSFGetGraph(sf,gnroots,gnleaves,gmine,gremote,ierr);CHKERRA(ierr)
      if (gnleaves .ne. nleaves) then; SETERRA(PETSC_COMM_WORLD,PETSC_ERR_PLIB,'nleaves returned from PetscSFGetGraph() does not match that set with PetscSFSetGraph()'); endif
      do i=1,nleaves
         if (gmine(i) .ne. mine(i)) then; SETERRA(PETSC_COMM_WORLD,PETSC_ERR_PLIB,'Root from PetscSFGetGraph() does not match that set with PetscSFSetGraph()'); endif
      enddo
      do i=1,nleaves
         if (gremote(i)%index .ne. remote(i)%index) then; SETERRA(PETSC_COMM_WORLD,PETSC_ERR_PLIB,'Leaf from PetscSFGetGraph() does not match that set with PetscSFSetGraph()'); endif
      enddo

!     NOTE(review): gremote appears to be allocated by the Fortran stub for
!     PetscSFGetGraph() (gmine is not deallocated here) — confirm against the
!     PETSc Fortran interface before changing this.
      deallocate(gremote)
!    Clean storage for star forest.
      call PetscSFDestroy(sf,ierr);CHKERRA(ierr)
      call PetscFinalize(ierr);CHKERRA(ierr)
      end program main
112
113!/*TEST
114!  build:
115!    requires: define(PETSC_HAVE_FORTRAN_TYPE_STAR)
116!
117!  test:
118!    nsize: 3
119!
120!TEST*/
121