xref: /petsc/src/sys/tutorials/ex4f.F90 (revision b5f0bcd6e9e8ed97648738542f5163d94f7b1782)
!
!     This introductory example illustrates running PETSc on a subset
!     of processes
!
! -----------------------------------------------------------------------
#include <petsc/finclude/petscsys.h>
program main
  use petscmpi  ! or mpi or mpi_f08
  use petscsys
  implicit none
  PetscErrorCode ierr
  PetscMPIInt rank, size, grank, zero, two
  PetscReal globalrank

!     We must call MPI_Init() first, making us, not PETSc, responsible for MPI

  PetscCallMPIA(MPI_Init(ierr))
#if defined(PETSC_HAVE_ELEMENTAL)
  PetscCallA(PetscElementalInitializePackage(ierr))
#endif
!     We can now change the communicator universe for PETSc: split
!     MPI_COMM_WORLD into two subcommunicators (even ranks and odd
!     ranks) and hand one of them to PETSc. Setting PETSC_COMM_WORLD
!     must happen BEFORE PetscInitialize() for it to take effect.

  zero = 0
  two = 2
  PetscCallMPIA(MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr))
  PetscCallMPIA(MPI_Comm_split(MPI_COMM_WORLD, mod(rank, two), zero, PETSC_COMM_WORLD, ierr))

!     Every PETSc routine should begin with the PetscInitialize()
!     routine.
  PetscCallA(PetscInitializeNoArguments(ierr))

!     The following MPI calls return the number of processes being used
!     and the rank of this process in the group (i.e., within the
!     subcommunicator PETSc is running on, not MPI_COMM_WORLD).

  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD, size, ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))

!     Here we would like to print only one message that represents all
!     the processes in the group. Sleep (for a number of seconds equal
!     to the global rank) so that IO from different ranks don't get
!     mixed up. Note this is not an ideal solution
  PetscCallMPIA(MPI_Comm_rank(MPI_COMM_WORLD, grank, ierr))
  globalrank = grank
  PetscCallA(PetscSleep(globalrank, ierr))
  if (rank == 0) write (6, 100) size, rank
100 format('No of Procs = ', i4, ' rank = ', i4)

!     Always call PetscFinalize() before exiting a program.  This
!     routine - finalizes the PETSc libraries as well as MPI - provides
!     summary and diagnostic information if certain runtime options are
!     chosen (e.g., -log_view).  See PetscFinalize() manpage for more
!     information.

  PetscCallA(PetscFinalize(ierr))
!     PETSc does not own PETSC_COMM_WORLD here (we created it with
!     MPI_Comm_split above), so we free it ourselves after PetscFinalize()
  PetscCallMPIA(MPI_Comm_free(PETSC_COMM_WORLD, ierr))
#if defined(PETSC_HAVE_ELEMENTAL)
  PetscCallA(PetscElementalFinalizePackage(ierr))
#endif

!     Since we initialized MPI, we must call MPI_Finalize()

  PetscCallMPIA(MPI_Finalize(ierr))
end

!/*TEST
!
!   test:
!      nsize: 5
!      filter: sort -b
!      filter_output: sort -b
!      requires: !cuda !saws
!
!TEST*/
73