! xref: /petsc/src/sys/tutorials/ex4f90.F90 (revision 74df5e01f481fb3fe90b32c3b4345ef0122eb3ce)
!
!     This introductory example illustrates running PETSc on a subset
!     of processes
!
! -----------------------------------------------------------------------

program main
#include <petsc/finclude/petscsys.h>
  use petscmpi  ! or mpi or mpi_f08
  use petscsys
  implicit none

  PetscErrorCode ierr
  PetscMPIInt rank, size, zero, two

!     We must call MPI_Init() first, making us, not PETSc, responsible
!     for MPI

  PetscCallMPIA(MPI_Init(ierr))

!     We can now change the communicator universe for PETSc: split
!     MPI_COMM_WORLD by rank parity so PETSc runs on a subset of the
!     processes (each half gets its own PETSC_COMM_WORLD).

  zero = 0
  two = 2
  PetscCallMPIA(MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr))
  PetscCallMPIA(MPI_Comm_split(MPI_COMM_WORLD, mod(rank, two), zero, PETSC_COMM_WORLD, ierr))

!     Every PETSc routine should begin with the PetscInitialize()
!     routine.

  PetscCallA(PetscInitialize(ierr))

!     The following MPI calls return the number of processes being used
!     and the rank of this process in the group (relative to the split
!     communicator, not MPI_COMM_WORLD).

  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD, size, ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))

!     Here we would like to print only one message that represents all
!     the processes in the group.
  if (rank == 0) write (6, 100) size, rank
100 format('No of Procs = ', i4, ' rank = ', i4)

!     Always call PetscFinalize() before exiting a program.  This
!     routine - finalizes the PETSc libraries as well as MPI - provides
!     summary and diagnostic information if certain runtime options are
!     chosen (e.g., -log_view).  See PetscFinalize() manpage for more
!     information.

  PetscCallA(PetscFinalize(ierr))

!     Free the communicator we created with MPI_Comm_split(); PETSc
!     does not own it, so PetscFinalize() will not release it for us.

  PetscCallMPIA(MPI_Comm_free(PETSC_COMM_WORLD, ierr))

!     Since we initialized MPI, we must call MPI_Finalize()

  PetscCallMPIA(MPI_Finalize(ierr))
end

!
!/*TEST
!
!   test:
!
!TEST*/
