!
!     This introductory example illustrates running PETSc on a subset
!     of processes
!
! -----------------------------------------------------------------------
#include <petsc/finclude/petscsys.h>
program main
  use petscsys
  implicit none
  PetscErrorCode ierr
  PetscMPIInt rank, size, grank, zero, two
  PetscReal globalrank

!     We must call MPI_Init() first, making us, not PETSc, responsible for MPI

  PetscCallMPIA(MPI_Init(ierr))
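
!     Because MPI is already initialized, PetscInitialize() below will
!     not call MPI_Init(), and PetscFinalize() will not call
!     MPI_Finalize(); we make those calls ourselves at the end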
#if defined(PETSC_HAVE_ELEMENTAL)
  PetscCallA(PetscElementalInitializePackage(ierr))
#endif
!     We can now change the communicator universe for PETSc

  zero = 0
  two = 2
  PetscCallMPIA(MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr))
  PetscCallMPIA(MPI_Comm_split(MPI_COMM_WORLD, mod(rank, two), zero, PETSC_COMM_WORLD, ierr))
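
!     The split uses color mod(rank, two) and key zero: even global
!     ranks form one subcommunicator and odd global ranks another.
!     With the 5 ranks used by the test below, the groups are
!     {0, 2, 4} and {1, 3}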

!     Every PETSc program should begin with a call to PetscInitialize();
!     here we use the variant that does not process command line arguments
  PetscCallA(PetscInitializeNoArguments(ierr))
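
!     Note that PETSC_COMM_WORLD had to be set before PetscInitialize()
!     was called; setting it afterwards has no effect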

!     The following MPI calls return the number of processes in this
!     process's subcommunicator and the rank of this process within it

  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD, size, ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))

!     Here we would like to print only one message that represents all
!     the processes in the group.  Each process sleeps for a number of
!     seconds equal to its global rank so that IO from different ranks
!     does not get mixed up; note that this is not an ideal solution
  PetscCallMPIA(MPI_Comm_rank(MPI_COMM_WORLD, grank, ierr))
  globalrank = grank
  PetscCallA(PetscSleep(globalrank, ierr))
  if (rank == 0) write (6, 100) size, rank
100 format('No of Procs = ', i4, ' rank = ', i4)
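
!     With the 5 ranks used by the test below, rank 0 of each
!     subcommunicator prints one line, reporting group sizes 3 and 2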

!     Always call PetscFinalize() before exiting a program.  This
!     routine finalizes the PETSc libraries and provides summary and
!     diagnostic information if certain runtime options are chosen
!     (e.g., -log_view).  Because we called MPI_Init() ourselves,
!     PetscFinalize() does not call MPI_Finalize(); see the
!     PetscFinalize() manpage for more information.

  PetscCallA(PetscFinalize(ierr))
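
!     The communicator created by MPI_Comm_split() is ours to free;
!     we do so after PetscFinalize() and before MPI_Finalize(), after
!     which no MPI calls may be made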
  PetscCallMPIA(MPI_Comm_free(PETSC_COMM_WORLD, ierr))
#if defined(PETSC_HAVE_ELEMENTAL)
  PetscCallA(PetscElementalFinalizePackage(ierr))
#endif

!     Since we initialized MPI, we must call MPI_Finalize()

  PetscCallMPIA(MPI_Finalize(ierr))
end

!/*TEST
!
!   test:
!      nsize: 5
!      filter: sort -b
!      filter_output: sort -b
!      requires: !cuda !saws
!
!TEST*/