xref: /petsc/src/mat/tests/ex16f90.F90 (revision 9b88ac225e01f016352a5f4cd90e158abe5f5675)
!
!
!  Tests MatDenseGetArray()
!
#include <petsc/finclude/petscmat.h>
program main
  use petscmat
  implicit none

  Mat A
  PetscErrorCode ierr
  PetscInt i, j, m, n, iar(1), jar(1)
  PetscInt one
  PetscScalar v(1)
  ! Fortran pointer that MatDenseGetArray() associates with the
  ! rank-local block of the dense storage of A (local rows x n columns)
  PetscScalar, pointer :: array(:, :)
  PetscMPIInt rank
  integer :: ashape(2)
  character(len=80) :: string

  PetscCallA(PetscInitialize(ierr))
  m = 3    ! global number of rows
  n = 2    ! global number of columns
  one = 1  ! PetscInt-kinded count argument for MatSetValues
!
!      Create a parallel dense matrix shared by all processors.
!      PETSC_NULL_SCALAR_ARRAY tells PETSc to allocate the storage itself.
!
  PetscCallA(MatCreateDense(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, m, n, PETSC_NULL_SCALAR_ARRAY, A, ierr))

!
!     Set values into the matrix. All processors redundantly set all
!     values (one entry per call); with INSERT_VALUES the duplicates
!     are reconciled at assembly time.
!
  do i = 0, m - 1
    iar(1) = i
    do j = 0, n - 1
      jar(1) = j
      ! 9/1, 9/2, ..., 9/4 are all exactly representable, so evaluating
      ! in default real before assigning to PetscScalar loses nothing here
      v(1) = 9.0/real(i + j + 1)
      PetscCallA(MatSetValues(A, one, iar, one, jar, v, INSERT_VALUES, ierr))
    end do
  end do

  PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
  PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

!
!       Print the matrix to the screen
!
  PetscCallA(MatView(A, PETSC_VIEWER_STDOUT_WORLD, ierr))

!
!      Print the local matrix shape to the screen for each rank;
!      PetscSynchronizedPrintf/Flush keeps the per-rank lines from
!      interleaving (output is sorted by the test filter anyway)
!
  PetscCallA(MatDenseGetArray(A, array, ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
  ashape = shape(array)
  write (string, '("[", i0, "]", " shape (", i0, ",", i0, ")", a1)') rank, ashape(1), ashape(2), new_line('a')
  PetscCallA(PetscSynchronizedPrintf(PETSC_COMM_WORLD, string, ierr))
  PetscCallA(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT, ierr))
  ! Every MatDenseGetArray() must be paired with a restore before destroy
  PetscCallA(MatDenseRestoreArray(A, array, ierr))
!
!      Free the space used by the matrix
!
  PetscCallA(MatDestroy(A, ierr))
  PetscCallA(PetscFinalize(ierr))
end program main

!/*TEST
!
!   test:
!      nsize: 2
!      filter: sort -b
!      filter_output: sort -b
!
!TEST*/
