xref: /petsc/src/mat/tests/ex9.c (revision 8fb5bd83c3955fefcf33a54e3bb66920a9fa884b)
1 
/* Usage text printed for -help; also handed to PetscInitialize() below. */
static char help[] = "Tests MPI parallel matrix creation. Test MatCreateRedundantMatrix() \n\n";
3 
4 #include <petscmat.h>
5 
6 int main(int argc,char **args)
7 {
8   Mat            C,Credundant;
9   MatInfo        info;
10   PetscMPIInt    rank,size,subsize;
11   PetscInt       i,j,m = 3,n = 2,low,high,iglobal;
12   PetscInt       Ii,J,ldim,nsubcomms;
13   PetscBool      flg_info,flg_mat;
14   PetscScalar    v,one = 1.0;
15   Vec            x,y;
16   MPI_Comm       subcomm;
17 
18   PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
19   PetscCall(PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL));
20   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
21   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
22   n    = 2*size;
23 
24   PetscCall(MatCreate(PETSC_COMM_WORLD,&C));
25   PetscCall(MatSetSizes(C,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n));
26   PetscCall(MatSetFromOptions(C));
27   PetscCall(MatSetUp(C));
28 
29   /* Create the matrix for the five point stencil, YET AGAIN */
30   for (i=0; i<m; i++) {
31     for (j=2*rank; j<2*rank+2; j++) {
32       v = -1.0;  Ii = j + n*i;
33       if (i>0)   {J = Ii - n; PetscCall(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));}
34       if (i<m-1) {J = Ii + n; PetscCall(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));}
35       if (j>0)   {J = Ii - 1; PetscCall(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));}
36       if (j<n-1) {J = Ii + 1; PetscCall(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));}
37       v = 4.0; PetscCall(MatSetValues(C,1,&Ii,1,&Ii,&v,INSERT_VALUES));
38     }
39   }
40 
41   /* Add extra elements (to illustrate variants of MatGetInfo) */
42   Ii   = n; J = n-2; v = 100.0;
43   PetscCall(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));
44   Ii   = n-2; J = n; v = 100.0;
45   PetscCall(MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES));
46 
47   PetscCall(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY));
48   PetscCall(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY));
49 
50   /* Form vectors */
51   PetscCall(MatCreateVecs(C,&x,&y));
52   PetscCall(VecGetLocalSize(x,&ldim));
53   PetscCall(VecGetOwnershipRange(x,&low,&high));
54   for (i=0; i<ldim; i++) {
55     iglobal = i + low;
56     v       = one*((PetscReal)i) + 100.0*rank;
57     PetscCall(VecSetValues(x,1,&iglobal,&v,INSERT_VALUES));
58   }
59   PetscCall(VecAssemblyBegin(x));
60   PetscCall(VecAssemblyEnd(x));
61 
62   PetscCall(MatMult(C,x,y));
63 
64   PetscCall(PetscOptionsHasName(NULL,NULL,"-view_info",&flg_info));
65   if (flg_info)  {
66     PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO));
67     PetscCall(MatView(C,PETSC_VIEWER_STDOUT_WORLD));
68 
69     PetscCall(MatGetInfo(C,MAT_GLOBAL_SUM,&info));
70     PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"matrix information (global sums):\nnonzeros = %" PetscInt_FMT ", allocated nonzeros = %" PetscInt_FMT "\n",(PetscInt)info.nz_used,(PetscInt)info.nz_allocated));
71     PetscCall(MatGetInfo (C,MAT_GLOBAL_MAX,&info));
72     PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"matrix information (global max):\nnonzeros = %" PetscInt_FMT ", allocated nonzeros = %" PetscInt_FMT "\n",(PetscInt)info.nz_used,(PetscInt)info.nz_allocated));
73   }
74 
75   PetscCall(PetscOptionsHasName(NULL,NULL,"-view_mat",&flg_mat));
76   if (flg_mat) PetscCall(MatView(C,PETSC_VIEWER_STDOUT_WORLD));
77 
78   /* Test MatCreateRedundantMatrix() */
79   nsubcomms = size;
80   PetscCall(PetscOptionsGetInt(NULL,NULL,"-nsubcomms",&nsubcomms,NULL));
81   PetscCall(MatCreateRedundantMatrix(C,nsubcomms,MPI_COMM_NULL,MAT_INITIAL_MATRIX,&Credundant));
82   PetscCall(MatCreateRedundantMatrix(C,nsubcomms,MPI_COMM_NULL,MAT_REUSE_MATRIX,&Credundant));
83 
84   PetscCall(PetscObjectGetComm((PetscObject)Credundant,&subcomm));
85   PetscCallMPI(MPI_Comm_size(subcomm,&subsize));
86 
87   if (subsize==2 && flg_mat) {
88     PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_(subcomm),"\n[%d] Credundant:\n",rank));
89     PetscCall(MatView(Credundant,PETSC_VIEWER_STDOUT_(subcomm)));
90   }
91   PetscCall(MatDestroy(&Credundant));
92 
93   /* Test MatCreateRedundantMatrix() with user-provided subcomm */
94   {
95     PetscSubcomm psubcomm;
96 
97     PetscCall(PetscSubcommCreate(PETSC_COMM_WORLD,&psubcomm));
98     PetscCall(PetscSubcommSetNumber(psubcomm,nsubcomms));
99     PetscCall(PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_CONTIGUOUS));
100     /* enable runtime switch of psubcomm type, e.g., '-psubcomm_type interlaced */
101     PetscCall(PetscSubcommSetFromOptions(psubcomm));
102 
103     PetscCall(MatCreateRedundantMatrix(C,nsubcomms,PetscSubcommChild(psubcomm),MAT_INITIAL_MATRIX,&Credundant));
104     PetscCall(MatCreateRedundantMatrix(C,nsubcomms,PetscSubcommChild(psubcomm),MAT_REUSE_MATRIX,&Credundant));
105 
106     PetscCall(PetscSubcommDestroy(&psubcomm));
107     PetscCall(MatDestroy(&Credundant));
108   }
109 
110   PetscCall(VecDestroy(&x));
111   PetscCall(VecDestroy(&y));
112   PetscCall(MatDestroy(&C));
113   PetscCall(PetscFinalize());
114   return 0;
115 }
116 
117 /*TEST
118 
119    test:
120       nsize: 3
121       args: -view_info
122 
123    test:
124       suffix: 2
125       nsize: 3
126       args: -nsubcomms 2 -view_mat -psubcomm_type interlaced
127 
128    test:
129       suffix: 3
130       nsize: 3
131       args: -nsubcomms 2 -view_mat -psubcomm_type contiguous
132 
133    test:
134       suffix: 3_baij
135       nsize: 3
136       args: -mat_type baij -nsubcomms 2 -view_mat
137 
138    test:
139       suffix: 3_sbaij
140       nsize: 3
141       args: -mat_type sbaij -nsubcomms 2 -view_mat
142 
143    test:
144       suffix: 3_dense
145       nsize: 3
146       args: -mat_type dense -nsubcomms 2 -view_mat
147 
148    test:
149       suffix: 4_baij
150       nsize: 3
151       args: -mat_type baij -nsubcomms 2 -view_mat -psubcomm_type interlaced
152 
153    test:
154       suffix: 4_sbaij
155       nsize: 3
156       args: -mat_type sbaij -nsubcomms 2 -view_mat -psubcomm_type interlaced
157 
158    test:
159       suffix: 4_dense
160       nsize: 3
161       args: -mat_type dense -nsubcomms 2 -view_mat -psubcomm_type interlaced
162 
163 TEST*/
164