xref: /petsc/src/mat/tests/ex231.cxx (revision efa12513287cff49a2b9648ae83199dcbfaad71a)
1 static char help[] = "A test for MatAssembly that heavily relies on PetscSortIntWithArrayPair\n";
2 
3 /*
4    The characteristic of the array (about 4M in length) to sort in this test is that it has
5    many duplicated values that already clustered together (around 95 duplicates per unique integer).
6 
7    It was gotten from a petsc performance bug report from the Moose project. One can use
8    it for future performance study.
9 
10    Contributed-by: Fande Kong <fdkong.jd@gmail.com>, John Peterson <jwpeterson@gmail.com>
11  */
12 #define PETSC_SKIP_CXX_COMPLEX_FIX
13 
14 // PETSc includes
15 #include <petscmat.h>
16 
17 // C++ includes
18 #include <iostream>
19 #include <fstream>
20 #include <sstream>
21 #include <algorithm>
22 #include <vector>
23 #include <string>
24 #include <set>
25 
26 int main (int argc, char** argv)
27 {
28   PetscErrorCode ierr;
29   PetscMPIInt    size, rank;
30   char           file[2][PETSC_MAX_PATH_LEN];
31   PetscBool      flg;
32   const unsigned int n_dofs = 26559;
33   unsigned int   first_local_index;
34   unsigned int   last_local_index;
35 
36   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
37   ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRMPI(ierr);
38   ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
39   if (size > 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"This example is for <=2 procs");
40 
41   ierr = PetscOptionsGetString(NULL,NULL,"-f0",file[0],sizeof(file[0]),&flg);CHKERRQ(ierr);
42   if (!flg) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Must indicate dof indices file for rank 0 with -f0 option");
43   if (size == 2) {
44     ierr = PetscOptionsGetString(NULL,NULL,"-f1",file[1],sizeof(file[1]),&flg);CHKERRQ(ierr);
45     if (!flg) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Must indicate dof indices file for rank 1 with -f1 option");
46   }
47 
48   if (size == 1) {
49     first_local_index = 0;
50     last_local_index  = 26559;
51   } else {
52     if (rank == 0) {
53       first_local_index = 0;
54       last_local_index  = 13911;
55     } else {
56       first_local_index = 13911;
57       last_local_index  = 26559;
58     }
59   }
60 
61   // Read element dof indices from files
62   std::vector<std::vector<std::vector<PetscInt> > > elem_dof_indices(size);
63   for (PetscInt proc_id = 0; proc_id < size; ++proc_id) {
64     std::string line;
65     std::ifstream dof_file(file[proc_id]);
66     if (dof_file.good()) {
67       while (std::getline (dof_file,line)) {
68         std::vector<PetscInt> dof_indices;
69         std::stringstream sstream(line);
70         std::string token;
71         while (std::getline(sstream, token, ' ')) {dof_indices.push_back(std::atoi(token.c_str()));}
72         elem_dof_indices[proc_id].push_back(dof_indices);
73       }
74     } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Could not open file %s",file[proc_id]);
75   }
76 
77   // Debugging: Verify we read in elem_dof_indices correctly
78   // for (unsigned int i=0; i<elem_dof_indices.size(); ++i)
79   //   {
80   //     for (unsigned int j=0; j<elem_dof_indices[i].size(); ++j)
81   //       {
82   //         for (unsigned int k=0; k<elem_dof_indices[i][j].size(); ++k)
83   //           std::cout << elem_dof_indices[i][j][k] << " ";
84   //         std::cout << std::endl;
85   //       }
86   //     std::cout << std::endl;
87   //   }
88 
89   // Set up the (global) sparsity pattern
90   std::vector< std::set< unsigned int > > sparsity(n_dofs);
91   for (PetscInt proc_id = 0; proc_id < size; ++proc_id)
92     for (unsigned int k = 0; k < elem_dof_indices[proc_id].size(); k++) {
93       std::vector<PetscInt>& dof_indices = elem_dof_indices[proc_id][k];
94       for (unsigned int i = 0; i < dof_indices.size(); ++i)
95         for (unsigned int j = 0; j < dof_indices.size(); ++j)
96           sparsity[dof_indices[i]].insert(dof_indices[j]);
97     }
98 
99   // Determine the local nonzeros on this processor
100   const unsigned int n_local_dofs = last_local_index - first_local_index;
101   std::vector<PetscInt> n_nz(n_local_dofs);
102   std::vector<PetscInt> n_oz(n_local_dofs);
103 
104   for (unsigned int i = 0; i < n_local_dofs; ++i) {
105     for (std::set<unsigned int>::iterator iter = sparsity[i+first_local_index].begin(); iter != sparsity[i+first_local_index].end(); iter++) {
106       unsigned int dof = *iter;
107       if ((dof >= first_local_index) && (dof < last_local_index)) n_nz[i]++;
108       else n_oz[i]++;
109     }
110   }
111 
112   // Debugging: print number of on/off diagonal nonzeros
113   // for (unsigned int i=0; i<n_nz.size(); ++i)
114   //   std::cout << n_nz[i] << " ";
115   // std::cout << std::endl;
116 
117   // for (unsigned int i=0; i<n_oz.size(); ++i)
118   //   std::cout << n_oz[i] << " ";
119   // std::cout << std::endl;
120 
121   // Compute and print max number of on- and off-diagonal nonzeros.
122   PetscInt n_nz_max = *std::max_element(n_nz.begin(), n_nz.end());
123   PetscInt n_oz_max = *std::max_element(n_oz.begin(), n_oz.end());
124 
125   ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "Max on-diagonal non-zeros: = %d\n", n_nz_max);CHKERRQ(ierr);
126   ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "Max off-diagonal non-zeros: = %d\n", n_oz_max);CHKERRQ(ierr);
127   ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);CHKERRQ(ierr);
128 
129   // Initialize the matrix similar to what we do in the PetscMatrix
130   // ctor and init() routines.
131   Mat mat;
132   ierr = MatCreate(PETSC_COMM_WORLD, &mat);CHKERRQ(ierr);
133   ierr = MatSetSizes(mat, n_local_dofs, n_local_dofs, n_dofs, n_dofs);CHKERRQ(ierr);
134   ierr = MatSetBlockSize(mat, 1);CHKERRQ(ierr);
135   ierr = MatSetType(mat, MATAIJ);CHKERRQ(ierr); // Automatically chooses seqaij or mpiaij
136   ierr = MatSeqAIJSetPreallocation(mat, 0, &n_nz[0]);CHKERRQ(ierr);
137   ierr = MatMPIAIJSetPreallocation(mat, 0, &n_nz[0], 0, &n_oz[0]);CHKERRQ(ierr);
138   ierr = MatSetOption(mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);CHKERRQ(ierr);
139 
140   // Local "element" loop
141   for (unsigned int k = 0; k < elem_dof_indices[rank].size(); k++) {
142     std::vector<PetscInt>& dof_indices = elem_dof_indices[rank][k];
143     // DenseMatrix< Number >  zero_mat( dof_indices.size(), dof_indices.size());
144     // B.add_matrix( zero_mat, dof_indices);
145     std::vector<PetscScalar> ones(dof_indices.size() * dof_indices.size(), 1.);
146     ierr = MatSetValues(mat, dof_indices.size(), &dof_indices[0], dof_indices.size(), &dof_indices[0], &ones[0], ADD_VALUES);CHKERRQ(ierr);
147   }
148 
149   // Matrix assembly
150   ierr = MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
151   ierr = MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
152 
153   // Clean up
154   ierr = MatDestroy(&mat);CHKERRQ(ierr);
155   ierr = PetscFinalize();
156   return ierr;
157 }
158 /*TEST
159    build:
160       requires: !define(PETSC_HAVE_SUN_CXX)
161 
162    test:
163       nsize: 2
164       args: -f0 ${DATAFILESPATH}/meshes/moose_dof_indices_np_2_proc_0.txt -f1 ${DATAFILESPATH}/meshes/moose_dof_indices_np_2_proc_1.txt
165       # The build requirement above (!define(PETSC_HAVE_SUN_CXX)) skips the Sun C++ compiler because of its warnings/errors in petscmat.h
166       requires: datafilespath
167 
168 TEST*/
169