static char help[] = "Tests MatIncreaseOverlap() and MatCreateSubMatrices() for the parallel case.\n\
This example is similar to ex40.c; here the index sets used are random.\n\
Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0 number of domains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between domains\n\n";

#include <petscmat.h>

int main(int argc,char **args)
{
  PetscErrorCode ierr;
  PetscInt       nd = 2,ov = 1,i,j,lsize,m,n,*idx,bs;
  PetscMPIInt    rank,size;
  PetscBool      flg;
  Mat            A,B,*submatA,*submatB;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscBool      test_unsorted = PETSC_FALSE;
  PetscScalar    rand;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
  ierr = PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,NULL,"-test_unsorted",&test_unsorted,NULL);CHKERRQ(ierr);

  /* Read the matrix as a parallel matrix A */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetType(A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetFromOptions(A);CHKERRQ(ierr);
  ierr = MatLoad(A,fd);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);

  /* Read the same matrix as a sequential matrix B */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
  ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
  ierr = MatSetFromOptions(B);CHKERRQ(ierr);
  ierr = MatLoad(B,fd);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);

  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);

  /* Create the random number generator */
  ierr = MatGetSize(A,&m,&n);CHKERRQ(ierr);
  ierr = PetscRandomCreate(PETSC_COMM_SELF,&r);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);

  /* Create the index sets corresponding to the subdomains */
  ierr = PetscMalloc1(nd,&is1);CHKERRQ(ierr);
  ierr = PetscMalloc1(nd,&is2);CHKERRQ(ierr);
  ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);
  for (i=0; i<m; i++) idx[i] = i;

  /* Create the random index sets */
  for (i=0; i<nd; i++) {
    /* Skip a few random values so that the index sets on different processes differ */
    for (j=0; j<rank; j++) {
      ierr = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
    }
    ierr  = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
    lsize = (PetscInt)(rand*(m/bs));
    /* Partial Fisher-Yates shuffle of idx, swapping whole blocks of size bs */
    for (j=0; j<lsize; j++) {
      PetscInt k,swap,l;

      ierr = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
      k    = j + (PetscInt)(rand*((m/bs)-j));
      for (l=0; l<bs; l++) {
        swap        = idx[bs*j+l];
        idx[bs*j+l] = idx[bs*k+l];
        idx[bs*k+l] = swap;
      }
    }
    if (!test_unsorted) {ierr = PetscSortInt(lsize*bs,idx);CHKERRQ(ierr);}
    ierr = ISCreateGeneral(PETSC_COMM_SELF,lsize*bs,idx,PETSC_COPY_VALUES,is1+i);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,lsize*bs,idx,PETSC_COPY_VALUES,is2+i);CHKERRQ(ierr);
    ierr = ISSetBlockSize(is1[i],bs);CHKERRQ(ierr);
    ierr = ISSetBlockSize(is2[i],bs);CHKERRQ(ierr);
  }
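
  /* MatIncreaseOverlap() grows each index set by ov levels of connectivity
     in the matrix graph; the parallel expansion on A and the sequential
     expansion on B should yield identical sets, which is what this test
     checks via the extracted submatrices. If debugging is needed, an
     expanded set could be inspected with, for example,
       ierr = ISView(is1[0],PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
     (a debugging aid only, not part of the original test). */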
  if (!test_unsorted) {
    ierr = MatIncreaseOverlap(A,nd,is1,ov);CHKERRQ(ierr);
    ierr = MatIncreaseOverlap(B,nd,is2,ov);CHKERRQ(ierr);

    /* Re-sort so the submatrices extracted below are directly comparable */
    for (i=0; i<nd; ++i) {
      ierr = ISSort(is1[i]);CHKERRQ(ierr);
      ierr = ISSort(is2[i]);CHKERRQ(ierr);
    }
  }

  ierr = MatCreateSubMatrices(A,nd,is1,is1,MAT_INITIAL_MATRIX,&submatA);CHKERRQ(ierr);
  ierr = MatCreateSubMatrices(B,nd,is2,is2,MAT_INITIAL_MATRIX,&submatB);CHKERRQ(ierr);

  /* Check that the parallel and sequential cases give the same answers */
  for (i=0; i<nd; ++i) {
    ierr = MatEqual(submatA[i],submatB[i],&flg);CHKERRQ(ierr);
    if (!flg) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"%D-th parallel submatA != seq submatB",i);
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ierr = ISDestroy(&is1[i]);CHKERRQ(ierr);
    ierr = ISDestroy(&is2[i]);CHKERRQ(ierr);
  }
  ierr = MatDestroySubMatrices(nd,&submatA);CHKERRQ(ierr);
  ierr = MatDestroySubMatrices(nd,&submatB);CHKERRQ(ierr);

  ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
  ierr = PetscFree(is1);CHKERRQ(ierr);
  ierr = PetscFree(is2);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = MatDestroy(&B);CHKERRQ(ierr);
  ierr = PetscFree(idx);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   build:
      requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2

   test:
      suffix: 2
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -ov 2
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex

   test:
      suffix: unsorted_baij_mpi
      nsize: 3
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_baij_seq
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_mpi
      nsize: 3
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

   test:
      suffix: unsorted_seq
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

TEST*/