static char help[] = "Tests MatIncreaseOverlap() - the parallel case. This example\n\
is similar to ex40.c; here the index sets used are random. Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";

#include <petscmat.h>

int main(int argc,char **args)
{
  PetscInt       nd = 2,ov = 1,i,j,m,n,*idx,lsize;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscBool      flg;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
  ierr = PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL);CHKERRQ(ierr);

  /* Read the matrix in parallel */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatLoad(A,fd);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);

  /* Read the same matrix again as a sequential matrix on each process */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
  ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
  ierr = MatLoad(B,fd);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);

  /* Create the random number generator */
  ierr = MatGetSize(A,&m,&n);CHKERRQ(ierr);
  ierr = PetscRandomCreate(PETSC_COMM_SELF,&r);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);

  /* Create the index sets corresponding to the subdomains */
  ierr = PetscMalloc1(nd,&is1);CHKERRQ(ierr);
  ierr = PetscMalloc1(nd,&is2);CHKERRQ(ierr);
  ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);

  /* Create the random index sets */
  for (i=0; i<nd; i++) {
    /* Skip rank values so that each process draws a different subsequence */
    for (j=0; j<rank; j++) {
      ierr = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
    }
    ierr  = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
    lsize = (PetscInt)(rand*m);
    for (j=0; j<lsize; j++) {
      ierr   = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
      idx[j] = (PetscInt)(rand*m);
    }
    ierr = ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is1+i);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is2+i);CHKERRQ(ierr);
  }

  ierr = MatIncreaseOverlap(A,nd,is1,ov);CHKERRQ(ierr);
  ierr = MatIncreaseOverlap(B,nd,is2,ov);CHKERRQ(ierr);

  /* Check that the sequential and parallel cases give the same answer */
  for (i=0; i<nd; ++i) {
    PetscInt sz1,sz2;
    ierr = ISEqual(is1[i],is2[i],&flg);CHKERRQ(ierr);
    ierr = ISGetSize(is1[i],&sz1);CHKERRQ(ierr);
    ierr = ISGetSize(is2[i],&sz2);CHKERRQ(ierr);
    if (!flg) SETERRQ5(PETSC_COMM_SELF,PETSC_ERR_PLIB,"proc:[%d], i=%D, flg =%d sz1 = %D sz2 = %D\n",rank,i,(int)flg,sz1,sz2);
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ierr = ISDestroy(&is1[i]);CHKERRQ(ierr);
    ierr = ISDestroy(&is2[i]);CHKERRQ(ierr);
  }
  ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
  ierr = PetscFree(is1);CHKERRQ(ierr);
  ierr = PetscFree(is2);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = MatDestroy(&B);CHKERRQ(ierr);
  ierr = PetscFree(idx);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   build:
      requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1

TEST*/