static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between domains\n\n";

#include <petscmat.h>

/*
   Gather an index set onto every process of its communicator as an array of
   sequential index sets, one per rank, keeping each rank's contribution in a
   separate IS. The caller must destroy every returned IS and free the array.
*/
PetscErrorCode ISAllGatherDisjoint(IS iis, IS **ois)
{
  IS              *is2, is;
  const PetscInt  *idxs;
  PetscInt         i, ls, *sizes;
  PetscMPIInt      size;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)iis), &size));
  PetscCall(PetscMalloc1(size, &is2));
  PetscCall(PetscMalloc1(size, &sizes));
  PetscCall(ISGetLocalSize(iis, &ls));
  /* we don't have a public ISGetLayout, so gather the local sizes explicitly */
  PetscCallMPI(MPI_Allgather(&ls, 1, MPIU_INT, sizes, 1, MPIU_INT, PetscObjectComm((PetscObject)iis)));
  PetscCall(ISAllGather(iis, &is));
  PetscCall(ISGetIndices(is, &idxs));
  /* ls is reused here as the running offset into the gathered indices */
  for (i = 0, ls = 0; i < size; i++) {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, sizes[i], idxs + ls, PETSC_COPY_VALUES, &is2[i]));
    ls += sizes[i];
  }
  PetscCall(ISRestoreIndices(is, &idxs));
  PetscCall(ISDestroy(&is));
  PetscCall(PetscFree(sizes));
  *ois = is2;
  PetscFunctionReturn(0);
}

int main(int argc, char **args)
{
  PetscInt    nd = 2, ov = 1, ndpar, i, start, m, n, end, lsize;
  PetscMPIInt rank;
  PetscBool   flg, useND = PETSC_FALSE;
  Mat         A, B;
  char        file[PETSC_MAX_PATH_LEN];
  PetscViewer fd;
  IS          *is1, *is2;
  PetscRandom r;
  PetscScalar rand;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), &flg));
  PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_USER, "Must use -f filename to indicate a file containing a PETSc binary matrix");
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nd", &nd, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ov", &ov, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-nested_dissection", &useND, NULL));

  /* Read the matrix as a parallel (MPIAIJ) matrix */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetType(A, MATMPIAIJ));
  PetscCall(MatLoad(A, fd));
  PetscCall(MatSetFromOptions(A));
  PetscCall(PetscViewerDestroy(&fd));

  /* Read the matrix again as a sequential matrix on every process */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_SELF, &B));
  PetscCall(MatSetType(B, MATSEQAIJ));
  PetscCall(MatLoad(B, fd));
  PetscCall(MatSetFromOptions(B));
  PetscCall(PetscViewerDestroy(&fd));

  /* Create the IS corresponding to subdomains */
  if (useND) {
    MatPartitioning part;
    IS              ndmap;
    PetscMPIInt     size;

    /* one subdomain per process, obtained from a nested-dissection ordering */
    ndpar = 1;
    PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
    nd = (PetscInt)size;
    PetscCall(PetscMalloc1(ndpar, &is1));
    PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
    PetscCall(MatPartitioningSetAdjacency(part, A));
    PetscCall(MatPartitioningSetFromOptions(part));
    PetscCall(MatPartitioningApplyND(part, &ndmap));
    PetscCall(MatPartitioningDestroy(&part));
    PetscCall(ISBuildTwoSided(ndmap, NULL, &is1[0]));
    PetscCall(ISDestroy(&ndmap));
    PetscCall(ISAllGatherDisjoint(is1[0], &is2));
  } else {
    /* Create the random index sets */
    PetscCall(PetscMalloc1(nd, &is1));
    PetscCall(PetscMalloc1(nd, &is2));

    PetscCall(MatGetSize(A, &m, &n));
    PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
    PetscCall(PetscRandomSetFromOptions(r));
    /* each subdomain is a random contiguous block of rows, duplicated in is1 and is2 */
    for (i = 0; i < nd; i++) {
      PetscCall(PetscRandomGetValue(r, &rand));
      start = (PetscInt)(rand * m);
      PetscCall(PetscRandomGetValue(r, &rand));
      end = (PetscInt)(rand * m);
      lsize = end - start;
      if (start > end) { start = end; lsize = -lsize; }
      PetscCall(ISCreateStride(PETSC_COMM_SELF, lsize, start, 1, is1 + i));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, lsize, start, 1, is2 + i));
    }
    ndpar = nd;
    PetscCall(PetscRandomDestroy(&r));
  }
  /* Grow the subdomains by ov levels of overlap, in parallel and sequentially */
  PetscCall(MatIncreaseOverlap(A, ndpar, is1, ov));
  PetscCall(MatIncreaseOverlap(B, nd, is2, ov));
  if (useND) {
    IS *is;

    /* gather every rank's overlapped subdomain so each process can compare all of them */
    PetscCall(ISAllGatherDisjoint(is1[0], &is));
    PetscCall(ISDestroy(&is1[0]));
    PetscCall(PetscFree(is1));
    is1 = is;
  }
  /* Now check that the serial and parallel cases produce the same answer */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISEqual(is1[i], is2[i], &flg));
    if (!flg) {
      PetscCall(ISViewFromOptions(is1[i], NULL, "-err_view"));
      PetscCall(ISViewFromOptions(is2[i], NULL, "-err_view"));
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "proc:[%d], i=%" PetscInt_FMT ", flg=%d", rank, i, (int)flg);
    }
  }

  /* Free allocated memory */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISDestroy(&is1[i]));
    PetscCall(ISDestroy(&is2[i]));
  }
  PetscCall(PetscFree(is1));
  PetscCall(PetscFree(is2));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: !complex

   testset:
      nsize: 5
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -viewer_binary_skip_info -ov 2
      output_file: output/ex40_1.out
      test:
         suffix: 1
         args: -nd 7
      test:
         requires: parmetis
         suffix: 1_nd
         args: -nested_dissection -mat_partitioning_type parmetis

   testset:
      nsize: 3
      requires: double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -mat_increase_overlap_scalable 1 -ov 2
      output_file: output/ex40_1.out
      test:
         suffix: 2
         args: -nd 7
      test:
         requires: parmetis
         suffix: 2_nd
         args: -nested_dissection -mat_partitioning_type parmetis

TEST*/
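/*
   For reference, a manual invocation corresponding roughly to the second
   testset above. This is a sketch, not part of the harness: it assumes the
   executable is named ex40 (as the output_file entries suggest), that
   ${wPETSC_DIR} expands to ${PETSC_DIR}, and that mpiexec is the MPI launcher
   configured for this PETSc build:

      mpiexec -n 3 ./ex40 -nd 7 -ov 2 -mat_increase_overlap_scalable 1 \
         -f ${PETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64

   On success the program prints nothing; any mismatch between the parallel
   and sequential MatIncreaseOverlap() results aborts with PETSC_ERR_PLIB,
   and -err_view can be used to display the offending index sets.
*/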