/*
  Tools to help solve the coarse grid problem redundantly.
  Provides two scatter contexts: (1) one that maps from the usual distributed global
  vector to a copy of the entire vector in NATURAL numbering on every processor, and
  (2) one that extracts this processor's piece in GLOBAL numbering from the entire
  vector stored in natural numbering on each processor.
*/

#include <petsc/private/dmdaimpl.h> /*I "petscdmda.h" I*/

/*@
  DMDAGlobalToNaturalAllCreate - Creates a scatter context that maps from a
  distributed global vector to a copy of the entire vector on each processor,
  in natural numbering

  Collective

  Input Parameter:
. da - the distributed array context

  Output Parameter:
. scatter - the scatter context

  Level: advanced

.seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDAGlobalToNaturalAllCreate(DM da, VecScatter *scatter)
{
  PetscInt N;
  IS       from, to;
  Vec      tmplocal, global;
  AO       ao;
  DM_DA   *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidPointer(scatter, 2);
  PetscCall(DMDAGetAO(da, &ao));

  /* create the scatter context */
  /* source: a placeholder MPI vector with this processor's usual layout of the global vector */
  PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, dd->Nlocal, PETSC_DETERMINE, NULL, &global));
  PetscCall(VecGetSize(global, &N));
  /* destination indices: all PETSc indices translated to natural (application) ordering */
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), N, 0, 1, &to));
  PetscCall(AOPetscToApplicationIS(ao, to));
  /* source indices: every entry of the global vector */
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), N, 0, 1, &from));
  /* destination: a sequential placeholder for the entire vector on this processor */
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dd->w, N, NULL, &tmplocal));
  PetscCall(VecScatterCreate(global, from, tmplocal, to, scatter));
  PetscCall(VecDestroy(&tmplocal));
  PetscCall(VecDestroy(&global));
  PetscCall(ISDestroy(&from));
  PetscCall(ISDestroy(&to));
  PetscFunctionReturn(PETSC_SUCCESS);
}
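/*
  A minimal usage sketch (not part of this file; `da` and `global` are assumed to be a
  fully set up DMDA and a vector obtained from DMCreateGlobalVector() on it): gather the
  entire global vector onto every processor in natural ordering.

    VecScatter scatter;
    Vec        natural_all;
    PetscInt   N;

    PetscCall(VecGetSize(global, &N));
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, N, &natural_all)); // every processor holds all N entries
    PetscCall(DMDAGlobalToNaturalAllCreate(da, &scatter));
    PetscCall(VecScatterBegin(scatter, global, natural_all, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(scatter, global, natural_all, INSERT_VALUES, SCATTER_FORWARD));
    ...
    PetscCall(VecScatterDestroy(&scatter));
    PetscCall(VecDestroy(&natural_all));
*/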
/*@
  DMDANaturalAllToGlobalCreate - Creates a scatter context that maps from a copy
  of the entire vector on each processor, in natural numbering, to its local part
  in the global vector

  Collective

  Input Parameter:
. da - the distributed array context

  Output Parameter:
. scatter - the scatter context

  Level: advanced

.seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDANaturalAllToGlobalCreate(DM da, VecScatter *scatter)
{
  DM_DA   *dd = (DM_DA *)da->data;
  PetscInt M, m = dd->Nlocal, start;
  IS       from, to;
  Vec      tmplocal, global;
  AO       ao;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscValidPointer(scatter, 2);
  PetscCall(DMDAGetAO(da, &ao));

  /* create the scatter context */
  /* M is the total vector length; m is this processor's share */
  PetscCall(MPIU_Allreduce(&m, &M, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)da)));
  PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, m, PETSC_DETERMINE, NULL, &global));
  PetscCall(VecGetOwnershipRange(global, &start, NULL));
  /* source indices: this processor's owned range translated to natural (application) ordering */
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &from));
  PetscCall(AOPetscToApplicationIS(ao, from));
  /* destination indices: the same owned range of the global vector */
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &to));
  /* source: a sequential placeholder for the entire vector on each processor */
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dd->w, M, NULL, &tmplocal));
  PetscCall(VecScatterCreate(tmplocal, from, global, to, scatter));
  PetscCall(VecDestroy(&tmplocal));
  PetscCall(VecDestroy(&global));
  PetscCall(ISDestroy(&from));
  PetscCall(ISDestroy(&to));
  PetscFunctionReturn(PETSC_SUCCESS);
}
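/*
  A minimal usage sketch (not part of this file; `da`, `global`, and `natural_all` are
  assumed to be set up as in the example above): after solving the coarse problem
  redundantly, each processor holds the full solution in natural ordering in
  `natural_all`; push this processor's piece back into the distributed global vector.

    VecScatter scatter;

    PetscCall(DMDANaturalAllToGlobalCreate(da, &scatter));
    PetscCall(VecScatterBegin(scatter, natural_all, global, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(scatter, natural_all, global, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterDestroy(&scatter));
*/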