
/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <private/daimpl.h>    /*I  "petscdmda.h"  I*/

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI_DA"
/* Duplicates a DMDA global vector by recovering the DMDA composed on the
   original vector and asking it for a fresh global vector with the same
   parallel layout. */
PetscErrorCode VecDuplicate_MPI_DA(Vec g,Vec *gg)
{
  PetscErrorCode ierr;
  DM             da;

  PetscFunctionBegin;
  ierr = PetscObjectQuery((PetscObject)g,"DM",(PetscObject*)&da);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da,gg);CHKERRQ(ierr);
  ierr = PetscLayoutReference(g->map,&(*gg)->map);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_DA"
/* Creates the global vector for a DMDA: sets the sizes, type, block size,
   and local-to-global mappings, composes the DMDA on the vector, and
   overrides the view/load/duplicate operations with the DMDA-aware ones. */
PetscErrorCode DMCreateGlobalVector_DA(DM da,Vec *g)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(g,2);
  ierr = VecCreate(((PetscObject)da)->comm,g);CHKERRQ(ierr);
  ierr = VecSetSizes(*g,dd->Nlocal,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(*g,da->vectype);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)*g,"DM",(PetscObject)da);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*g,da->ltogmap);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*g,da->ltogmapb);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*g,dd->w);CHKERRQ(ierr);
  ierr = VecSetOperation(*g,VECOP_VIEW,(void(*)(void))VecView_MPI_DA);CHKERRQ(ierr);
  ierr = VecSetOperation(*g,VECOP_LOAD,(void(*)(void))VecLoad_Default_DA);CHKERRQ(ierr);
  ierr = VecSetOperation(*g,VECOP_DUPLICATE,(void(*)(void))VecDuplicate_MPI_DA);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDACreateNaturalVector"
/*@
   DMDACreateNaturalVector - Creates a parallel PETSc vector that
   holds vector values in the natural numbering, rather than in
   the PETSc parallel numbering associated with the DMDA.

   Collective on DMDA

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  g - the distributed global vector

   Level: developer

   Notes:
   The output parameter, g, is a regular PETSc vector that should be destroyed
   with a call to VecDestroy() when it is no longer needed.

   The number of local entries in the vector on each process is the same
   as in a vector created with DMCreateGlobalVector().

.keywords: distributed array, create, global, distributed, vector

.seealso: DMCreateLocalVector(), VecDuplicate(), VecDuplicateVecs(),
          DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(),
          DMGlobalToLocalEnd(), DMDALocalToGlobalBegin()
@*/
PetscErrorCode DMDACreateNaturalVector(DM da,Vec *g)
{
  PetscErrorCode ierr;
  PetscInt       cnt;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(g,2);
  if (dd->natural) {
    ierr = PetscObjectGetReference((PetscObject)dd->natural,&cnt);CHKERRQ(ierr);
    if (cnt == 1) { /* the cached vector is not currently used by anyone else */
      ierr = PetscObjectReference((PetscObject)dd->natural);CHKERRQ(ierr);
      *g   = dd->natural;
    } else {
      ierr = VecDuplicate(dd->natural,g);CHKERRQ(ierr);
    }
  } else { /* create the first natural vector and cache it on the DMDA */
    ierr = VecCreateMPI(((PetscObject)da)->comm,dd->Nlocal,PETSC_DETERMINE,g);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*g,dd->w);CHKERRQ(ierr);
    ierr = PetscObjectReference((PetscObject)*g);CHKERRQ(ierr);
    dd->natural = *g;
  }
  PetscFunctionReturn(0);
}
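
/* ------------------------------------------------------------------
   A minimal usage sketch (kept out of compilation with #if 0) showing
   how the routines above are reached through the public API: creating
   a DMDA global vector dispatches to DMCreateGlobalVector_DA(), and
   VecDuplicate() on that vector dispatches to VecDuplicate_MPI_DA().
   The grid sizes, dof, and stencil width are arbitrary example values,
   and the calls assume the PETSc 3.2-era API used in this file.
   ------------------------------------------------------------------ */
#if 0
static PetscErrorCode ExampleGlobalVector(void)
{
  PetscErrorCode ierr;
  DM             da;
  Vec            g,gdup;

  PetscFunctionBegin;
  /* 2d structured grid: 8x8 points, 2 dof per point, stencil width 1 */
  ierr = DMDACreate2d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,
                      DMDA_STENCIL_STAR,8,8,PETSC_DECIDE,PETSC_DECIDE,
                      2,1,PETSC_NULL,PETSC_NULL,&da);CHKERRQ(ierr);
  /* dispatches to DMCreateGlobalVector_DA() above */
  ierr = DMCreateGlobalVector(da,&g);CHKERRQ(ierr);
  /* dispatches to VecDuplicate_MPI_DA() above, so the duplicate shares
     the DMDA and the parallel layout of the original vector */
  ierr = VecDuplicate(g,&gdup);CHKERRQ(ierr);

  ierr = VecDestroy(&gdup);CHKERRQ(ierr);
  ierr = VecDestroy(&g);CHKERRQ(ierr);
  ierr = DMDestroy(&da);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif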
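
/* ------------------------------------------------------------------
   A second non-compiled sketch illustrating DMDACreateNaturalVector():
   the natural vector has the same local sizes as a global vector from
   DMCreateGlobalVector(), but is intended to hold values in the natural
   (lexicographic) ordering, typically filled by the global-to-natural
   scatter.  ExampleNaturalOrdering is a hypothetical helper name.
   ------------------------------------------------------------------ */
#if 0
static PetscErrorCode ExampleNaturalOrdering(DM da,Vec g)
{
  PetscErrorCode ierr;
  Vec            natural;

  PetscFunctionBegin;
  ierr = DMDACreateNaturalVector(da,&natural);CHKERRQ(ierr);
  /* scatter the global vector g into the natural ordering */
  ierr = DMDAGlobalToNaturalBegin(da,g,INSERT_VALUES,natural);CHKERRQ(ierr);
  ierr = DMDAGlobalToNaturalEnd(da,g,INSERT_VALUES,natural);CHKERRQ(ierr);
  ierr = VecDestroy(&natural);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif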