xref: /petsc/src/dm/impls/da/dadist.c (revision 7d0a6c19129e7069c8a40e210b34ed62989173db)
/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include "private/daimpl.h"    /*I   "petscdm.h"   I*/
8 #undef __FUNCT__
9 #define __FUNCT__ "VecDuplicate_MPI_DA"
10 PetscErrorCode  VecDuplicate_MPI_DA(Vec g,Vec* gg)
11 {
12   PetscErrorCode ierr;
13   DM             da;
14 
15   PetscFunctionBegin;
16   ierr = PetscObjectQuery((PetscObject)g,"DMDA",(PetscObject*)&da);CHKERRQ(ierr);
17   ierr = DMCreateGlobalVector(da,gg);CHKERRQ(ierr);
18   PetscFunctionReturn(0);
19 }
20 
21 
#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_DA"
/*
  DMCreateGlobalVector_DA - Implementation of DMCreateGlobalVector() for DMDA:
  builds a parallel vector on the DA's communicator with the DA's local size
  (dd->Nlocal) and block size equal to the number of degrees of freedom per
  node (dd->w).

  The DA is composed onto the vector under the key "DMDA" so that the
  installed VECOP_DUPLICATE override (VecDuplicate_MPI_DA) can find its way
  back to the DA, and the view/load operations are replaced with DA-aware
  versions that respect the natural ordering.

  NOTE: the call sequence is order-dependent PETSc object setup — the type
  must be set before operations are overridden, and sizes before the type.

  Input Parameter:
.  da - the distributed array

  Output Parameter:
.  g - the newly created global vector
*/
PetscErrorCode  DMCreateGlobalVector_DA(DM da,Vec* g)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(g,2);
  ierr = VecCreate(((PetscObject)da)->comm,g);CHKERRQ(ierr);
  ierr = VecSetSizes(*g,dd->Nlocal,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(*g,da->vectype);CHKERRQ(ierr);
  /* remember the owning DA so VecDuplicate_MPI_DA can retrieve it later */
  ierr = PetscObjectCompose((PetscObject)*g,"DMDA",(PetscObject)da);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*g,da->ltogmap);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*g,da->ltogmapb);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*g,dd->w);CHKERRQ(ierr);
  /* override generic vector ops with DA-aware versions */
  ierr = VecSetOperation(*g,VECOP_VIEW,(void(*)(void))VecView_MPI_DA);CHKERRQ(ierr);
  ierr = VecSetOperation(*g,VECOP_LOAD,(void(*)(void))VecLoad_Default_DA);CHKERRQ(ierr);
  ierr = VecSetOperation(*g,VECOP_DUPLICATE,(void(*)(void))VecDuplicate_MPI_DA);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
44 
45 #undef __FUNCT__
46 #define __FUNCT__ "DMDACreateNaturalVector"
47 /*@
48    DMDACreateNaturalVector - Creates a parallel PETSc vector that
49    will hold vector values in the natural numbering, rather than in
50    the PETSc parallel numbering associated with the DMDA.
51 
52    Collective on DMDA
53 
54    Input Parameter:
55 .  da - the distributed array
56 
57    Output Parameter:
58 .  g - the distributed global vector
59 
60    Level: developer
61 
62    Note:
63    The output parameter, g, is a regular PETSc vector that should be destroyed
64    with a call to VecDestroy() when usage is finished.
65 
66    The number of local entries in the vector on each process is the same
67    as in a vector created with DMCreateGlobalVector().
68 
69 .keywords: distributed array, create, global, distributed, vector
70 
71 .seealso: DMCreateLocalVector(), VecDuplicate(), VecDuplicateVecs(),
72           DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(),
73           DMGlobalToLocalEnd(), DMDALocalToGlobalBegin()
74 @*/
75 PetscErrorCode  DMDACreateNaturalVector(DM da,Vec* g)
76 {
77   PetscErrorCode ierr;
78   PetscInt       cnt;
79   DM_DA          *dd = (DM_DA*)da->data;
80 
81   PetscFunctionBegin;
82   PetscValidHeaderSpecific(da,DM_CLASSID,1);
83   PetscValidPointer(g,2);
84   if (dd->natural) {
85     ierr = PetscObjectGetReference((PetscObject)dd->natural,&cnt);CHKERRQ(ierr);
86     if (cnt == 1) { /* object is not currently used by anyone */
87       ierr = PetscObjectReference((PetscObject)dd->natural);CHKERRQ(ierr);
88       *g   = dd->natural;
89     } else {
90       ierr = VecDuplicate(dd->natural,g);CHKERRQ(ierr);
91     }
92   } else { /* create the first version of this guy */
93     ierr = VecCreateMPI(((PetscObject)da)->comm,dd->Nlocal,PETSC_DETERMINE,g);CHKERRQ(ierr);
94     ierr = VecSetBlockSize(*g, dd->w);CHKERRQ(ierr);
95     ierr = PetscObjectReference((PetscObject)*g);CHKERRQ(ierr);
96     dd->natural = *g;
97   }
98   PetscFunctionReturn(0);
99 }
103