xref: /petsc/src/dm/impls/da/daview.c (revision 3923b477fd0dced8a2d147b4fb4519fe3af97d3f)
1 
2 /*
3   Code for manipulating distributed regular arrays in parallel.
4 */
5 
6 #include <petsc-private/daimpl.h>    /*I   "petscdmda.h"   I*/
7 
8 #if defined(PETSC_HAVE_MATLAB_ENGINE)
9 #include <mat.h>   /* MATLAB include file */
10 
11 #undef __FUNCT__
12 #define __FUNCT__ "DMView_DA_Matlab"
13 PetscErrorCode DMView_DA_Matlab(DM da,PetscViewer viewer)
14 {
15   PetscErrorCode   ierr;
16   PetscMPIInt      rank;
17   PetscInt         dim,m,n,p,dof,swidth;
18   DMDAStencilType  stencil;
19   DMDABoundaryType bx,by,bz;
20   mxArray          *mx;
21   const char       *fnames[] = {"dimension","m","n","p","dof","stencil_width","bx","by","bz","stencil_type"};
22 
23   PetscFunctionBegin;
24   ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr);
25   if (!rank) {
26     ierr = DMDAGetInfo(da,&dim,&m,&n,&p,0,0,0,&dof,&swidth,&bx,&by,&bz,&stencil);CHKERRQ(ierr);
27     mx = mxCreateStructMatrix(1,1,8,(const char **)fnames);
28     if (!mx) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to generate MATLAB struct array to hold DMDA informations");
29     mxSetFieldByNumber(mx,0,0,mxCreateDoubleScalar((double)dim));
30     mxSetFieldByNumber(mx,0,1,mxCreateDoubleScalar((double)m));
31     mxSetFieldByNumber(mx,0,2,mxCreateDoubleScalar((double)n));
32     mxSetFieldByNumber(mx,0,3,mxCreateDoubleScalar((double)p));
33     mxSetFieldByNumber(mx,0,4,mxCreateDoubleScalar((double)dof));
34     mxSetFieldByNumber(mx,0,5,mxCreateDoubleScalar((double)swidth));
35     mxSetFieldByNumber(mx,0,6,mxCreateDoubleScalar((double)bx));
36     mxSetFieldByNumber(mx,0,7,mxCreateDoubleScalar((double)by));
37     mxSetFieldByNumber(mx,0,8,mxCreateDoubleScalar((double)bz));
38     mxSetFieldByNumber(mx,0,9,mxCreateDoubleScalar((double)stencil));
39     ierr = PetscObjectName((PetscObject)da);CHKERRQ(ierr);
40     ierr = PetscViewerMatlabPutVariable(viewer,((PetscObject)da)->name,mx);CHKERRQ(ierr);
41   }
42   PetscFunctionReturn(0);
43 }
44 #endif
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "DMView_DA_Binary"
48 PetscErrorCode DMView_DA_Binary(DM da,PetscViewer viewer)
49 {
50   PetscErrorCode   ierr;
51   PetscMPIInt      rank;
52   PetscInt         dim,m,n,p,dof,swidth,M,N,P;
53   DMDAStencilType  stencil;
54   DMDABoundaryType bx,by,bz;
55   MPI_Comm         comm;
56   PetscInt         classid = DM_FILE_CLASSID,subclassid = DMDA_FILE_CLASSID ;
57   PetscBool        coors = PETSC_FALSE;
58 
59   PetscFunctionBegin;
60   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
61 
62   ierr = DMDAGetInfo(da,&dim,&m,&n,&p,&M,&N,&P,&dof,&swidth,&bx,&by,&bz,&stencil);CHKERRQ(ierr);
63   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
64   if (!rank) {
65 
66     ierr = PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
67     ierr = PetscViewerBinaryWrite(viewer,&subclassid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
68     ierr = PetscViewerBinaryWrite(viewer,&dim,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
69     ierr = PetscViewerBinaryWrite(viewer,&m,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
70     ierr = PetscViewerBinaryWrite(viewer,&n,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
71     ierr = PetscViewerBinaryWrite(viewer,&p,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
72     ierr = PetscViewerBinaryWrite(viewer,&dof,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
73     ierr = PetscViewerBinaryWrite(viewer,&swidth,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
74     ierr = PetscViewerBinaryWrite(viewer,&bx,1,PETSC_ENUM,PETSC_FALSE);CHKERRQ(ierr);
75     ierr = PetscViewerBinaryWrite(viewer,&by,1,PETSC_ENUM,PETSC_FALSE);CHKERRQ(ierr);
76     ierr = PetscViewerBinaryWrite(viewer,&bz,1,PETSC_ENUM,PETSC_FALSE);CHKERRQ(ierr);
77     ierr = PetscViewerBinaryWrite(viewer,&stencil,1,PETSC_ENUM,PETSC_FALSE);CHKERRQ(ierr);
78     if (da->coordinates) coors = PETSC_TRUE;
79     ierr = PetscViewerBinaryWrite(viewer,&coors,1,PETSC_BOOL,PETSC_FALSE);CHKERRQ(ierr);
80   }
81 
82   /* save the coordinates if they exist to disk (in the natural ordering) */
83   if (da->coordinates) {
84     ierr = VecView(da->coordinates,viewer);CHKERRQ(ierr);
85   }
86   PetscFunctionReturn(0);
87 }
88 
89 #undef __FUNCT__
90 #define __FUNCT__ "DMView_DA_VTK"
91 PetscErrorCode DMView_DA_VTK(DM da, PetscViewer viewer)
92 {
93   PetscInt       dim, dof, M = 0, N = 0, P = 0;
94   PetscErrorCode ierr;
95 
96   PetscFunctionBegin;
97   ierr = DMDAGetInfo(da, &dim, &M, &N, &P, PETSC_NULL, PETSC_NULL, PETSC_NULL, &dof, PETSC_NULL, PETSC_NULL, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
98   /* if (dim != 3) {SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP, "VTK output only works for three dimensional DMDAs.");} */
99   if (!da->coordinates) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP, "VTK output requires DMDA coordinates.");
100   /* Write Header */
101   ierr = PetscViewerASCIIPrintf(viewer,"# vtk DataFile Version 2.0\n");CHKERRQ(ierr);
102   ierr = PetscViewerASCIIPrintf(viewer,"Structured Mesh Example\n");CHKERRQ(ierr);
103   ierr = PetscViewerASCIIPrintf(viewer,"ASCII\n");CHKERRQ(ierr);
104   ierr = PetscViewerASCIIPrintf(viewer,"DATASET STRUCTURED_GRID\n");CHKERRQ(ierr);
105   ierr = PetscViewerASCIIPrintf(viewer,"DIMENSIONS %d %d %d\n", M, N, P);CHKERRQ(ierr);
106   ierr = PetscViewerASCIIPrintf(viewer,"POINTS %d double\n", M*N*P);CHKERRQ(ierr);
107   if (da->coordinates) {
108     DM  dac;
109     Vec natural;
110 
111     ierr = DMGetCoordinateDM(da, &dac);CHKERRQ(ierr);
112     ierr = DMDACreateNaturalVector(dac, &natural);CHKERRQ(ierr);
113     ierr = PetscObjectSetOptionsPrefix((PetscObject) natural, "coor_");CHKERRQ(ierr);
114     ierr = DMDAGlobalToNaturalBegin(dac, da->coordinates, INSERT_VALUES, natural);CHKERRQ(ierr);
115     ierr = DMDAGlobalToNaturalEnd(dac, da->coordinates, INSERT_VALUES, natural);CHKERRQ(ierr);
116     ierr = PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_VTK_COORDS);CHKERRQ(ierr);
117     ierr = VecView(natural, viewer);CHKERRQ(ierr);
118     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
119     ierr = VecDestroy(&natural);CHKERRQ(ierr);
120   }
121   PetscFunctionReturn(0);
122 }
123 
124 #undef __FUNCT__
125 #define __FUNCT__ "DMDAGetInfo"
126 /*@C
127    DMDAGetInfo - Gets information about a given distributed array.
128 
129    Not Collective
130 
131    Input Parameter:
132 .  da - the distributed array
133 
134    Output Parameters:
135 +  dim      - dimension of the distributed array (1, 2, or 3)
136 .  M, N, P  - global dimension in each direction of the array
137 .  m, n, p  - corresponding number of procs in each dimension
138 .  dof      - number of degrees of freedom per node
139 .  s        - stencil width
140 .  bx,by,bz - type of ghost nodes at boundary, one of DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_GHOSTED,
141               DMDA_BOUNDARY_MIRROR, DMDA_BOUNDARY_PERIODIC
142 -  st       - stencil type, either DMDA_STENCIL_STAR or DMDA_STENCIL_BOX
143 
144    Level: beginner
145 
146    Note:
147    Use PETSC_NULL (PETSC_NULL_INTEGER in Fortran) in place of any output parameter that is not of interest.
148 
149 .keywords: distributed array, get, information
150 
151 .seealso: DMView(), DMDAGetCorners(), DMDAGetLocalInfo()
152 @*/
153 PetscErrorCode  DMDAGetInfo(DM da,PetscInt *dim,PetscInt *M,PetscInt *N,PetscInt *P,PetscInt *m,PetscInt *n,PetscInt *p,PetscInt *dof,PetscInt *s,DMDABoundaryType *bx,DMDABoundaryType *by,DMDABoundaryType *bz,DMDAStencilType *st)
154 {
155   DM_DA *dd = (DM_DA*)da->data;
156 
157   PetscFunctionBegin;
158   PetscValidHeaderSpecific(da,DM_CLASSID,1);
159   if (dim)  *dim  = dd->dim;
160   if (M)    *M    = dd->M;
161   if (N)    *N    = dd->N;
162   if (P)    *P    = dd->P;
163   if (m)    *m    = dd->m;
164   if (n)    *n    = dd->n;
165   if (p)    *p    = dd->p;
166   if (dof)  *dof  = dd->w;
167   if (s)    *s    = dd->s;
168   if (bx) *bx = dd->bx;
169   if (by) *by = dd->by;
170   if (bz) *bz = dd->bz;
171   if (st)   *st   = dd->stencil_type;
172   PetscFunctionReturn(0);
173 }
174 
175 #undef __FUNCT__
176 #define __FUNCT__ "DMDAGetLocalInfo"
177 /*@C
178    DMDAGetLocalInfo - Gets information about a given distributed array and this processors location in it
179 
180    Not Collective
181 
182    Input Parameter:
183 .  da - the distributed array
184 
185    Output Parameters:
186 .  dainfo - structure containing the information
187 
188    Level: beginner
189 
190 .keywords: distributed array, get, information
191 
192 .seealso: DMDAGetInfo(), DMDAGetCorners()
193 @*/
194 PetscErrorCode  DMDAGetLocalInfo(DM da,DMDALocalInfo *info)
195 {
196   PetscInt w;
197   DM_DA    *dd = (DM_DA*)da->data;
198 
199   PetscFunctionBegin;
200   PetscValidHeaderSpecific(da,DM_CLASSID,1);
201   PetscValidPointer(info,2);
202   info->da   = da;
203   info->dim  = dd->dim;
204   info->mx   = dd->M;
205   info->my   = dd->N;
206   info->mz   = dd->P;
207   info->dof  = dd->w;
208   info->sw   = dd->s;
209   info->bx   = dd->bx;
210   info->by   = dd->by;
211   info->bz   = dd->bz;
212   info->st   = dd->stencil_type;
213 
214   /* since the xs, xe ... have all been multiplied by the number of degrees
215      of freedom per cell, w = dd->w, we divide that out before returning.*/
216   w = dd->w;
217   info->xs = dd->xs/w;
218   info->xm = (dd->xe - dd->xs)/w;
219   /* the y and z have NOT been multiplied by w */
220   info->ys = dd->ys;
221   info->ym = (dd->ye - dd->ys);
222   info->zs = dd->zs;
223   info->zm = (dd->ze - dd->zs);
224 
225   info->gxs = dd->Xs/w;
226   info->gxm = (dd->Xe - dd->Xs)/w;
227   /* the y and z have NOT been multiplied by w */
228   info->gys = dd->Ys;
229   info->gym = (dd->Ye - dd->Ys);
230   info->gzs = dd->Zs;
231   info->gzm = (dd->Ze - dd->Zs);
232   PetscFunctionReturn(0);
233 }
234 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetLocalBlockInfo"
/*@C
   DMDAGetLocalBlockInfo - Gets information about a given distributed array and this processors location in it with overlap taken into account

   Not Collective

   Input Parameter:
.  da - the distributed array

   Output Parameters:
.  dainfo - structure containing the information

   Level: beginner

.keywords: distributed array, get, information

.seealso: DMDAGetLocalInfo(), DMDASetOverlap()
@*/
PetscErrorCode  DMDAGetLocalBlockInfo(DM da,DMDALocalInfo *info)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  /* start from the plain (non-overlapping) local info, then widen it */
  ierr = DMDAGetLocalInfo(da,info);CHKERRQ(ierr);

  if (dd->overlap > 0) {
    /* For each direction: extend the block by dd->overlap on the low side
       (shifting the start and enlarging the width) and on the high side
       (enlarging the width only), but only where the extension stays inside
       the global domain or the boundary is periodic (wrap-around).
       NOTE(review): the low-side test uses "> 0", so a block whose extension
       would land exactly on index 0 is NOT widened — presumably intentional,
       but confirm against DMDASetOverlap semantics. */
    if (info->xs - dd->overlap > 0 || info->bx == DMDA_BOUNDARY_PERIODIC) {
      info->xs -= dd->overlap;
      info->xm += dd->overlap;
    }
    if (info->xs + info->xm + dd->overlap < info->mx || info->bx == DMDA_BOUNDARY_PERIODIC) {
      info->xm += dd->overlap;
    }
    if (info->ys - dd->overlap > 0 || info->by == DMDA_BOUNDARY_PERIODIC) {
      info->ys -= dd->overlap;
      info->ym += dd->overlap;
    }
    if (info->ys + info->ym + dd->overlap < info->my || info->by == DMDA_BOUNDARY_PERIODIC) {
      info->ym += dd->overlap;
    }
    if (info->zs - dd->overlap > 0 || info->bz == DMDA_BOUNDARY_PERIODIC) {
      info->zs -= dd->overlap;
      info->zm += dd->overlap;
    }
    if (info->zs + info->zm + dd->overlap < info->mz || info->bz == DMDA_BOUNDARY_PERIODIC) {
      info->zm += dd->overlap;
    }
  }
  PetscFunctionReturn(0);
}
286