1 2 /* 3 Code for manipulating distributed regular arrays in parallel. 4 */ 5 6 #include <petsc-private/daimpl.h> /*I "petscdmda.h" I*/ 7 8 #if defined(PETSC_HAVE_MATLAB_ENGINE) 9 #include <mat.h> /* MATLAB include file */ 10 11 #undef __FUNCT__ 12 #define __FUNCT__ "DMView_DA_Matlab" 13 PetscErrorCode DMView_DA_Matlab(DM da,PetscViewer viewer) 14 { 15 PetscErrorCode ierr; 16 PetscMPIInt rank; 17 PetscInt dim,m,n,p,dof,swidth; 18 DMDAStencilType stencil; 19 DMDABoundaryType bx,by,bz; 20 mxArray *mx; 21 const char *fnames[] = {"dimension","m","n","p","dof","stencil_width","bx","by","bz","stencil_type"}; 22 23 PetscFunctionBegin; 24 ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr); 25 if (!rank) { 26 ierr = DMDAGetInfo(da,&dim,&m,&n,&p,0,0,0,&dof,&swidth,&bx,&by,&bz,&stencil);CHKERRQ(ierr); 27 mx = mxCreateStructMatrix(1,1,8,(const char **)fnames); 28 if (!mx) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to generate MATLAB struct array to hold DMDA informations"); 29 mxSetFieldByNumber(mx,0,0,mxCreateDoubleScalar((double)dim)); 30 mxSetFieldByNumber(mx,0,1,mxCreateDoubleScalar((double)m)); 31 mxSetFieldByNumber(mx,0,2,mxCreateDoubleScalar((double)n)); 32 mxSetFieldByNumber(mx,0,3,mxCreateDoubleScalar((double)p)); 33 mxSetFieldByNumber(mx,0,4,mxCreateDoubleScalar((double)dof)); 34 mxSetFieldByNumber(mx,0,5,mxCreateDoubleScalar((double)swidth)); 35 mxSetFieldByNumber(mx,0,6,mxCreateDoubleScalar((double)bx)); 36 mxSetFieldByNumber(mx,0,7,mxCreateDoubleScalar((double)by)); 37 mxSetFieldByNumber(mx,0,8,mxCreateDoubleScalar((double)bz)); 38 mxSetFieldByNumber(mx,0,9,mxCreateDoubleScalar((double)stencil)); 39 ierr = PetscObjectName((PetscObject)da);CHKERRQ(ierr); 40 ierr = PetscViewerMatlabPutVariable(viewer,((PetscObject)da)->name,mx);CHKERRQ(ierr); 41 } 42 PetscFunctionReturn(0); 43 } 44 #endif 45 46 #undef __FUNCT__ 47 #define __FUNCT__ "DMView_DA_Binary" 48 PetscErrorCode DMView_DA_Binary(DM da,PetscViewer viewer) 49 { 50 PetscErrorCode ierr; 51 
PetscMPIInt rank; 52 PetscInt dim,m,n,p,dof,swidth,M,N,P; 53 DMDAStencilType stencil; 54 DMDABoundaryType bx,by,bz; 55 MPI_Comm comm; 56 PetscBool coors = PETSC_FALSE; 57 58 PetscFunctionBegin; 59 ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr); 60 61 ierr = DMDAGetInfo(da,&dim,&m,&n,&p,&M,&N,&P,&dof,&swidth,&bx,&by,&bz,&stencil);CHKERRQ(ierr); 62 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 63 if (!rank) { 64 ierr = PetscViewerBinaryWrite(viewer,&dim,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 65 ierr = PetscViewerBinaryWrite(viewer,&m,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 66 ierr = PetscViewerBinaryWrite(viewer,&n,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 67 ierr = PetscViewerBinaryWrite(viewer,&p,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 68 ierr = PetscViewerBinaryWrite(viewer,&dof,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 69 ierr = PetscViewerBinaryWrite(viewer,&swidth,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 70 ierr = PetscViewerBinaryWrite(viewer,&bx,1,PETSC_ENUM,PETSC_FALSE);CHKERRQ(ierr); 71 ierr = PetscViewerBinaryWrite(viewer,&by,1,PETSC_ENUM,PETSC_FALSE);CHKERRQ(ierr); 72 ierr = PetscViewerBinaryWrite(viewer,&bz,1,PETSC_ENUM,PETSC_FALSE);CHKERRQ(ierr); 73 ierr = PetscViewerBinaryWrite(viewer,&stencil,1,PETSC_ENUM,PETSC_FALSE);CHKERRQ(ierr); 74 if (da->coordinates) coors = PETSC_TRUE; 75 ierr = PetscViewerBinaryWrite(viewer,&coors,1,PETSC_BOOL,PETSC_FALSE);CHKERRQ(ierr); 76 } 77 78 /* save the coordinates if they exist to disk (in the natural ordering) */ 79 if (da->coordinates) { 80 ierr = VecView(da->coordinates,viewer);CHKERRQ(ierr); 81 } 82 PetscFunctionReturn(0); 83 } 84 85 #undef __FUNCT__ 86 #define __FUNCT__ "DMView_DA_VTK" 87 PetscErrorCode DMView_DA_VTK(DM da, PetscViewer viewer) 88 { 89 PetscInt dim, dof, M = 0, N = 0, P = 0; 90 PetscErrorCode ierr; 91 92 PetscFunctionBegin; 93 ierr = DMDAGetInfo(da, &dim, &M, &N, &P, PETSC_NULL, PETSC_NULL, PETSC_NULL, &dof, PETSC_NULL, PETSC_NULL, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr); 94 
/* if (dim != 3) {SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP, "VTK output only works for three dimensional DMDAs.");} */ 95 if (!da->coordinates) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP, "VTK output requires DMDA coordinates."); 96 /* Write Header */ 97 ierr = PetscViewerASCIIPrintf(viewer,"# vtk DataFile Version 2.0\n");CHKERRQ(ierr); 98 ierr = PetscViewerASCIIPrintf(viewer,"Structured Mesh Example\n");CHKERRQ(ierr); 99 ierr = PetscViewerASCIIPrintf(viewer,"ASCII\n");CHKERRQ(ierr); 100 ierr = PetscViewerASCIIPrintf(viewer,"DATASET STRUCTURED_GRID\n");CHKERRQ(ierr); 101 ierr = PetscViewerASCIIPrintf(viewer,"DIMENSIONS %d %d %d\n", M, N, P);CHKERRQ(ierr); 102 ierr = PetscViewerASCIIPrintf(viewer,"POINTS %d double\n", M*N*P);CHKERRQ(ierr); 103 if (da->coordinates) { 104 DM dac; 105 Vec natural; 106 107 ierr = DMGetCoordinateDM(da, &dac);CHKERRQ(ierr); 108 ierr = DMDACreateNaturalVector(dac, &natural);CHKERRQ(ierr); 109 ierr = PetscObjectSetOptionsPrefix((PetscObject) natural, "coor_");CHKERRQ(ierr); 110 ierr = DMDAGlobalToNaturalBegin(dac, da->coordinates, INSERT_VALUES, natural);CHKERRQ(ierr); 111 ierr = DMDAGlobalToNaturalEnd(dac, da->coordinates, INSERT_VALUES, natural);CHKERRQ(ierr); 112 ierr = PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_VTK_COORDS);CHKERRQ(ierr); 113 ierr = VecView(natural, viewer);CHKERRQ(ierr); 114 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 115 ierr = VecDestroy(&natural);CHKERRQ(ierr); 116 } 117 PetscFunctionReturn(0); 118 } 119 120 #undef __FUNCT__ 121 #define __FUNCT__ "DMDAGetInfo" 122 /*@C 123 DMDAGetInfo - Gets information about a given distributed array. 124 125 Not Collective 126 127 Input Parameter: 128 . da - the distributed array 129 130 Output Parameters: 131 + dim - dimension of the distributed array (1, 2, or 3) 132 . M, N, P - global dimension in each direction of the array 133 . m, n, p - corresponding number of procs in each dimension 134 . dof - number of degrees of freedom per node 135 . s - stencil width 136 . 
bx,by,bz - type of ghost nodes at boundary, one of DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_GHOSTED, 137 DMDA_BOUNDARY_MIRROR, DMDA_BOUNDARY_PERIODIC 138 - st - stencil type, either DMDA_STENCIL_STAR or DMDA_STENCIL_BOX 139 140 Level: beginner 141 142 Note: 143 Use PETSC_NULL (PETSC_NULL_INTEGER in Fortran) in place of any output parameter that is not of interest. 144 145 .keywords: distributed array, get, information 146 147 .seealso: DMView(), DMDAGetCorners(), DMDAGetLocalInfo() 148 @*/ 149 PetscErrorCode DMDAGetInfo(DM da,PetscInt *dim,PetscInt *M,PetscInt *N,PetscInt *P,PetscInt *m,PetscInt *n,PetscInt *p,PetscInt *dof,PetscInt *s,DMDABoundaryType *bx,DMDABoundaryType *by,DMDABoundaryType *bz,DMDAStencilType *st) 150 { 151 DM_DA *dd = (DM_DA*)da->data; 152 153 PetscFunctionBegin; 154 PetscValidHeaderSpecific(da,DM_CLASSID,1); 155 if (dim) *dim = dd->dim; 156 if (M) *M = dd->M; 157 if (N) *N = dd->N; 158 if (P) *P = dd->P; 159 if (m) *m = dd->m; 160 if (n) *n = dd->n; 161 if (p) *p = dd->p; 162 if (dof) *dof = dd->w; 163 if (s) *s = dd->s; 164 if (bx) *bx = dd->bx; 165 if (by) *by = dd->by; 166 if (bz) *bz = dd->bz; 167 if (st) *st = dd->stencil_type; 168 PetscFunctionReturn(0); 169 } 170 171 #undef __FUNCT__ 172 #define __FUNCT__ "DMDAGetLocalInfo" 173 /*@C 174 DMDAGetLocalInfo - Gets information about a given distributed array and this processors location in it 175 176 Not Collective 177 178 Input Parameter: 179 . da - the distributed array 180 181 Output Parameters: 182 . 
dainfo - structure containing the information 183 184 Level: beginner 185 186 .keywords: distributed array, get, information 187 188 .seealso: DMDAGetInfo(), DMDAGetCorners() 189 @*/ 190 PetscErrorCode DMDAGetLocalInfo(DM da,DMDALocalInfo *info) 191 { 192 PetscInt w; 193 DM_DA *dd = (DM_DA*)da->data; 194 195 PetscFunctionBegin; 196 PetscValidHeaderSpecific(da,DM_CLASSID,1); 197 PetscValidPointer(info,2); 198 info->da = da; 199 info->dim = dd->dim; 200 info->mx = dd->M; 201 info->my = dd->N; 202 info->mz = dd->P; 203 info->dof = dd->w; 204 info->sw = dd->s; 205 info->bx = dd->bx; 206 info->by = dd->by; 207 info->bz = dd->bz; 208 info->st = dd->stencil_type; 209 210 /* since the xs, xe ... have all been multiplied by the number of degrees 211 of freedom per cell, w = dd->w, we divide that out before returning.*/ 212 w = dd->w; 213 info->xs = dd->xs/w; 214 info->xm = (dd->xe - dd->xs)/w; 215 /* the y and z have NOT been multiplied by w */ 216 info->ys = dd->ys; 217 info->ym = (dd->ye - dd->ys); 218 info->zs = dd->zs; 219 info->zm = (dd->ze - dd->zs); 220 221 info->gxs = dd->Xs/w; 222 info->gxm = (dd->Xe - dd->Xs)/w; 223 /* the y and z have NOT been multiplied by w */ 224 info->gys = dd->Ys; 225 info->gym = (dd->Ye - dd->Ys); 226 info->gzs = dd->Zs; 227 info->gzm = (dd->Ze - dd->Zs); 228 PetscFunctionReturn(0); 229 } 230 231 #undef __FUNCT__ 232 #define __FUNCT__ "DMDAGetLocalBlockInfo" 233 /*@C 234 DMDAGetLocalBlockInfo - Gets information about a given distributed array and this processors location in it with overlap taken into account 235 236 Not Collective 237 238 Input Parameter: 239 . da - the distributed array 240 241 Output Parameters: 242 . 
dainfo - structure containing the information

   Level: beginner

.keywords: distributed array, get, information

.seealso: DMDAGetLocalInfo(), DMDASetOverlap()
@*/
PetscErrorCode DMDAGetLocalBlockInfo(DM da,DMDALocalInfo *info)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,info);CHKERRQ(ierr);

  if (dd->overlap > 0) {
    /* Grow the local patch by dd->overlap on each side that either stays
       strictly inside the global domain or wraps periodically.
       NOTE(review): the interior tests use "> 0" and "< mx" (strict), so a
       patch whose extension would land exactly on the domain boundary is
       not grown -- confirm this boundary convention is intentional. */
    if (info->xs - dd->overlap > 0 || info->bx == DMDA_BOUNDARY_PERIODIC) {
      info->xs -= dd->overlap;
      info->xm += dd->overlap;
    }
    if (info->xs + info->xm + dd->overlap < info->mx || info->bx == DMDA_BOUNDARY_PERIODIC) {
      info->xm += dd->overlap;
    }
    if (info->ys - dd->overlap > 0 || info->by == DMDA_BOUNDARY_PERIODIC) {
      info->ys -= dd->overlap;
      info->ym += dd->overlap;
    }
    if (info->ys + info->ym + dd->overlap < info->my || info->by == DMDA_BOUNDARY_PERIODIC) {
      info->ym += dd->overlap;
    }
    if (info->zs - dd->overlap > 0 || info->bz == DMDA_BOUNDARY_PERIODIC) {
      info->zs -= dd->overlap;
      info->zm += dd->overlap;
    }
    if (info->zs + info->zm + dd->overlap < info->mz || info->bz == DMDA_BOUNDARY_PERIODIC) {
      info->zm += dd->overlap;
    }
  }
  PetscFunctionReturn(0);
}