1 2 #include <private/daimpl.h> /*I "petscdmda.h" I*/ 3 4 #undef __FUNCT__ 5 #define __FUNCT__ "DMView_DA_2d" 6 PetscErrorCode DMView_DA_2d(DM da,PetscViewer viewer) 7 { 8 PetscErrorCode ierr; 9 PetscMPIInt rank; 10 PetscBool iascii,isdraw,isbinary; 11 DM_DA *dd = (DM_DA*)da->data; 12 #if defined(PETSC_HAVE_MATLAB_ENGINE) 13 PetscBool ismatlab; 14 #endif 15 16 PetscFunctionBegin; 17 ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr); 18 19 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 20 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 21 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 22 #if defined(PETSC_HAVE_MATLAB_ENGINE) 23 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr); 24 #endif 25 if (iascii) { 26 PetscViewerFormat format; 27 28 ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr); 29 if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) { 30 DMDALocalInfo info; 31 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 32 ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr); 33 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D N %D m %D n %D w %D s %D\n",rank,dd->M,dd->N,dd->m,dd->n,dd->w,dd->s);CHKERRQ(ierr); 34 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D, Y range of indices: %D %D\n",info.xs,info.xs+info.xm,info.ys,info.ys+info.ym);CHKERRQ(ierr); 35 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 36 ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr); 37 } else { 38 ierr = DMView_DA_VTK(da,viewer);CHKERRQ(ierr); 39 } 40 } else if (isdraw) { 41 PetscDraw draw; 42 double ymin = -1*dd->s-1,ymax = dd->N+dd->s; 43 double xmin = -1*dd->s-1,xmax = dd->M+dd->s; 44 double x,y; 45 PetscInt base,*idx; 46 char node[10]; 47 PetscBool isnull; 48 49 ierr = 
PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 50 ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0); 51 if (!dd->coordinates) { 52 ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr); 53 } 54 ierr = PetscDrawSynchronizedClear(draw);CHKERRQ(ierr); 55 56 /* first processor draw all node lines */ 57 if (!rank) { 58 ymin = 0.0; ymax = dd->N - 1; 59 for (xmin=0; xmin<dd->M; xmin++) { 60 ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr); 61 } 62 xmin = 0.0; xmax = dd->M - 1; 63 for (ymin=0; ymin<dd->N; ymin++) { 64 ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr); 65 } 66 } 67 ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr); 68 ierr = PetscDrawPause(draw);CHKERRQ(ierr); 69 70 /* draw my box */ 71 ymin = dd->ys; ymax = dd->ye - 1; xmin = dd->xs/dd->w; 72 xmax =(dd->xe-1)/dd->w; 73 ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr); 74 ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr); 75 ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr); 76 ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr); 77 78 /* put in numbers */ 79 base = (dd->base)/dd->w; 80 for (y=ymin; y<=ymax; y++) { 81 for (x=xmin; x<=xmax; x++) { 82 sprintf(node,"%d",(int)base++); 83 ierr = PetscDrawString(draw,x,y,PETSC_DRAW_BLACK,node);CHKERRQ(ierr); 84 } 85 } 86 87 ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr); 88 ierr = PetscDrawPause(draw);CHKERRQ(ierr); 89 /* overlay ghost numbers, useful for error checking */ 90 /* put in numbers */ 91 92 base = 0; idx = dd->idx; 93 ymin = dd->Ys; ymax = dd->Ye; xmin = dd->Xs; xmax = dd->Xe; 94 for (y=ymin; y<ymax; y++) { 95 for (x=xmin; x<xmax; x++) { 96 if ((base % dd->w) == 0) { 97 sprintf(node,"%d",(int)(idx[base]/dd->w)); 98 ierr = PetscDrawString(draw,x/dd->w,y,PETSC_DRAW_BLUE,node);CHKERRQ(ierr); 99 } 100 base++; 101 } 102 } 103 
ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr); 104 ierr = PetscDrawPause(draw);CHKERRQ(ierr); 105 } else if (isbinary){ 106 ierr = DMView_DA_Binary(da,viewer);CHKERRQ(ierr); 107 #if defined(PETSC_HAVE_MATLAB_ENGINE) 108 } else if (ismatlab) { 109 ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr); 110 #endif 111 } else SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for DMDA 1d",((PetscObject)viewer)->type_name); 112 PetscFunctionReturn(0); 113 } 114 115 /* 116 M is number of grid points 117 m is number of processors 118 119 */ 120 #undef __FUNCT__ 121 #define __FUNCT__ "DMDASplitComm2d" 122 PetscErrorCode DMDASplitComm2d(MPI_Comm comm,PetscInt M,PetscInt N,PetscInt sw,MPI_Comm *outcomm) 123 { 124 PetscErrorCode ierr; 125 PetscInt m,n = 0,x = 0,y = 0; 126 PetscMPIInt size,csize,rank; 127 128 PetscFunctionBegin; 129 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 130 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 131 132 csize = 4*size; 133 do { 134 if (csize % 4) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot split communicator of size %d tried %d %D %D",size,csize,x,y); 135 csize = csize/4; 136 137 m = (PetscInt)(0.5 + sqrt(((double)M)*((double)csize)/((double)N))); 138 if (!m) m = 1; 139 while (m > 0) { 140 n = csize/m; 141 if (m*n == csize) break; 142 m--; 143 } 144 if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;} 145 146 x = M/m + ((M % m) > ((csize-1) % m)); 147 y = (N + (csize-1)/m)/n; 148 } while ((x < 4 || y < 4) && csize > 1); 149 if (size != csize) { 150 MPI_Group entire_group,sub_group; 151 PetscMPIInt i,*groupies; 152 153 ierr = MPI_Comm_group(comm,&entire_group);CHKERRQ(ierr); 154 ierr = PetscMalloc(csize*sizeof(PetscInt),&groupies);CHKERRQ(ierr); 155 for (i=0; i<csize; i++) { 156 groupies[i] = (rank/csize)*csize + i; 157 } 158 ierr = MPI_Group_incl(entire_group,csize,groupies,&sub_group);CHKERRQ(ierr); 159 ierr = PetscFree(groupies);CHKERRQ(ierr); 160 ierr = 
MPI_Comm_create(comm,sub_group,outcomm);CHKERRQ(ierr); 161 ierr = MPI_Group_free(&entire_group);CHKERRQ(ierr); 162 ierr = MPI_Group_free(&sub_group);CHKERRQ(ierr); 163 ierr = PetscInfo1(0,"DMDASplitComm2d:Creating redundant coarse problems of size %d\n",csize);CHKERRQ(ierr); 164 } else { 165 *outcomm = comm; 166 } 167 PetscFunctionReturn(0); 168 } 169 170 #undef __FUNCT__ 171 #define __FUNCT__ "DMDAFunction" 172 static PetscErrorCode DMDAFunction(DM dm,Vec x,Vec F) 173 { 174 PetscErrorCode ierr; 175 Vec localX; 176 177 PetscFunctionBegin; 178 ierr = DMGetLocalVector(dm,&localX);CHKERRQ(ierr); 179 ierr = DMGlobalToLocalBegin(dm,x,INSERT_VALUES,localX);CHKERRQ(ierr); 180 ierr = DMGlobalToLocalEnd(dm,x,INSERT_VALUES,localX);CHKERRQ(ierr); 181 ierr = DMDAFormFunction1(dm,localX,F,dm->ctx);CHKERRQ(ierr); 182 ierr = DMRestoreLocalVector(dm,&localX);CHKERRQ(ierr); 183 PetscFunctionReturn(0); 184 } 185 186 #undef __FUNCT__ 187 #define __FUNCT__ "DMDASetLocalFunction" 188 /*@C 189 DMDASetLocalFunction - Caches in a DM a local function. 190 191 Logically Collective on DMDA 192 193 Input Parameter: 194 + da - initial distributed array 195 - lf - the local function 196 197 Level: intermediate 198 199 Notes: The routine SNESDAFormFunction() uses this the cached function to evaluate the user provided function. 

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunctioni()
@*/
PetscErrorCode DMDASetLocalFunction(DM da,DMDALocalFunction1 lf)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* install the generic wrapper so DM function evaluation dispatches to the cached lf */
  ierr = DMSetFunction(da,DMDAFunction);CHKERRQ(ierr);
  dd->lf = lf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalFunctioni"
/*@C
       DMDASetLocalFunctioni - Caches in a DM a local function that evaluates a single component

   Logically Collective on DMDA

   Input Parameter:
+  da - initial distributed array
-  lfi - the local function

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
@*/
PetscErrorCode DMDASetLocalFunctioni(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->lfi = lfi;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalFunctionib"
/*@C
       DMDASetLocalFunctionib - Caches in a DM a block local function that evaluates a single component

   Logically Collective on DMDA

   Input Parameter:
+  da - initial distributed array
-  lfi - the local function

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
@*/
PetscErrorCode DMDASetLocalFunctionib(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->lfib = lfi;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicFunction_Private"
/* Caches the ADIC-generated variant of the local function (called via the public macro) */
PetscErrorCode DMDASetLocalAdicFunction_Private(DM da,DMDALocalFunction1 ad_lf)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adic_lf = ad_lf;
  PetscFunctionReturn(0);
}

/*MC
       DMDASetLocalAdicFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR

   Synopsis:
   PetscErrorCode DMDASetLocalAdicFunctioni(DM da,PetscInt (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*)

   Logically Collective on DMDA

   Input Parameter:
+  da - initial distributed array
-  ad_lfi - the local function as computed by ADIC/ADIFOR

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
          DMDASetLocalJacobian(), DMDASetLocalFunctioni()
M*/

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicFunctioni_Private"
/* Caches the ADIC-generated single-component local function */
PetscErrorCode DMDASetLocalAdicFunctioni_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adic_lfi = ad_lfi;
  PetscFunctionReturn(0);
}

/*MC
       DMDASetLocalAdicMFFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR

   Synopsis:
   PetscErrorCode DMDASetLocalAdicFunctioni(DM da,int (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*)

   Logically Collective on DMDA

   Input Parameter:
+  da - initial distributed array
-  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR

   Level: intermediate

.keywords: distributed array, refine

.seealso:
DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
          DMDASetLocalJacobian(), DMDASetLocalFunctioni()
M*/

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicMFFunctioni_Private"
/* Caches the ADIC-generated matrix-free single-component local function */
PetscErrorCode DMDASetLocalAdicMFFunctioni_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adicmf_lfi = admf_lfi;
  PetscFunctionReturn(0);
}

/*MC
       DMDASetLocalAdicFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR

   Synopsis:
   PetscErrorCode DMDASetLocalAdicFunctionib(DM da,PetscInt (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*)

   Logically Collective on DMDA

   Input Parameter:
+  da - initial distributed array
-  ad_lfi - the local function as computed by ADIC/ADIFOR

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
          DMDASetLocalJacobian(), DMDASetLocalFunctionib()
M*/

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicFunctionib_Private"
/* Caches the ADIC-generated point-block local function */
PetscErrorCode DMDASetLocalAdicFunctionib_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adic_lfib = ad_lfi;
  PetscFunctionReturn(0);
}

/*MC
       DMDASetLocalAdicMFFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR

   Synopsis:
   PetscErrorCode DMDASetLocalAdicFunctionib(DM da,int (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*)

   Logically Collective on DMDA

   Input Parameter:
+  da - initial distributed array
-  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
          DMDASetLocalJacobian(), DMDASetLocalFunctionib()
M*/

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicMFFunctionib_Private"
/* Caches the ADIC-generated matrix-free point-block local function */
PetscErrorCode DMDASetLocalAdicMFFunctionib_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adicmf_lfib = admf_lfi;
  PetscFunctionReturn(0);
}

/*MC
       DMDASetLocalAdicMFFunction - Caches in a DM a local function computed by ADIC/ADIFOR

   Synopsis:
   PetscErrorCode DMDASetLocalAdicMFFunction(DM da,DMDALocalFunction1 ad_lf)

   Logically Collective on DMDA

   Input Parameter:
+  da - initial distributed array
-  ad_lf - the local function as computed by ADIC/ADIFOR

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
          DMDASetLocalJacobian()
M*/

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicMFFunction_Private"
/* Caches the ADIC-generated matrix-free local function */
PetscErrorCode DMDASetLocalAdicMFFunction_Private(DM da,DMDALocalFunction1 ad_lf)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adicmf_lf = ad_lf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAJacobian"
/* Generic DM Jacobian wrapper: scatters ghost values, evaluates the cached local
   Jacobian into B, and assembles A when it differs from B (e.g. matrix-free A) */
static PetscErrorCode DMDAJacobian(DM dm,Vec x,Mat A,Mat B, MatStructure *str)
{
  PetscErrorCode ierr;
  Vec            localX;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(dm,&localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(dm,x,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(dm,x,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMDAComputeJacobian1(dm,localX,B,dm->ctx);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(dm,&localX);CHKERRQ(ierr);
  /* Assemble true Jacobian; if it is different */
  if (A != B) {
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
  *str = SAME_NONZERO_PATTERN;
  PetscFunctionReturn(0);
}

/*@C
       DMDASetLocalJacobian - Caches in a DM a local Jacobian computation function

   Logically Collective on DMDA


   Input Parameter:
+  da - initial distributed array
-  lj - the local Jacobian

   Level: intermediate

   Notes: The routine SNESDAFormFunction() uses this cached function to evaluate the user provided function.

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
@*/
#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalJacobian"
PetscErrorCode DMDASetLocalJacobian(DM da,DMDALocalFunction1 lj)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* install the generic wrapper so DM Jacobian evaluation dispatches to the cached lj */
  ierr = DMSetJacobian(da,DMDAJacobian);CHKERRQ(ierr);
  dd->lj = lj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAGetLocalFunction"
/*@C
       DMDAGetLocalFunction - Gets from a DM a local function and its ADIC/ADIFOR Jacobian

   Not Collective

   Input Parameter:
.  da - initial distributed array

   Output Parameter:
.
lf - the local function

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalJacobian(), DMDASetLocalFunction()
@*/
PetscErrorCode DMDAGetLocalFunction(DM da,DMDALocalFunction1 *lf)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (lf) *lf = dd->lf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAGetLocalJacobian"
/*@C
       DMDAGetLocalJacobian - Gets from a DM a local jacobian

   Not Collective

   Input Parameter:
.  da - initial distributed array

   Output Parameter:
.  lj - the local jacobian

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalJacobian()
@*/
PetscErrorCode DMDAGetLocalJacobian(DM da,DMDALocalFunction1 *lj)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (lj) *lj = dd->lj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunction"
/*@
       DMDAFormFunction - Evaluates a user provided function on each processor that
        share a DMDA

   Input Parameters:
+    da - the DM that defines the grid
.    vu - input vector
.    vfu - output vector
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

    This should eventually replace DMDAFormFunction1

    Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode DMDAFormFunction(DM da,PetscErrorCode (*lf)(void),Vec vu,Vec vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u,*fu;
  DMDALocalInfo  info;
  /* cast the opaque callback to the local-function calling convention */
  PetscErrorCode (*f)(DMDALocalInfo*,void*,void*,void*) = (PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*))lf;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vfu,&fu);CHKERRQ(ierr);

  ierr = (*f)(&info,u,fu,w);CHKERRQ(ierr);

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctionLocal"
/*@C
   DMDAFormFunctionLocal - This is a universal function evaluation routine for
   a local DM function.

   Collective on DMDA

   Input Parameters:
+  da - the DM context
.  func - The local function
.  X - input vector
.  F - function vector
-  ctx - A user context

   Level: intermediate

.seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
          SNESSetFunction(), SNESSetJacobian()

@*/
PetscErrorCode DMDAFormFunctionLocal(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx)
{
  Vec            localX;
  DMDALocalInfo  info;
  void           *u;
  void           *fu;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,F,&fu);CHKERRQ(ierr);
  ierr = (*func)(&info,u,fu,ctx);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,F,&fu);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctionLocalGhost"
/*@C
   DMDAFormFunctionLocalGhost - This is a universal function evaluation routine for
   a local DM function, but the ghost values of the output are communicated and added.

   Collective on DMDA

   Input Parameters:
+  da - the DM context
.  func - The local function
.  X - input vector
.  F - function vector
-  ctx - A user context

   Level: intermediate

.seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
          SNESSetFunction(), SNESSetJacobian()

@*/
PetscErrorCode DMDAFormFunctionLocalGhost(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx)
{
  Vec            localX, localF;
  DMDALocalInfo  info;
  void           *u;
  void           *fu;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMGetLocalVector(da,&localF);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  /* zero both targets: ghost contributions are ADDed back into F below */
  ierr = VecSet(F, 0.0);CHKERRQ(ierr);
  ierr = VecSet(localF, 0.0);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,localF,&fu);CHKERRQ(ierr);
  ierr = (*func)(&info,u,fu,ctx);CHKERRQ(ierr);
  ierr = DMLocalToGlobalBegin(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localF,&fu);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localF);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunction1"
/*@
       DMDAFormFunction1 - Evaluates a user provided function on each processor that
        share a DMDA

   Input Parameters:
+    da - the DM that defines the grid
.    vu - input vector
.
vfu - output vector
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

    Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode DMDAFormFunction1(DM da,Vec vu,Vec vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u,*fu;
  DMDALocalInfo  info;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vfu,&fu);CHKERRQ(ierr);

  /* CHKMEMQ guards catch memory corruption caused by the user's local function */
  CHKMEMQ;
  ierr = (*dd->lf)(&info,u,fu,w);CHKERRQ(ierr);
  CHKMEMQ;

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctioniTest1"
/* Debug helper: compares the full local function against the per-component
   functioni evaluated at every local index; prints the norm of the difference */
PetscErrorCode DMDAFormFunctioniTest1(DM da,void *w)
{
  Vec            vu,fu,fui;
  PetscErrorCode ierr;
  PetscInt       i,n;
  PetscScalar    *ui;
  PetscRandom    rnd;
  PetscReal      norm;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(da,&vu);CHKERRQ(ierr);
  ierr = PetscRandomCreate(PETSC_COMM_SELF,&rnd);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(rnd);CHKERRQ(ierr);
  ierr = VecSetRandom(vu,rnd);CHKERRQ(ierr);
  ierr = PetscRandomDestroy(&rnd);CHKERRQ(ierr);

  ierr = DMGetGlobalVector(da,&fu);CHKERRQ(ierr);
  ierr = DMGetGlobalVector(da,&fui);CHKERRQ(ierr);

  ierr = DMDAFormFunction1(da,vu,fu,w);CHKERRQ(ierr);

  ierr = VecGetArray(fui,&ui);CHKERRQ(ierr);
  ierr = VecGetLocalSize(fui,&n);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ierr = DMDAFormFunctioni1(da,i,vu,ui+i,w);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(fui,&ui);CHKERRQ(ierr);

  ierr = VecAXPY(fui,-1.0,fu);CHKERRQ(ierr);
  ierr = VecNorm(fui,NORM_2,&norm);CHKERRQ(ierr);
  ierr = PetscPrintf(((PetscObject)da)->comm,"Norm of difference in vectors %G\n",norm);CHKERRQ(ierr);
  ierr = VecView(fu,0);CHKERRQ(ierr);
  ierr = VecView(fui,0);CHKERRQ(ierr);

  ierr = DMRestoreLocalVector(da,&vu);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&fu);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&fui);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctioni1"
/*@
       DMDAFormFunctioni1 - Evaluates a user provided point-wise function

   Input Parameters:
+    da - the DM that defines the grid
.    i - the component of the function we wish to compute (must be local)
.    vu - input vector
.    vfu - output value
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

    Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode DMDAFormFunctioni1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u;
  DMDALocalInfo  info;
  MatStencil     stencil;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;

  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);

  /* figure out stencil value from i: decompose the flat local index into
     (component, i, j, k) using the local sizes in info */
  stencil.c = i % info.dof;
  stencil.i = (i % (info.xm*info.dof))/info.dof;
  stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
  stencil.k = i/(info.xm*info.ym*info.dof);

  ierr = (*dd->lfi)(&info,&stencil,u,vfu,w);CHKERRQ(ierr);

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctionib1"
/*@
       DMDAFormFunctionib1 - Evaluates a user provided point-block function

   Input Parameters:
+    da - the DM that defines the grid
.    i - the component of the function we wish to compute (must be local)
.    vu - input vector
.    vfu - output value
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

    Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode DMDAFormFunctionib1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u;
  DMDALocalInfo  info;
  MatStencil     stencil;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);

  /* figure out stencil value from i; i must address the start of a block */
  stencil.c = i % info.dof;
  if (stencil.c) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_WRONG,"Point-block functions can only be called for the entire block");
  stencil.i = (i % (info.xm*info.dof))/info.dof;
  stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
  stencil.k = i/(info.xm*info.ym*info.dof);

  ierr = (*dd->lfib)(&info,&stencil,u,vfu,w);CHKERRQ(ierr);

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* NOTE(review): everything inside this #if defined(new) block is dead code --
   "new" is never defined -- and it does not compile if enabled: it references
   undeclared identifiers (ctx, i, w) and has an unbalanced closing brace. */
#if defined(new)
#undef __FUNCT__
#define __FUNCT__ "DMDAGetDiagonal_MFFD"
/*
  DMDAGetDiagonal_MFFD - Gets the diagonal for a matrix free matrix where local
    function lives on a DMDA

        y ~= (F(u + ha) - F(u))/h,
  where F = nonlinear function, as set by SNESSetFunction()
        u = current iterate
        h = difference interval
*/
PetscErrorCode DMDAGetDiagonal_MFFD(DM da,Vec U,Vec a)
{
  PetscScalar    h,*aa,*ww,v;
  PetscReal      epsilon = PETSC_SQRT_MACHINE_EPSILON,umin = 100.0*PETSC_SQRT_MACHINE_EPSILON;
  PetscErrorCode ierr;
  PetscInt       gI,nI;
  MatStencil     stencil;
  DMDALocalInfo  info;

  PetscFunctionBegin;
  ierr = (*ctx->func)(0,U,a,ctx->funcctx);CHKERRQ(ierr);
  ierr = (*ctx->funcisetbase)(U,ctx->funcctx);CHKERRQ(ierr);

  ierr = VecGetArray(U,&ww);CHKERRQ(ierr);
  ierr = VecGetArray(a,&aa);CHKERRQ(ierr);

  nI = 0;
    h  = ww[gI];
    if
(h == 0.0) h = 1.0; 887 #if !defined(PETSC_USE_COMPLEX) 888 if (h < umin && h >= 0.0) h = umin; 889 else if (h < 0.0 && h > -umin) h = -umin; 890 #else 891 if (PetscAbsScalar(h) < umin && PetscRealPart(h) >= 0.0) h = umin; 892 else if (PetscRealPart(h) < 0.0 && PetscAbsScalar(h) < umin) h = -umin; 893 #endif 894 h *= epsilon; 895 896 ww[gI] += h; 897 ierr = (*ctx->funci)(i,w,&v,ctx->funcctx);CHKERRQ(ierr); 898 aa[nI] = (v - aa[nI])/h; 899 ww[gI] -= h; 900 nI++; 901 } 902 ierr = VecRestoreArray(U,&ww);CHKERRQ(ierr); 903 ierr = VecRestoreArray(a,&aa);CHKERRQ(ierr); 904 PetscFunctionReturn(0); 905 } 906 #endif 907 908 #if defined(PETSC_HAVE_ADIC) 909 EXTERN_C_BEGIN 910 #include <adic/ad_utils.h> 911 EXTERN_C_END 912 913 #undef __FUNCT__ 914 #define __FUNCT__ "DMDAComputeJacobian1WithAdic" 915 /*@C 916 DMDAComputeJacobian1WithAdic - Evaluates a adiC provided Jacobian function on each processor that 917 share a DMDA 918 919 Input Parameters: 920 + da - the DM that defines the grid 921 . vu - input vector (ghosted) 922 . J - output matrix 923 - w - any user data 924 925 Level: advanced 926 927 Notes: Does NOT do ghost updates on vu upon entry 928 929 .seealso: DMDAFormFunction1() 930 931 @*/ 932 PetscErrorCode DMDAComputeJacobian1WithAdic(DM da,Vec vu,Mat J,void *w) 933 { 934 PetscErrorCode ierr; 935 PetscInt gtdof,tdof; 936 PetscScalar *ustart; 937 DMDALocalInfo info; 938 void *ad_u,*ad_f,*ad_ustart,*ad_fstart; 939 ISColoring iscoloring; 940 941 PetscFunctionBegin; 942 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 943 944 PetscADResetIndep(); 945 946 /* get space for derivative objects. 
*/ 947 ierr = DMDAGetAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,>dof);CHKERRQ(ierr); 948 ierr = DMDAGetAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr); 949 ierr = VecGetArray(vu,&ustart);CHKERRQ(ierr); 950 ierr = DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);CHKERRQ(ierr); 951 952 PetscADSetValueAndColor(ad_ustart,gtdof,iscoloring->colors,ustart); 953 954 ierr = VecRestoreArray(vu,&ustart);CHKERRQ(ierr); 955 ierr = ISColoringDestroy(&iscoloring);CHKERRQ(ierr); 956 ierr = PetscADIncrementTotalGradSize(iscoloring->n);CHKERRQ(ierr); 957 PetscADSetIndepDone(); 958 959 ierr = PetscLogEventBegin(DMDA_LocalADFunction,0,0,0,0);CHKERRQ(ierr); 960 ierr = (*dd->adic_lf)(&info,ad_u,ad_f,w);CHKERRQ(ierr); 961 ierr = PetscLogEventEnd(DMDA_LocalADFunction,0,0,0,0);CHKERRQ(ierr); 962 963 /* stick the values into the matrix */ 964 ierr = MatSetValuesAdic(J,(PetscScalar**)ad_fstart);CHKERRQ(ierr); 965 966 /* return space for derivative objects. */ 967 ierr = DMDARestoreAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,>dof);CHKERRQ(ierr); 968 ierr = DMDARestoreAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr); 969 PetscFunctionReturn(0); 970 } 971 972 #undef __FUNCT__ 973 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAdic" 974 /*@C 975 DMDAMultiplyByJacobian1WithAdic - Applies an ADIC-provided Jacobian function to a vector on 976 each processor that shares a DMDA. 977 978 Input Parameters: 979 + da - the DM that defines the grid 980 . vu - Jacobian is computed at this point (ghosted) 981 . v - product is done on this vector (ghosted) 982 . fu - output vector = J(vu)*v (not ghosted) 983 - w - any user data 984 985 Notes: 986 This routine does NOT do ghost updates on vu upon entry. 
987 988 Level: advanced 989 990 .seealso: DMDAFormFunction1() 991 992 @*/ 993 PetscErrorCode DMDAMultiplyByJacobian1WithAdic(DM da,Vec vu,Vec v,Vec f,void *w) 994 { 995 PetscErrorCode ierr; 996 PetscInt i,gtdof,tdof; 997 PetscScalar *avu,*av,*af,*ad_vustart,*ad_fstart; 998 DMDALocalInfo info; 999 void *ad_vu,*ad_f; 1000 1001 PetscFunctionBegin; 1002 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 1003 1004 /* get space for derivative objects. */ 1005 ierr = DMDAGetAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,>dof);CHKERRQ(ierr); 1006 ierr = DMDAGetAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr); 1007 1008 /* copy input vector into derivative object */ 1009 ierr = VecGetArray(vu,&avu);CHKERRQ(ierr); 1010 ierr = VecGetArray(v,&av);CHKERRQ(ierr); 1011 for (i=0; i<gtdof; i++) { 1012 ad_vustart[2*i] = avu[i]; 1013 ad_vustart[2*i+1] = av[i]; 1014 } 1015 ierr = VecRestoreArray(vu,&avu);CHKERRQ(ierr); 1016 ierr = VecRestoreArray(v,&av);CHKERRQ(ierr); 1017 1018 PetscADResetIndep(); 1019 ierr = PetscADIncrementTotalGradSize(1);CHKERRQ(ierr); 1020 PetscADSetIndepDone(); 1021 1022 ierr = (*dd->adicmf_lf)(&info,ad_vu,ad_f,w);CHKERRQ(ierr); 1023 1024 /* stick the values into the vector */ 1025 ierr = VecGetArray(f,&af);CHKERRQ(ierr); 1026 for (i=0; i<tdof; i++) { 1027 af[i] = ad_fstart[2*i+1]; 1028 } 1029 ierr = VecRestoreArray(f,&af);CHKERRQ(ierr); 1030 1031 /* return space for derivative objects. */ 1032 ierr = DMDARestoreAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,>dof);CHKERRQ(ierr); 1033 ierr = DMDARestoreAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr); 1034 PetscFunctionReturn(0); 1035 } 1036 #endif 1037 1038 #undef __FUNCT__ 1039 #define __FUNCT__ "DMDAComputeJacobian1" 1040 /*@ 1041 DMDAComputeJacobian1 - Evaluates a local Jacobian function on each processor that 1042 share a DMDA 1043 1044 Input Parameters: 1045 + da - the DM that defines the grid 1046 . vu - input vector (ghosted) 1047 . 
J - output matrix 1048 - w - any user data 1049 1050 Notes: Does NOT do ghost updates on vu upon entry 1051 1052 Level: advanced 1053 1054 .seealso: DMDAFormFunction1() 1055 1056 @*/ 1057 PetscErrorCode DMDAComputeJacobian1(DM da,Vec vu,Mat J,void *w) 1058 { 1059 PetscErrorCode ierr; 1060 void *u; 1061 DMDALocalInfo info; 1062 DM_DA *dd = (DM_DA*)da->data; 1063 1064 PetscFunctionBegin; 1065 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 1066 ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr); 1067 ierr = (*dd->lj)(&info,u,J,w);CHKERRQ(ierr); 1068 ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr); 1069 PetscFunctionReturn(0); 1070 } 1071 1072 1073 #undef __FUNCT__ 1074 #define __FUNCT__ "DMDAComputeJacobian1WithAdifor" 1075 /* 1076 DMDAComputeJacobian1WithAdifor - Evaluates a ADIFOR provided Jacobian local function on each processor that 1077 share a DMDA 1078 1079 Input Parameters: 1080 + da - the DM that defines the grid 1081 . vu - input vector (ghosted) 1082 . J - output matrix 1083 - w - any user data 1084 1085 Notes: Does NOT do ghost updates on vu upon entry 1086 1087 .seealso: DMDAFormFunction1() 1088 1089 */ 1090 PetscErrorCode DMDAComputeJacobian1WithAdifor(DM da,Vec vu,Mat J,void *w) 1091 { 1092 PetscErrorCode ierr; 1093 PetscInt i,Nc,N; 1094 ISColoringValue *color; 1095 DMDALocalInfo info; 1096 PetscScalar *u,*g_u,*g_f,*f = 0,*p_u; 1097 ISColoring iscoloring; 1098 DM_DA *dd = (DM_DA*)da->data; 1099 void (*lf)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*) = 1100 (void (*)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*))*dd->adifor_lf; 1101 1102 PetscFunctionBegin; 1103 ierr = DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);CHKERRQ(ierr); 1104 Nc = iscoloring->n; 1105 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 1106 N = info.gxm*info.gym*info.gzm*info.dof; 1107 1108 /* get space for derivative 
objects. */ 1109 ierr = PetscMalloc(Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar),&g_u);CHKERRQ(ierr); 1110 ierr = PetscMemzero(g_u,Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar));CHKERRQ(ierr); 1111 p_u = g_u; 1112 color = iscoloring->colors; 1113 for (i=0; i<N; i++) { 1114 p_u[*color++] = 1.0; 1115 p_u += Nc; 1116 } 1117 ierr = ISColoringDestroy(&iscoloring);CHKERRQ(ierr); 1118 ierr = PetscMalloc2(Nc*info.xm*info.ym*info.zm*info.dof,PetscScalar,&g_f,info.xm*info.ym*info.zm*info.dof,PetscScalar,&f);CHKERRQ(ierr); 1119 1120 /* Seed the input array g_u with coloring information */ 1121 1122 ierr = VecGetArray(vu,&u);CHKERRQ(ierr); 1123 (lf)(&Nc,&info,u,g_u,&Nc,f,g_f,&Nc,w,&ierr);CHKERRQ(ierr); 1124 ierr = VecRestoreArray(vu,&u);CHKERRQ(ierr); 1125 1126 /* stick the values into the matrix */ 1127 /* PetscScalarView(Nc*info.xm*info.ym,g_f,0); */ 1128 ierr = MatSetValuesAdifor(J,Nc,g_f);CHKERRQ(ierr); 1129 1130 /* return space for derivative objects. */ 1131 ierr = PetscFree(g_u);CHKERRQ(ierr); 1132 ierr = PetscFree2(g_f,f);CHKERRQ(ierr); 1133 PetscFunctionReturn(0); 1134 } 1135 1136 #undef __FUNCT__ 1137 #define __FUNCT__ "DMDAFormJacobianLocal" 1138 /*@C 1139 DMDAFormjacobianLocal - This is a universal Jacobian evaluation routine for 1140 a local DM function. 1141 1142 Collective on DMDA 1143 1144 Input Parameters: 1145 + da - the DM context 1146 . func - The local function 1147 . X - input vector 1148 . 
J - Jacobian matrix 1149 - ctx - A user context 1150 1151 Level: intermediate 1152 1153 .seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(), 1154 SNESSetFunction(), SNESSetJacobian() 1155 1156 @*/ 1157 PetscErrorCode DMDAFormJacobianLocal(DM da, DMDALocalFunction1 func, Vec X, Mat J, void *ctx) 1158 { 1159 Vec localX; 1160 DMDALocalInfo info; 1161 void *u; 1162 PetscErrorCode ierr; 1163 1164 PetscFunctionBegin; 1165 ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr); 1166 /* 1167 Scatter ghost points to local vector, using the 2-step process 1168 DMGlobalToLocalBegin(), DMGlobalToLocalEnd(). 1169 */ 1170 ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr); 1171 ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr); 1172 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 1173 ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr); 1174 ierr = (*func)(&info,u,J,ctx);CHKERRQ(ierr); 1175 ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr); 1176 ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr); 1177 PetscFunctionReturn(0); 1178 } 1179 1180 #undef __FUNCT__ 1181 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAD" 1182 /*@C 1183 DMDAMultiplyByJacobian1WithAD - Applies a Jacobian function supplied by ADIFOR or ADIC 1184 to a vector on each processor that shares a DMDA. 1185 1186 Input Parameters: 1187 + da - the DM that defines the grid 1188 . vu - Jacobian is computed at this point (ghosted) 1189 . v - product is done on this vector (ghosted) 1190 . fu - output vector = J(vu)*v (not ghosted) 1191 - w - any user data 1192 1193 Notes: 1194 This routine does NOT do ghost updates on vu and v upon entry. 1195 1196 Automatically calls DMDAMultiplyByJacobian1WithAdifor() or DMDAMultiplyByJacobian1WithAdic() 1197 depending on whether DMDASetLocalAdicMFFunction() or DMDASetLocalAdiforMFFunction() was called. 
1198 1199 Level: advanced 1200 1201 .seealso: DMDAFormFunction1(), DMDAMultiplyByJacobian1WithAdifor(), DMDAMultiplyByJacobian1WithAdic() 1202 1203 @*/ 1204 PetscErrorCode DMDAMultiplyByJacobian1WithAD(DM da,Vec u,Vec v,Vec f,void *w) 1205 { 1206 PetscErrorCode ierr; 1207 DM_DA *dd = (DM_DA*)da->data; 1208 1209 PetscFunctionBegin; 1210 if (dd->adicmf_lf) { 1211 #if defined(PETSC_HAVE_ADIC) 1212 ierr = DMDAMultiplyByJacobian1WithAdic(da,u,v,f,w);CHKERRQ(ierr); 1213 #else 1214 SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP_SYS,"Requires ADIC to be installed and cannot use complex numbers"); 1215 #endif 1216 } else if (dd->adiformf_lf) { 1217 ierr = DMDAMultiplyByJacobian1WithAdifor(da,u,v,f,w);CHKERRQ(ierr); 1218 } else { 1219 SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ORDER,"Must call DMDASetLocalAdiforMFFunction() or DMDASetLocalAdicMFFunction() before using"); 1220 } 1221 PetscFunctionReturn(0); 1222 } 1223 1224 1225 #undef __FUNCT__ 1226 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAdifor" 1227 /*@C 1228 DMDAMultiplyByJacobian1WithAdifor - Applies a ADIFOR provided Jacobian function on each processor that 1229 share a DM to a vector 1230 1231 Input Parameters: 1232 + da - the DM that defines the grid 1233 . vu - Jacobian is computed at this point (ghosted) 1234 . v - product is done on this vector (ghosted) 1235 . 
fu - output vector = J(vu)*v (not ghosted) 1236 - w - any user data 1237 1238 Notes: Does NOT do ghost updates on vu and v upon entry 1239 1240 Level: advanced 1241 1242 .seealso: DMDAFormFunction1() 1243 1244 @*/ 1245 PetscErrorCode DMDAMultiplyByJacobian1WithAdifor(DM da,Vec u,Vec v,Vec f,void *w) 1246 { 1247 PetscErrorCode ierr; 1248 PetscScalar *au,*av,*af,*awork; 1249 Vec work; 1250 DMDALocalInfo info; 1251 DM_DA *dd = (DM_DA*)da->data; 1252 void (*lf)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*) = 1253 (void (*)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*))*dd->adiformf_lf; 1254 1255 PetscFunctionBegin; 1256 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 1257 1258 ierr = DMGetGlobalVector(da,&work);CHKERRQ(ierr); 1259 ierr = VecGetArray(u,&au);CHKERRQ(ierr); 1260 ierr = VecGetArray(v,&av);CHKERRQ(ierr); 1261 ierr = VecGetArray(f,&af);CHKERRQ(ierr); 1262 ierr = VecGetArray(work,&awork);CHKERRQ(ierr); 1263 (lf)(&info,au,av,awork,af,w,&ierr);CHKERRQ(ierr); 1264 ierr = VecRestoreArray(u,&au);CHKERRQ(ierr); 1265 ierr = VecRestoreArray(v,&av);CHKERRQ(ierr); 1266 ierr = VecRestoreArray(f,&af);CHKERRQ(ierr); 1267 ierr = VecRestoreArray(work,&awork);CHKERRQ(ierr); 1268 ierr = DMRestoreGlobalVector(da,&work);CHKERRQ(ierr); 1269 1270 PetscFunctionReturn(0); 1271 } 1272 1273 #undef __FUNCT__ 1274 #define __FUNCT__ "DMSetUp_DA_2D" 1275 PetscErrorCode DMSetUp_DA_2D(DM da) 1276 { 1277 DM_DA *dd = (DM_DA*)da->data; 1278 const PetscInt M = dd->M; 1279 const PetscInt N = dd->N; 1280 PetscInt m = dd->m; 1281 PetscInt n = dd->n; 1282 const PetscInt dof = dd->w; 1283 const PetscInt s = dd->s; 1284 const DMDABoundaryType bx = dd->bx; 1285 const DMDABoundaryType by = dd->by; 1286 const DMDAStencilType stencil_type = dd->stencil_type; 1287 PetscInt *lx = dd->lx; 1288 PetscInt *ly = dd->ly; 1289 MPI_Comm comm; 1290 PetscMPIInt rank,size; 1291 PetscInt 
xs,xe,ys,ye,x,y,Xs,Xe,Ys,Ye,start,end,IXs,IXe,IYs,IYe; 1292 PetscInt up,down,left,right,i,n0,n1,n2,n3,n5,n6,n7,n8,*idx,nn,*idx_cpy; 1293 const PetscInt *idx_full; 1294 PetscInt xbase,*bases,*ldims,j,x_t,y_t,s_t,base,count; 1295 PetscInt s_x,s_y; /* s proportionalized to w */ 1296 PetscInt sn0 = 0,sn2 = 0,sn6 = 0,sn8 = 0; 1297 Vec local,global; 1298 VecScatter ltog,gtol; 1299 IS to,from,ltogis; 1300 PetscErrorCode ierr; 1301 1302 PetscFunctionBegin; 1303 ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr); 1304 1305 if (dof < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof); 1306 if (s < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s); 1307 1308 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1309 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 1310 1311 dd->dim = 2; 1312 ierr = PetscMalloc(dof*sizeof(char*),&dd->fieldname);CHKERRQ(ierr); 1313 ierr = PetscMemzero(dd->fieldname,dof*sizeof(char*));CHKERRQ(ierr); 1314 1315 if (m != PETSC_DECIDE) { 1316 if (m < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in X direction: %D",m); 1317 else if (m > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in X direction: %D %d",m,size); 1318 } 1319 if (n != PETSC_DECIDE) { 1320 if (n < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in Y direction: %D",n); 1321 else if (n > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in Y direction: %D %d",n,size); 1322 } 1323 1324 if (m == PETSC_DECIDE || n == PETSC_DECIDE) { 1325 if (n != PETSC_DECIDE) { 1326 m = size/n; 1327 } else if (m != PETSC_DECIDE) { 1328 n = size/m; 1329 } else { 1330 /* try for squarish distribution */ 1331 m = (PetscInt)(0.5 + sqrt(((double)M)*((double)size)/((double)N))); 1332 if (!m) m = 1; 1333 while (m > 0) { 1334 n = size/m; 1335 if (m*n == size) break; 1336 m--; 1337 } 1338 if (M > N && m < n) {PetscInt _m = m; m = n; 
n = _m;} 1339 } 1340 if (m*n != size) SETERRQ(comm,PETSC_ERR_PLIB,"Unable to create partition, check the size of the communicator and input m and n "); 1341 } else if (m*n != size) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Given Bad partition"); 1342 1343 if (M < m) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in x direction is too fine! %D %D",M,m); 1344 if (N < n) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in y direction is too fine! %D %D",N,n); 1345 1346 /* 1347 Determine locally owned region 1348 xs is the first local node number, x is the number of local nodes 1349 */ 1350 if (!lx) { 1351 ierr = PetscMalloc(m*sizeof(PetscInt), &dd->lx);CHKERRQ(ierr); 1352 lx = dd->lx; 1353 for (i=0; i<m; i++) { 1354 lx[i] = M/m + ((M % m) > i); 1355 } 1356 } 1357 x = lx[rank % m]; 1358 xs = 0; 1359 for (i=0; i<(rank % m); i++) { 1360 xs += lx[i]; 1361 } 1362 #if defined(PETSC_USE_DEBUG) 1363 left = xs; 1364 for (i=(rank % m); i<m; i++) { 1365 left += lx[i]; 1366 } 1367 if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M: %D %D",left,M); 1368 #endif 1369 1370 /* 1371 Determine locally owned region 1372 ys is the first local node number, y is the number of local nodes 1373 */ 1374 if (!ly) { 1375 ierr = PetscMalloc(n*sizeof(PetscInt), &dd->ly);CHKERRQ(ierr); 1376 ly = dd->ly; 1377 for (i=0; i<n; i++) { 1378 ly[i] = N/n + ((N % n) > i); 1379 } 1380 } 1381 y = ly[rank/m]; 1382 ys = 0; 1383 for (i=0; i<(rank/m); i++) { 1384 ys += ly[i]; 1385 } 1386 #if defined(PETSC_USE_DEBUG) 1387 left = ys; 1388 for (i=(rank/m); i<n; i++) { 1389 left += ly[i]; 1390 } 1391 if (left != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of ly across processors not equal to N: %D %D",left,N); 1392 #endif 1393 1394 /* 1395 check if the scatter requires more than one process neighbor or wraps around 1396 the domain more than once 1397 */ 1398 if ((x < s) && ((m > 1) || (bx == DMDA_BOUNDARY_PERIODIC))) { 1399 
SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s); 1400 } 1401 if ((y < s) && ((n > 1) || (by == DMDA_BOUNDARY_PERIODIC))) { 1402 SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local y-width of domain y %D is smaller than stencil width s %D",y,s); 1403 } 1404 xe = xs + x; 1405 ye = ys + y; 1406 1407 /* determine ghost region (Xs) and region scattered into (IXs) */ 1408 /* Assume No Periodicity */ 1409 if (xs-s > 0) { Xs = xs - s; IXs = xs - s; } else { Xs = 0; IXs = 0; } 1410 if (xe+s <= M) { Xe = xe + s; IXe = xe + s; } else { Xe = M; IXe = M; } 1411 if (ys-s > 0) { Ys = ys - s; IYs = ys - s; } else { Ys = 0; IYs = 0; } 1412 if (ye+s <= N) { Ye = ye + s; IYe = ye + s; } else { Ye = N; IYe = N; } 1413 1414 /* fix for periodicity/ghosted */ 1415 if (bx) { Xs = xs - s; Xe = xe + s; } 1416 if (bx == DMDA_BOUNDARY_PERIODIC) { IXs = xs - s; IXe = xe + s; } 1417 if (by) { Ys = ys - s; Ye = ye + s; } 1418 if (by == DMDA_BOUNDARY_PERIODIC) { IYs = ys - s; IYe = ye + s; } 1419 1420 /* Resize all X parameters to reflect w */ 1421 s_x = s; 1422 s_y = s; 1423 1424 /* determine starting point of each processor */ 1425 nn = x*y; 1426 ierr = PetscMalloc2(size+1,PetscInt,&bases,size,PetscInt,&ldims);CHKERRQ(ierr); 1427 ierr = MPI_Allgather(&nn,1,MPIU_INT,ldims,1,MPIU_INT,comm);CHKERRQ(ierr); 1428 bases[0] = 0; 1429 for (i=1; i<=size; i++) { 1430 bases[i] = ldims[i-1]; 1431 } 1432 for (i=1; i<=size; i++) { 1433 bases[i] += bases[i-1]; 1434 } 1435 base = bases[rank]*dof; 1436 1437 /* allocate the base parallel and sequential vectors */ 1438 dd->Nlocal = x*y*dof; 1439 ierr = VecCreateMPIWithArray(comm,dd->Nlocal,PETSC_DECIDE,0,&global);CHKERRQ(ierr); 1440 ierr = VecSetBlockSize(global,dof);CHKERRQ(ierr); 1441 dd->nlocal = (Xe-Xs)*(Ye-Ys)*dof; 1442 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->nlocal,0,&local);CHKERRQ(ierr); 1443 ierr = VecSetBlockSize(local,dof);CHKERRQ(ierr); 1444 1445 /* generate 
appropriate vector scatters */ 1446 /* local to global inserts non-ghost point region into global */ 1447 ierr = VecGetOwnershipRange(global,&start,&end);CHKERRQ(ierr); 1448 ierr = ISCreateStride(comm,x*y*dof,start,1,&to);CHKERRQ(ierr); 1449 1450 count = x*y; 1451 ierr = PetscMalloc(x*y*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1452 left = xs - Xs; right = left + x; 1453 down = ys - Ys; up = down + y; 1454 count = 0; 1455 for (i=down; i<up; i++) { 1456 for (j=left; j<right; j++) { 1457 idx[count++] = i*(Xe-Xs) + j; 1458 } 1459 } 1460 1461 ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&from);CHKERRQ(ierr); 1462 ierr = VecScatterCreate(local,from,global,to,<og);CHKERRQ(ierr); 1463 ierr = PetscLogObjectParent(dd,ltog);CHKERRQ(ierr); 1464 ierr = ISDestroy(&from);CHKERRQ(ierr); 1465 ierr = ISDestroy(&to);CHKERRQ(ierr); 1466 1467 /* global to local must include ghost points within the domain, 1468 but not ghost points outside the domain that aren't periodic */ 1469 if (stencil_type == DMDA_STENCIL_BOX) { 1470 count = (IXe-IXs)*(IYe-IYs); 1471 ierr = PetscMalloc(count*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1472 1473 left = IXs - Xs; right = left + (IXe-IXs); 1474 down = IYs - Ys; up = down + (IYe-IYs); 1475 count = 0; 1476 for (i=down; i<up; i++) { 1477 for (j=left; j<right; j++) { 1478 idx[count++] = j + i*(Xe-Xs); 1479 } 1480 } 1481 ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&to);CHKERRQ(ierr); 1482 1483 } else { 1484 /* must drop into cross shape region */ 1485 /* ---------| 1486 | top | 1487 |--- ---| up 1488 | middle | 1489 | | 1490 ---- ---- down 1491 | bottom | 1492 ----------- 1493 Xs xs xe Xe */ 1494 count = (ys-IYs)*x + y*(IXe-IXs) + (IYe-ye)*x; 1495 ierr = PetscMalloc(count*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1496 1497 left = xs - Xs; right = left + x; 1498 down = ys - Ys; up = down + y; 1499 count = 0; 1500 /* bottom */ 1501 for (i=(IYs-Ys); i<down; i++) { 1502 for (j=left; j<right; j++) { 1503 idx[count++] = j + i*(Xe-Xs); 1504 } 1505 
} 1506 /* middle */ 1507 for (i=down; i<up; i++) { 1508 for (j=(IXs-Xs); j<(IXe-Xs); j++) { 1509 idx[count++] = j + i*(Xe-Xs); 1510 } 1511 } 1512 /* top */ 1513 for (i=up; i<up+IYe-ye; i++) { 1514 for (j=left; j<right; j++) { 1515 idx[count++] = j + i*(Xe-Xs); 1516 } 1517 } 1518 ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&to);CHKERRQ(ierr); 1519 } 1520 1521 1522 /* determine who lies on each side of us stored in n6 n7 n8 1523 n3 n5 1524 n0 n1 n2 1525 */ 1526 1527 /* Assume the Non-Periodic Case */ 1528 n1 = rank - m; 1529 if (rank % m) { 1530 n0 = n1 - 1; 1531 } else { 1532 n0 = -1; 1533 } 1534 if ((rank+1) % m) { 1535 n2 = n1 + 1; 1536 n5 = rank + 1; 1537 n8 = rank + m + 1; if (n8 >= m*n) n8 = -1; 1538 } else { 1539 n2 = -1; n5 = -1; n8 = -1; 1540 } 1541 if (rank % m) { 1542 n3 = rank - 1; 1543 n6 = n3 + m; if (n6 >= m*n) n6 = -1; 1544 } else { 1545 n3 = -1; n6 = -1; 1546 } 1547 n7 = rank + m; if (n7 >= m*n) n7 = -1; 1548 1549 if (bx == DMDA_BOUNDARY_PERIODIC && by == DMDA_BOUNDARY_PERIODIC) { 1550 /* Modify for Periodic Cases */ 1551 /* Handle all four corners */ 1552 if ((n6 < 0) && (n7 < 0) && (n3 < 0)) n6 = m-1; 1553 if ((n8 < 0) && (n7 < 0) && (n5 < 0)) n8 = 0; 1554 if ((n2 < 0) && (n5 < 0) && (n1 < 0)) n2 = size-m; 1555 if ((n0 < 0) && (n3 < 0) && (n1 < 0)) n0 = size-1; 1556 1557 /* Handle Top and Bottom Sides */ 1558 if (n1 < 0) n1 = rank + m * (n-1); 1559 if (n7 < 0) n7 = rank - m * (n-1); 1560 if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1; 1561 if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1; 1562 if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1; 1563 if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1; 1564 1565 /* Handle Left and Right Sides */ 1566 if (n3 < 0) n3 = rank + (m-1); 1567 if (n5 < 0) n5 = rank - (m-1); 1568 if ((n1 >= 0) && (n0 < 0)) n0 = rank-1; 1569 if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1; 1570 if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1; 1571 if ((n7 >= 0) && (n8 < 0)) n8 = rank+1; 1572 } else if (by == 
DMDA_BOUNDARY_PERIODIC) { /* Handle Top and Bottom Sides */ 1573 if (n1 < 0) n1 = rank + m * (n-1); 1574 if (n7 < 0) n7 = rank - m * (n-1); 1575 if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1; 1576 if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1; 1577 if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1; 1578 if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1; 1579 } else if (bx == DMDA_BOUNDARY_PERIODIC) { /* Handle Left and Right Sides */ 1580 if (n3 < 0) n3 = rank + (m-1); 1581 if (n5 < 0) n5 = rank - (m-1); 1582 if ((n1 >= 0) && (n0 < 0)) n0 = rank-1; 1583 if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1; 1584 if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1; 1585 if ((n7 >= 0) && (n8 < 0)) n8 = rank+1; 1586 } 1587 1588 ierr = PetscMalloc(9*sizeof(PetscInt),&dd->neighbors);CHKERRQ(ierr); 1589 dd->neighbors[0] = n0; 1590 dd->neighbors[1] = n1; 1591 dd->neighbors[2] = n2; 1592 dd->neighbors[3] = n3; 1593 dd->neighbors[4] = rank; 1594 dd->neighbors[5] = n5; 1595 dd->neighbors[6] = n6; 1596 dd->neighbors[7] = n7; 1597 dd->neighbors[8] = n8; 1598 1599 if (stencil_type == DMDA_STENCIL_STAR) { 1600 /* save corner processor numbers */ 1601 sn0 = n0; sn2 = n2; sn6 = n6; sn8 = n8; 1602 n0 = n2 = n6 = n8 = -1; 1603 } 1604 1605 ierr = PetscMalloc((Xe-Xs)*(Ye-Ys)*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1606 ierr = PetscLogObjectMemory(da,(Xe-Xs)*(Ye-Ys)*sizeof(PetscInt));CHKERRQ(ierr); 1607 1608 nn = 0; 1609 xbase = bases[rank]; 1610 for (i=1; i<=s_y; i++) { 1611 if (n0 >= 0) { /* left below */ 1612 x_t = lx[n0 % m]; 1613 y_t = ly[(n0/m)]; 1614 s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x; 1615 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1616 } 1617 if (n1 >= 0) { /* directly below */ 1618 x_t = x; 1619 y_t = ly[(n1/m)]; 1620 s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t; 1621 for (j=0; j<x_t; j++) { idx[nn++] = s_t++;} 1622 } 1623 if (n2 >= 0) { /* right below */ 1624 x_t = lx[n2 % m]; 1625 y_t = ly[(n2/m)]; 1626 s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t; 1627 for (j=0; j<s_x; j++) { idx[nn++] = 
s_t++;} 1628 } 1629 } 1630 1631 for (i=0; i<y; i++) { 1632 if (n3 >= 0) { /* directly left */ 1633 x_t = lx[n3 % m]; 1634 /* y_t = y; */ 1635 s_t = bases[n3] + (i+1)*x_t - s_x; 1636 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1637 } 1638 1639 for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */ 1640 1641 if (n5 >= 0) { /* directly right */ 1642 x_t = lx[n5 % m]; 1643 /* y_t = y; */ 1644 s_t = bases[n5] + (i)*x_t; 1645 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1646 } 1647 } 1648 1649 for (i=1; i<=s_y; i++) { 1650 if (n6 >= 0) { /* left above */ 1651 x_t = lx[n6 % m]; 1652 /* y_t = ly[(n6/m)]; */ 1653 s_t = bases[n6] + (i)*x_t - s_x; 1654 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1655 } 1656 if (n7 >= 0) { /* directly above */ 1657 x_t = x; 1658 /* y_t = ly[(n7/m)]; */ 1659 s_t = bases[n7] + (i-1)*x_t; 1660 for (j=0; j<x_t; j++) { idx[nn++] = s_t++;} 1661 } 1662 if (n8 >= 0) { /* right above */ 1663 x_t = lx[n8 % m]; 1664 /* y_t = ly[(n8/m)]; */ 1665 s_t = bases[n8] + (i-1)*x_t; 1666 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1667 } 1668 } 1669 1670 ierr = ISCreateBlock(comm,dof,nn,idx,PETSC_COPY_VALUES,&from);CHKERRQ(ierr); 1671 ierr = VecScatterCreate(global,from,local,to,>ol);CHKERRQ(ierr); 1672 ierr = PetscLogObjectParent(da,gtol);CHKERRQ(ierr); 1673 ierr = ISDestroy(&to);CHKERRQ(ierr); 1674 ierr = ISDestroy(&from);CHKERRQ(ierr); 1675 1676 if (stencil_type == DMDA_STENCIL_STAR) { 1677 n0 = sn0; n2 = sn2; n6 = sn6; n8 = sn8; 1678 } 1679 1680 if ((stencil_type == DMDA_STENCIL_STAR) || 1681 (bx && bx != DMDA_BOUNDARY_PERIODIC) || 1682 (by && by != DMDA_BOUNDARY_PERIODIC)) { 1683 /* 1684 Recompute the local to global mappings, this time keeping the 1685 information about the cross corner processor numbers and any ghosted 1686 but not periodic indices. 
1687 */ 1688 nn = 0; 1689 xbase = bases[rank]; 1690 for (i=1; i<=s_y; i++) { 1691 if (n0 >= 0) { /* left below */ 1692 x_t = lx[n0 % m]; 1693 y_t = ly[(n0/m)]; 1694 s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x; 1695 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1696 } else if (xs-Xs > 0 && ys-Ys > 0) { 1697 for (j=0; j<s_x; j++) { idx[nn++] = -1;} 1698 } 1699 if (n1 >= 0) { /* directly below */ 1700 x_t = x; 1701 y_t = ly[(n1/m)]; 1702 s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t; 1703 for (j=0; j<x_t; j++) { idx[nn++] = s_t++;} 1704 } else if (ys-Ys > 0) { 1705 for (j=0; j<x; j++) { idx[nn++] = -1;} 1706 } 1707 if (n2 >= 0) { /* right below */ 1708 x_t = lx[n2 % m]; 1709 y_t = ly[(n2/m)]; 1710 s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t; 1711 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1712 } else if (Xe-xe> 0 && ys-Ys > 0) { 1713 for (j=0; j<s_x; j++) { idx[nn++] = -1;} 1714 } 1715 } 1716 1717 for (i=0; i<y; i++) { 1718 if (n3 >= 0) { /* directly left */ 1719 x_t = lx[n3 % m]; 1720 /* y_t = y; */ 1721 s_t = bases[n3] + (i+1)*x_t - s_x; 1722 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1723 } else if (xs-Xs > 0) { 1724 for (j=0; j<s_x; j++) { idx[nn++] = -1;} 1725 } 1726 1727 for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */ 1728 1729 if (n5 >= 0) { /* directly right */ 1730 x_t = lx[n5 % m]; 1731 /* y_t = y; */ 1732 s_t = bases[n5] + (i)*x_t; 1733 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1734 } else if (Xe-xe > 0) { 1735 for (j=0; j<s_x; j++) { idx[nn++] = -1;} 1736 } 1737 } 1738 1739 for (i=1; i<=s_y; i++) { 1740 if (n6 >= 0) { /* left above */ 1741 x_t = lx[n6 % m]; 1742 /* y_t = ly[(n6/m)]; */ 1743 s_t = bases[n6] + (i)*x_t - s_x; 1744 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1745 } else if (xs-Xs > 0 && Ye-ye > 0) { 1746 for (j=0; j<s_x; j++) { idx[nn++] = -1;} 1747 } 1748 if (n7 >= 0) { /* directly above */ 1749 x_t = x; 1750 /* y_t = ly[(n7/m)]; */ 1751 s_t = bases[n7] + (i-1)*x_t; 1752 for (j=0; j<x_t; j++) { idx[nn++] = s_t++;} 1753 } else if 
(Ye-ye > 0) { 1754 for (j=0; j<x; j++) { idx[nn++] = -1;} 1755 } 1756 if (n8 >= 0) { /* right above */ 1757 x_t = lx[n8 % m]; 1758 /* y_t = ly[(n8/m)]; */ 1759 s_t = bases[n8] + (i-1)*x_t; 1760 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1761 } else if (Xe-xe > 0 && Ye-ye > 0) { 1762 for (j=0; j<s_x; j++) { idx[nn++] = -1;} 1763 } 1764 } 1765 } 1766 /* 1767 Set the local to global ordering in the global vector, this allows use 1768 of VecSetValuesLocal(). 1769 */ 1770 ierr = ISCreateBlock(comm,dof,nn,idx,PETSC_OWN_POINTER,<ogis);CHKERRQ(ierr); 1771 ierr = PetscMalloc(nn*dof*sizeof(PetscInt),&idx_cpy);CHKERRQ(ierr); 1772 ierr = PetscLogObjectMemory(da,nn*dof*sizeof(PetscInt));CHKERRQ(ierr); 1773 ierr = ISGetIndices(ltogis, &idx_full); 1774 ierr = PetscMemcpy(idx_cpy,idx_full,nn*dof*sizeof(PetscInt));CHKERRQ(ierr); 1775 ierr = ISRestoreIndices(ltogis, &idx_full); 1776 ierr = ISLocalToGlobalMappingCreateIS(ltogis,&da->ltogmap);CHKERRQ(ierr); 1777 ierr = PetscLogObjectParent(da,da->ltogmap);CHKERRQ(ierr); 1778 ierr = ISDestroy(<ogis);CHKERRQ(ierr); 1779 ierr = ISLocalToGlobalMappingBlock(da->ltogmap,dd->w,&da->ltogmapb);CHKERRQ(ierr); 1780 ierr = PetscLogObjectParent(da,da->ltogmap);CHKERRQ(ierr); 1781 1782 ierr = PetscFree2(bases,ldims);CHKERRQ(ierr); 1783 dd->m = m; dd->n = n; 1784 /* note petsc expects xs/xe/Xs/Xe to be multiplied by #dofs in many places */ 1785 dd->xs = xs*dof; dd->xe = xe*dof; dd->ys = ys; dd->ye = ye; dd->zs = 0; dd->ze = 1; 1786 dd->Xs = Xs*dof; dd->Xe = Xe*dof; dd->Ys = Ys; dd->Ye = Ye; dd->Zs = 0; dd->Ze = 1; 1787 1788 ierr = VecDestroy(&local);CHKERRQ(ierr); 1789 ierr = VecDestroy(&global);CHKERRQ(ierr); 1790 1791 dd->gtol = gtol; 1792 dd->ltog = ltog; 1793 dd->idx = idx_cpy; 1794 dd->Nl = nn*dof; 1795 dd->base = base; 1796 da->ops->view = DMView_DA_2d; 1797 dd->ltol = PETSC_NULL; 1798 dd->ao = PETSC_NULL; 1799 1800 PetscFunctionReturn(0); 1801 } 1802 1803 #undef __FUNCT__ 1804 #define __FUNCT__ "DMDACreate2d" 1805 /*@C 1806 DMDACreate2d - 
Creates an object that will manage the communication of two-dimensional 1807 regular array data that is distributed across some processors. 1808 1809 Collective on MPI_Comm 1810 1811 Input Parameters: 1812 + comm - MPI communicator 1813 . bx,by - type of ghost nodes the array have. 1814 Use one of DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_GHOSTED, DMDA_BOUNDARY_PERIODIC. 1815 . stencil_type - stencil type. Use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR. 1816 . M,N - global dimension in each direction of the array (use -M and or -N to indicate that it may be set to a different value 1817 from the command line with -da_grid_x <M> -da_grid_y <N>) 1818 . m,n - corresponding number of processors in each dimension 1819 (or PETSC_DECIDE to have calculated) 1820 . dof - number of degrees of freedom per node 1821 . s - stencil width 1822 - lx, ly - arrays containing the number of nodes in each cell along 1823 the x and y coordinates, or PETSC_NULL. If non-null, these 1824 must be of length as m and n, and the corresponding 1825 m and n cannot be PETSC_DECIDE. The sum of the lx[] entries 1826 must be M, and the sum of the ly[] entries must be N. 1827 1828 Output Parameter: 1829 . da - the resulting distributed array object 1830 1831 Options Database Key: 1832 + -da_view - Calls DMView() at the conclusion of DMDACreate2d() 1833 . -da_grid_x <nx> - number of grid points in x direction, if M < 0 1834 . -da_grid_y <ny> - number of grid points in y direction, if N < 0 1835 . -da_processors_x <nx> - number of processors in x direction 1836 . -da_processors_y <ny> - number of processors in y direction 1837 . -da_refine_x - refinement ratio in x direction 1838 - -da_refine_y - refinement ratio in y direction 1839 1840 Level: beginner 1841 1842 Notes: 1843 The stencil type DMDA_STENCIL_STAR with width 1 corresponds to the 1844 standard 5-pt stencil, while DMDA_STENCIL_BOX with width 1 denotes 1845 the standard 9-pt stencil. 
1846 1847 The array data itself is NOT stored in the DMDA, it is stored in Vec objects; 1848 The appropriate vector objects can be obtained with calls to DMCreateGlobalVector() 1849 and DMCreateLocalVector() and calls to VecDuplicate() if more are needed. 1850 1851 .keywords: distributed array, create, two-dimensional 1852 1853 .seealso: DMDestroy(), DMView(), DMDACreate1d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDAGetRefinementFactor(), 1854 DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDASetRefinementFactor(), 1855 DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMDALoad(), DMDAGetOwnershipRanges() 1856 1857 @*/ 1858 PetscErrorCode DMDACreate2d(MPI_Comm comm,DMDABoundaryType bx,DMDABoundaryType by,DMDAStencilType stencil_type, 1859 PetscInt M,PetscInt N,PetscInt m,PetscInt n,PetscInt dof,PetscInt s,const PetscInt lx[],const PetscInt ly[],DM *da) 1860 { 1861 PetscErrorCode ierr; 1862 1863 PetscFunctionBegin; 1864 ierr = DMDACreate(comm, da);CHKERRQ(ierr); 1865 ierr = DMDASetDim(*da, 2);CHKERRQ(ierr); 1866 ierr = DMDASetSizes(*da, M, N, 1);CHKERRQ(ierr); 1867 ierr = DMDASetNumProcs(*da, m, n, PETSC_DECIDE);CHKERRQ(ierr); 1868 ierr = DMDASetBoundaryType(*da, bx, by, DMDA_BOUNDARY_NONE);CHKERRQ(ierr); 1869 ierr = DMDASetDof(*da, dof);CHKERRQ(ierr); 1870 ierr = DMDASetStencilType(*da, stencil_type);CHKERRQ(ierr); 1871 ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr); 1872 ierr = DMDASetOwnershipRanges(*da, lx, ly, PETSC_NULL);CHKERRQ(ierr); 1873 /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */ 1874 ierr = DMSetFromOptions(*da);CHKERRQ(ierr); 1875 ierr = DMSetUp(*da);CHKERRQ(ierr); 1876 ierr = DMView_DA_Private(*da);CHKERRQ(ierr); 1877 PetscFunctionReturn(0); 1878 } 1879