1 2 #define PETSCDM_DLL 3 4 #include "private/daimpl.h" /*I "petscdm.h" I*/ 5 6 #undef __FUNCT__ 7 #define __FUNCT__ "DMView_DA_2d" 8 PetscErrorCode DMView_DA_2d(DM da,PetscViewer viewer) 9 { 10 PetscErrorCode ierr; 11 PetscMPIInt rank; 12 PetscBool iascii,isdraw,isbinary; 13 DM_DA *dd = (DM_DA*)da->data; 14 #if defined(PETSC_HAVE_MATLAB_ENGINE) 15 PetscBool ismatlab; 16 #endif 17 18 PetscFunctionBegin; 19 ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr); 20 21 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 22 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 23 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 24 #if defined(PETSC_HAVE_MATLAB_ENGINE) 25 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr); 26 #endif 27 if (iascii) { 28 PetscViewerFormat format; 29 30 ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr); 31 if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) { 32 DMDALocalInfo info; 33 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 34 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D N %D m %D n %D w %D s %D\n",rank,dd->M,dd->N,dd->m,dd->n,dd->w,dd->s);CHKERRQ(ierr); 35 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D, Y range of indices: %D %D\n",info.xs,info.xs+info.xm,info.ys,info.ys+info.ym);CHKERRQ(ierr); 36 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 37 } else { 38 ierr = DMView_DA_VTK(da,viewer);CHKERRQ(ierr); 39 } 40 } else if (isdraw) { 41 PetscDraw draw; 42 double ymin = -1*dd->s-1,ymax = dd->N+dd->s; 43 double xmin = -1*dd->s-1,xmax = dd->M+dd->s; 44 double x,y; 45 PetscInt base,*idx; 46 char node[10]; 47 PetscBool isnull; 48 49 ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 50 ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0); 51 if 
(!dd->coordinates) { 52 ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr); 53 } 54 ierr = PetscDrawSynchronizedClear(draw);CHKERRQ(ierr); 55 56 /* first processor draw all node lines */ 57 if (!rank) { 58 ymin = 0.0; ymax = dd->N - 1; 59 for (xmin=0; xmin<dd->M; xmin++) { 60 ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr); 61 } 62 xmin = 0.0; xmax = dd->M - 1; 63 for (ymin=0; ymin<dd->N; ymin++) { 64 ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr); 65 } 66 } 67 ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr); 68 ierr = PetscDrawPause(draw);CHKERRQ(ierr); 69 70 /* draw my box */ 71 ymin = dd->ys; ymax = dd->ye - 1; xmin = dd->xs/dd->w; 72 xmax =(dd->xe-1)/dd->w; 73 ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr); 74 ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr); 75 ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr); 76 ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr); 77 78 /* put in numbers */ 79 base = (dd->base)/dd->w; 80 for (y=ymin; y<=ymax; y++) { 81 for (x=xmin; x<=xmax; x++) { 82 sprintf(node,"%d",(int)base++); 83 ierr = PetscDrawString(draw,x,y,PETSC_DRAW_BLACK,node);CHKERRQ(ierr); 84 } 85 } 86 87 ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr); 88 ierr = PetscDrawPause(draw);CHKERRQ(ierr); 89 /* overlay ghost numbers, useful for error checking */ 90 /* put in numbers */ 91 92 base = 0; idx = dd->idx; 93 ymin = dd->Ys; ymax = dd->Ye; xmin = dd->Xs; xmax = dd->Xe; 94 for (y=ymin; y<ymax; y++) { 95 for (x=xmin; x<xmax; x++) { 96 if ((base % dd->w) == 0) { 97 sprintf(node,"%d",(int)(idx[base]/dd->w)); 98 ierr = PetscDrawString(draw,x/dd->w,y,PETSC_DRAW_BLUE,node);CHKERRQ(ierr); 99 } 100 base++; 101 } 102 } 103 ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr); 104 ierr = PetscDrawPause(draw);CHKERRQ(ierr); 105 } else if (isbinary){ 106 ierr = 
DMView_DA_Binary(da,viewer);CHKERRQ(ierr); 107 #if defined(PETSC_HAVE_MATLAB_ENGINE) 108 } else if (ismatlab) { 109 ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr); 110 #endif 111 } else SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for DMDA 1d",((PetscObject)viewer)->type_name); 112 PetscFunctionReturn(0); 113 } 114 115 /* 116 M is number of grid points 117 m is number of processors 118 119 */ 120 #undef __FUNCT__ 121 #define __FUNCT__ "DMDASplitComm2d" 122 PetscErrorCode DMDASplitComm2d(MPI_Comm comm,PetscInt M,PetscInt N,PetscInt sw,MPI_Comm *outcomm) 123 { 124 PetscErrorCode ierr; 125 PetscInt m,n = 0,x = 0,y = 0; 126 PetscMPIInt size,csize,rank; 127 128 PetscFunctionBegin; 129 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 130 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 131 132 csize = 4*size; 133 do { 134 if (csize % 4) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot split communicator of size %d tried %d %D %D",size,csize,x,y); 135 csize = csize/4; 136 137 m = (PetscInt)(0.5 + sqrt(((double)M)*((double)csize)/((double)N))); 138 if (!m) m = 1; 139 while (m > 0) { 140 n = csize/m; 141 if (m*n == csize) break; 142 m--; 143 } 144 if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;} 145 146 x = M/m + ((M % m) > ((csize-1) % m)); 147 y = (N + (csize-1)/m)/n; 148 } while ((x < 4 || y < 4) && csize > 1); 149 if (size != csize) { 150 MPI_Group entire_group,sub_group; 151 PetscMPIInt i,*groupies; 152 153 ierr = MPI_Comm_group(comm,&entire_group);CHKERRQ(ierr); 154 ierr = PetscMalloc(csize*sizeof(PetscInt),&groupies);CHKERRQ(ierr); 155 for (i=0; i<csize; i++) { 156 groupies[i] = (rank/csize)*csize + i; 157 } 158 ierr = MPI_Group_incl(entire_group,csize,groupies,&sub_group);CHKERRQ(ierr); 159 ierr = PetscFree(groupies);CHKERRQ(ierr); 160 ierr = MPI_Comm_create(comm,sub_group,outcomm);CHKERRQ(ierr); 161 ierr = MPI_Group_free(&entire_group);CHKERRQ(ierr); 162 ierr = MPI_Group_free(&sub_group);CHKERRQ(ierr); 163 ierr = 
PetscInfo1(0,"DMDASplitComm2d:Creating redundant coarse problems of size %d\n",csize);CHKERRQ(ierr);
  } else {
    *outcomm = comm;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalFunction"
/*@C
   DMDASetLocalFunction - Caches in a DM a local function.

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  lf - the local function

   Level: intermediate

   Notes: The routine SNESDAFormFunction() uses this cached function to evaluate the user provided function.

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunctioni()
@*/
PetscErrorCode DMDASetLocalFunction(DM da,DMDALocalFunction1 lf)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->lf = lf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalFunctioni"
/*@C
   DMDASetLocalFunctioni - Caches in a DM a local function that evaluates a single component

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  lfi - the local function

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
@*/
PetscErrorCode DMDASetLocalFunctioni(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->lfi = lfi;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalFunctionib"
/*@C
   DMDASetLocalFunctionib - Caches in a DM a block local function that evaluates a single component

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  lfi - the local function

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
@*/
PetscErrorCode DMDASetLocalFunctionib(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->lfib = lfi;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicFunction_Private"
/* Caches the ADIC-differentiated form of the local function (set via the public macro interface). */
PetscErrorCode DMDASetLocalAdicFunction_Private(DM da,DMDALocalFunction1 ad_lf)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adic_lf = ad_lf;
  PetscFunctionReturn(0);
}

/*MC
       DMDASetLocalAdicFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR

   Synopsis:
   PetscErrorCode DMDASetLocalAdicFunctioni(DM da,PetscInt (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*)

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ad_lfi - the local function as computed by ADIC/ADIFOR

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
          DMDASetLocalJacobian(), DMDASetLocalFunctioni()
M*/

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicFunctioni_Private"
PetscErrorCode DMDASetLocalAdicFunctioni_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adic_lfi = ad_lfi;
  PetscFunctionReturn(0);
}

/*MC
       DMDASetLocalAdicMFFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR

   Synopsis:
   PetscErrorCode DMDASetLocalAdicMFFunctioni(DM da,int (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*)

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
          DMDASetLocalJacobian(), DMDASetLocalFunctioni()
M*/

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicMFFunctioni_Private"
PetscErrorCode DMDASetLocalAdicMFFunctioni_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adicmf_lfi = admf_lfi;
  PetscFunctionReturn(0);
}

/*MC
       DMDASetLocalAdicFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR

   Synopsis:
   PetscErrorCode DMDASetLocalAdicFunctionib(DM da,PetscInt (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*)

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ad_lfi - the local function as computed by ADIC/ADIFOR

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
          DMDASetLocalJacobian(), DMDASetLocalFunctionib()
M*/

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicFunctionib_Private"
PetscErrorCode DMDASetLocalAdicFunctionib_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adic_lfib = ad_lfi;
  PetscFunctionReturn(0);
}

/*MC
       DMDASetLocalAdicMFFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR

   Synopsis:
   PetscErrorCode DMDASetLocalAdicMFFunctionib(DM da,int (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*)

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
          DMDASetLocalJacobian(), DMDASetLocalFunctionib()
M*/

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicMFFunctionib_Private"
PetscErrorCode DMDASetLocalAdicMFFunctionib_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adicmf_lfib = admf_lfi;
  PetscFunctionReturn(0);
}

/*MC
       DMDASetLocalAdicMFFunction - Caches in a DM a local function computed by ADIC/ADIFOR

   Synopsis:
   PetscErrorCode DMDASetLocalAdicMFFunction(DM da,DMDALocalFunction1 ad_lf)

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ad_lf - the local function as computed by ADIC/ADIFOR

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
          DMDASetLocalJacobian()
M*/

#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicMFFunction_Private"
PetscErrorCode DMDASetLocalAdicMFFunction_Private(DM da,DMDALocalFunction1 ad_lf)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adicmf_lf = ad_lf;
  PetscFunctionReturn(0);
}

/*@C
   DMDASetLocalJacobian - Caches in a DM a local Jacobian computation function

   Logically Collective on DMDA


   Input Parameters:
+  da - initial distributed array
-  lj - the local Jacobian

   Level: intermediate

   Notes: The routine SNESDAFormFunction() uses this cached function to evaluate the user provided function.

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
@*/
#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalJacobian"
PetscErrorCode DMDASetLocalJacobian(DM da,DMDALocalFunction1 lj)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->lj = lj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAGetLocalFunction"
/*@C
   DMDAGetLocalFunction - Gets from a DM a local function and its ADIC/ADIFOR Jacobian

   Not Collective

   Input Parameter:
.  da - initial distributed array

   Output Parameter:
.  lf - the local function

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalJacobian(), DMDASetLocalFunction()
@*/
PetscErrorCode DMDAGetLocalFunction(DM da,DMDALocalFunction1 *lf)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (lf) *lf = dd->lf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAGetLocalJacobian"
/*@C
   DMDAGetLocalJacobian - Gets from a DM a local jacobian

   Not Collective

   Input Parameter:
.  da - initial distributed array

   Output Parameter:
.  lj - the local jacobian

   Level: intermediate

.keywords: distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalJacobian()
@*/
PetscErrorCode DMDAGetLocalJacobian(DM da,DMDALocalFunction1 *lj)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (lj) *lj = dd->lj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunction"
/*@
   DMDAFormFunction - Evaluates a user provided function on each processor that
   share a DMDA

   Input Parameters:
+  da - the DM that defines the grid
.  lf - the local function to evaluate
.  vu - input vector
.  vfu - output vector
-  w - any user data

   Notes: Does NOT do ghost updates on vu upon entry

          This should eventually replace DMDAFormFunction1

   Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode DMDAFormFunction(DM da,PetscErrorCode (*lf)(void),Vec vu,Vec vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u,*fu;
  DMDALocalInfo  info;
  /* cast the generic function pointer back to the local-function calling convention */
  PetscErrorCode (*f)(DMDALocalInfo*,void*,void*,void*) = (PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*))lf;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vfu,&fu);CHKERRQ(ierr);

  ierr = (*f)(&info,u,fu,w);CHKERRQ(ierr);

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctionLocal"
/*@C
   DMDAFormFunctionLocal - This is a universal function evaluation routine for
   a local DM function.

   Collective on DMDA

   Input Parameters:
+  da - the DM context
.  func - The local function
.  X - input vector
.
F - function vector
-  ctx - A user context

   Level: intermediate

.seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
          SNESSetFunction(), SNESSetJacobian()

@*/
PetscErrorCode DMDAFormFunctionLocal(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx)
{
  Vec            localX;
  DMDALocalInfo  info;
  void           *u;
  void           *fu;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,F,&fu);CHKERRQ(ierr);
  ierr = (*func)(&info,u,fu,ctx);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,F,&fu);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctionLocalGhost"
/*@C
   DMDAFormFunctionLocalGhost - This is a universal function evaluation routine for
   a local DM function, but the ghost values of the output are communicated and added.

   Collective on DMDA

   Input Parameters:
+  da - the DM context
.  func - The local function
.  X - input vector
.  F - function vector
-  ctx - A user context

   Level: intermediate

.seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
          SNESSetFunction(), SNESSetJacobian()

@*/
PetscErrorCode DMDAFormFunctionLocalGhost(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx)
{
  Vec            localX, localF;
  DMDALocalInfo  info;
  void           *u;
  void           *fu;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMGetLocalVector(da,&localF);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  /* both output vectors start at zero so ghost contributions can be summed in */
  ierr = VecSet(F, 0.0);CHKERRQ(ierr);
  ierr = VecSet(localF, 0.0);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,localF,&fu);CHKERRQ(ierr);
  ierr = (*func)(&info,u,fu,ctx);CHKERRQ(ierr);
  /* accumulate ghost-point contributions into the global function vector */
  ierr = DMLocalToGlobalBegin(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localF,&fu);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localF);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunction1"
/*@
   DMDAFormFunction1 - Evaluates a user provided function on each processor that
   share a DMDA

   Input Parameters:
+  da - the DM that defines the grid
.  vu - input vector
.
vfu - output vector
-  w - any user data

   Notes: Does NOT do ghost updates on vu upon entry

   Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode DMDAFormFunction1(DM da,Vec vu,Vec vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u,*fu;
  DMDALocalInfo  info;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vfu,&fu);CHKERRQ(ierr);

  CHKMEMQ;
  ierr = (*dd->lf)(&info,u,fu,w);CHKERRQ(ierr);
  CHKMEMQ;

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctioniTest1"
/* Sanity test: compares DMDAFormFunctioni1() pointwise evaluation against
   DMDAFormFunction1() on a random input vector; prints the 2-norm of the
   difference (should be zero) and views both result vectors. */
PetscErrorCode DMDAFormFunctioniTest1(DM da,void *w)
{
  Vec            vu,fu,fui;
  PetscErrorCode ierr;
  PetscInt       i,n;
  PetscScalar    *ui;
  PetscRandom    rnd;
  PetscReal      norm;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(da,&vu);CHKERRQ(ierr);
  ierr = PetscRandomCreate(PETSC_COMM_SELF,&rnd);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(rnd);CHKERRQ(ierr);
  ierr = VecSetRandom(vu,rnd);CHKERRQ(ierr);
  ierr = PetscRandomDestroy(rnd);CHKERRQ(ierr);

  ierr = DMGetGlobalVector(da,&fu);CHKERRQ(ierr);
  ierr = DMGetGlobalVector(da,&fui);CHKERRQ(ierr);

  ierr = DMDAFormFunction1(da,vu,fu,w);CHKERRQ(ierr);

  ierr = VecGetArray(fui,&ui);CHKERRQ(ierr);
  ierr = VecGetLocalSize(fui,&n);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ierr = DMDAFormFunctioni1(da,i,vu,ui+i,w);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(fui,&ui);CHKERRQ(ierr);

  /* fui <- fui - fu; norm measures disagreement between the two evaluation paths */
  ierr = VecAXPY(fui,-1.0,fu);CHKERRQ(ierr);
  ierr = VecNorm(fui,NORM_2,&norm);CHKERRQ(ierr);
  ierr = PetscPrintf(((PetscObject)da)->comm,"Norm of difference in vectors %G\n",norm);CHKERRQ(ierr);
  ierr = VecView(fu,0);CHKERRQ(ierr);
  ierr = VecView(fui,0);CHKERRQ(ierr);

  ierr = DMRestoreLocalVector(da,&vu);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&fu);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&fui);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctioni1"
/*@
   DMDAFormFunctioni1 - Evaluates a user provided point-wise function

   Input Parameters:
+  da - the DM that defines the grid
.  i - the component of the function we wish to compute (must be local)
.  vu - input vector
.  vfu - output value
-  w - any user data

   Notes: Does NOT do ghost updates on vu upon entry

   Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode DMDAFormFunctioni1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u;
  DMDALocalInfo  info;
  MatStencil     stencil;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;

  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);

  /* figure out stencil value from i */
  stencil.c = i % info.dof;
  stencil.i = (i % (info.xm*info.dof))/info.dof;
  stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
  stencil.k = i/(info.xm*info.ym*info.dof);

  ierr = (*dd->lfi)(&info,&stencil,u,vfu,w);CHKERRQ(ierr);

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctionib1"
/*@
   DMDAFormFunctionib1 - Evaluates a user provided point-block function

   Input Parameters:
+  da - the DM that defines the grid
.  i - the component of the function we wish to compute (must be local)
.  vu - input vector
.
vfu - output value 777 - w - any user data 778 779 Notes: Does NOT do ghost updates on vu upon entry 780 781 Level: advanced 782 783 .seealso: DMDAComputeJacobian1WithAdic() 784 785 @*/ 786 PetscErrorCode DMDAFormFunctionib1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w) 787 { 788 PetscErrorCode ierr; 789 void *u; 790 DMDALocalInfo info; 791 MatStencil stencil; 792 DM_DA *dd = (DM_DA*)da->data; 793 794 PetscFunctionBegin; 795 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 796 ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr); 797 798 /* figure out stencil value from i */ 799 stencil.c = i % info.dof; 800 if (stencil.c) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_WRONG,"Point-block functions can only be called for the entire block"); 801 stencil.i = (i % (info.xm*info.dof))/info.dof; 802 stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof); 803 stencil.k = i/(info.xm*info.ym*info.dof); 804 805 ierr = (*dd->lfib)(&info,&stencil,u,vfu,w);CHKERRQ(ierr); 806 807 ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr); 808 PetscFunctionReturn(0); 809 } 810 811 #if defined(new) 812 #undef __FUNCT__ 813 #define __FUNCT__ "DMDAGetDiagonal_MFFD" 814 /* 815 DMDAGetDiagonal_MFFD - Gets the diagonal for a matrix free matrix where local 816 function lives on a DMDA 817 818 y ~= (F(u + ha) - F(u))/h, 819 where F = nonlinear function, as set by SNESSetFunction() 820 u = current iterate 821 h = difference interval 822 */ 823 PetscErrorCode DMDAGetDiagonal_MFFD(DM da,Vec U,Vec a) 824 { 825 PetscScalar h,*aa,*ww,v; 826 PetscReal epsilon = PETSC_SQRT_MACHINE_EPSILON,umin = 100.0*PETSC_SQRT_MACHINE_EPSILON; 827 PetscErrorCode ierr; 828 PetscInt gI,nI; 829 MatStencil stencil; 830 DMDALocalInfo info; 831 832 PetscFunctionBegin; 833 ierr = (*ctx->func)(0,U,a,ctx->funcctx);CHKERRQ(ierr); 834 ierr = (*ctx->funcisetbase)(U,ctx->funcctx);CHKERRQ(ierr); 835 836 ierr = VecGetArray(U,&ww);CHKERRQ(ierr); 837 ierr = VecGetArray(a,&aa);CHKERRQ(ierr); 838 839 nI = 0; 840 h = ww[gI]; 841 if 
(h == 0.0) h = 1.0; 842 #if !defined(PETSC_USE_COMPLEX) 843 if (h < umin && h >= 0.0) h = umin; 844 else if (h < 0.0 && h > -umin) h = -umin; 845 #else 846 if (PetscAbsScalar(h) < umin && PetscRealPart(h) >= 0.0) h = umin; 847 else if (PetscRealPart(h) < 0.0 && PetscAbsScalar(h) < umin) h = -umin; 848 #endif 849 h *= epsilon; 850 851 ww[gI] += h; 852 ierr = (*ctx->funci)(i,w,&v,ctx->funcctx);CHKERRQ(ierr); 853 aa[nI] = (v - aa[nI])/h; 854 ww[gI] -= h; 855 nI++; 856 } 857 ierr = VecRestoreArray(U,&ww);CHKERRQ(ierr); 858 ierr = VecRestoreArray(a,&aa);CHKERRQ(ierr); 859 PetscFunctionReturn(0); 860 } 861 #endif 862 863 #if defined(PETSC_HAVE_ADIC) 864 EXTERN_C_BEGIN 865 #include "adic/ad_utils.h" 866 EXTERN_C_END 867 868 #undef __FUNCT__ 869 #define __FUNCT__ "DMDAComputeJacobian1WithAdic" 870 /*@C 871 DMDAComputeJacobian1WithAdic - Evaluates a adiC provided Jacobian function on each processor that 872 share a DMDA 873 874 Input Parameters: 875 + da - the DM that defines the grid 876 . vu - input vector (ghosted) 877 . J - output matrix 878 - w - any user data 879 880 Level: advanced 881 882 Notes: Does NOT do ghost updates on vu upon entry 883 884 .seealso: DMDAFormFunction1() 885 886 @*/ 887 PetscErrorCode DMDAComputeJacobian1WithAdic(DM da,Vec vu,Mat J,void *w) 888 { 889 PetscErrorCode ierr; 890 PetscInt gtdof,tdof; 891 PetscScalar *ustart; 892 DMDALocalInfo info; 893 void *ad_u,*ad_f,*ad_ustart,*ad_fstart; 894 ISColoring iscoloring; 895 896 PetscFunctionBegin; 897 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 898 899 PetscADResetIndep(); 900 901 /* get space for derivative objects. 
*/ 902 ierr = DMDAGetAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,>dof);CHKERRQ(ierr); 903 ierr = DMDAGetAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr); 904 ierr = VecGetArray(vu,&ustart);CHKERRQ(ierr); 905 ierr = DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);CHKERRQ(ierr); 906 907 PetscADSetValueAndColor(ad_ustart,gtdof,iscoloring->colors,ustart); 908 909 ierr = VecRestoreArray(vu,&ustart);CHKERRQ(ierr); 910 ierr = ISColoringDestroy(iscoloring);CHKERRQ(ierr); 911 ierr = PetscADIncrementTotalGradSize(iscoloring->n);CHKERRQ(ierr); 912 PetscADSetIndepDone(); 913 914 ierr = PetscLogEventBegin(DMDA_LocalADFunction,0,0,0,0);CHKERRQ(ierr); 915 ierr = (*dd->adic_lf)(&info,ad_u,ad_f,w);CHKERRQ(ierr); 916 ierr = PetscLogEventEnd(DMDA_LocalADFunction,0,0,0,0);CHKERRQ(ierr); 917 918 /* stick the values into the matrix */ 919 ierr = MatSetValuesAdic(J,(PetscScalar**)ad_fstart);CHKERRQ(ierr); 920 921 /* return space for derivative objects. */ 922 ierr = DMDARestoreAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,>dof);CHKERRQ(ierr); 923 ierr = DMDARestoreAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr); 924 PetscFunctionReturn(0); 925 } 926 927 #undef __FUNCT__ 928 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAdic" 929 /*@C 930 DMDAMultiplyByJacobian1WithAdic - Applies an ADIC-provided Jacobian function to a vector on 931 each processor that shares a DMDA. 932 933 Input Parameters: 934 + da - the DM that defines the grid 935 . vu - Jacobian is computed at this point (ghosted) 936 . v - product is done on this vector (ghosted) 937 . fu - output vector = J(vu)*v (not ghosted) 938 - w - any user data 939 940 Notes: 941 This routine does NOT do ghost updates on vu upon entry. 
942 943 Level: advanced 944 945 .seealso: DMDAFormFunction1() 946 947 @*/ 948 PetscErrorCode DMDAMultiplyByJacobian1WithAdic(DM da,Vec vu,Vec v,Vec f,void *w) 949 { 950 PetscErrorCode ierr; 951 PetscInt i,gtdof,tdof; 952 PetscScalar *avu,*av,*af,*ad_vustart,*ad_fstart; 953 DMDALocalInfo info; 954 void *ad_vu,*ad_f; 955 956 PetscFunctionBegin; 957 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 958 959 /* get space for derivative objects. */ 960 ierr = DMDAGetAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,>dof);CHKERRQ(ierr); 961 ierr = DMDAGetAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr); 962 963 /* copy input vector into derivative object */ 964 ierr = VecGetArray(vu,&avu);CHKERRQ(ierr); 965 ierr = VecGetArray(v,&av);CHKERRQ(ierr); 966 for (i=0; i<gtdof; i++) { 967 ad_vustart[2*i] = avu[i]; 968 ad_vustart[2*i+1] = av[i]; 969 } 970 ierr = VecRestoreArray(vu,&avu);CHKERRQ(ierr); 971 ierr = VecRestoreArray(v,&av);CHKERRQ(ierr); 972 973 PetscADResetIndep(); 974 ierr = PetscADIncrementTotalGradSize(1);CHKERRQ(ierr); 975 PetscADSetIndepDone(); 976 977 ierr = (*dd->adicmf_lf)(&info,ad_vu,ad_f,w);CHKERRQ(ierr); 978 979 /* stick the values into the vector */ 980 ierr = VecGetArray(f,&af);CHKERRQ(ierr); 981 for (i=0; i<tdof; i++) { 982 af[i] = ad_fstart[2*i+1]; 983 } 984 ierr = VecRestoreArray(f,&af);CHKERRQ(ierr); 985 986 /* return space for derivative objects. */ 987 ierr = DMDARestoreAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,>dof);CHKERRQ(ierr); 988 ierr = DMDARestoreAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr); 989 PetscFunctionReturn(0); 990 } 991 #endif 992 993 #undef __FUNCT__ 994 #define __FUNCT__ "DMDAComputeJacobian1" 995 /*@ 996 DMDAComputeJacobian1 - Evaluates a local Jacobian function on each processor that 997 share a DMDA 998 999 Input Parameters: 1000 + da - the DM that defines the grid 1001 . vu - input vector (ghosted) 1002 . 
J - output matrix 1003 - w - any user data 1004 1005 Notes: Does NOT do ghost updates on vu upon entry 1006 1007 Level: advanced 1008 1009 .seealso: DMDAFormFunction1() 1010 1011 @*/ 1012 PetscErrorCode DMDAComputeJacobian1(DM da,Vec vu,Mat J,void *w) 1013 { 1014 PetscErrorCode ierr; 1015 void *u; 1016 DMDALocalInfo info; 1017 DM_DA *dd = (DM_DA*)da->data; 1018 1019 PetscFunctionBegin; 1020 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 1021 ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr); 1022 ierr = (*dd->lj)(&info,u,J,w);CHKERRQ(ierr); 1023 ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr); 1024 PetscFunctionReturn(0); 1025 } 1026 1027 1028 #undef __FUNCT__ 1029 #define __FUNCT__ "DMDAComputeJacobian1WithAdifor" 1030 /* 1031 DMDAComputeJacobian1WithAdifor - Evaluates a ADIFOR provided Jacobian local function on each processor that 1032 share a DMDA 1033 1034 Input Parameters: 1035 + da - the DM that defines the grid 1036 . vu - input vector (ghosted) 1037 . J - output matrix 1038 - w - any user data 1039 1040 Notes: Does NOT do ghost updates on vu upon entry 1041 1042 .seealso: DMDAFormFunction1() 1043 1044 */ 1045 PetscErrorCode DMDAComputeJacobian1WithAdifor(DM da,Vec vu,Mat J,void *w) 1046 { 1047 PetscErrorCode ierr; 1048 PetscInt i,Nc,N; 1049 ISColoringValue *color; 1050 DMDALocalInfo info; 1051 PetscScalar *u,*g_u,*g_f,*f = 0,*p_u; 1052 ISColoring iscoloring; 1053 DM_DA *dd = (DM_DA*)da->data; 1054 void (*lf)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*) = 1055 (void (*)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*))*dd->adifor_lf; 1056 1057 PetscFunctionBegin; 1058 ierr = DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);CHKERRQ(ierr); 1059 Nc = iscoloring->n; 1060 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 1061 N = info.gxm*info.gym*info.gzm*info.dof; 1062 1063 /* get space for derivative 
objects. */ 1064 ierr = PetscMalloc(Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar),&g_u);CHKERRQ(ierr); 1065 ierr = PetscMemzero(g_u,Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar));CHKERRQ(ierr); 1066 p_u = g_u; 1067 color = iscoloring->colors; 1068 for (i=0; i<N; i++) { 1069 p_u[*color++] = 1.0; 1070 p_u += Nc; 1071 } 1072 ierr = ISColoringDestroy(iscoloring);CHKERRQ(ierr); 1073 ierr = PetscMalloc2(Nc*info.xm*info.ym*info.zm*info.dof,PetscScalar,&g_f,info.xm*info.ym*info.zm*info.dof,PetscScalar,&f);CHKERRQ(ierr); 1074 1075 /* Seed the input array g_u with coloring information */ 1076 1077 ierr = VecGetArray(vu,&u);CHKERRQ(ierr); 1078 (lf)(&Nc,&info,u,g_u,&Nc,f,g_f,&Nc,w,&ierr);CHKERRQ(ierr); 1079 ierr = VecRestoreArray(vu,&u);CHKERRQ(ierr); 1080 1081 /* stick the values into the matrix */ 1082 /* PetscScalarView(Nc*info.xm*info.ym,g_f,0); */ 1083 ierr = MatSetValuesAdifor(J,Nc,g_f);CHKERRQ(ierr); 1084 1085 /* return space for derivative objects. */ 1086 ierr = PetscFree(g_u);CHKERRQ(ierr); 1087 ierr = PetscFree2(g_f,f);CHKERRQ(ierr); 1088 PetscFunctionReturn(0); 1089 } 1090 1091 #undef __FUNCT__ 1092 #define __FUNCT__ "DMDAFormJacobianLocal" 1093 /*@C 1094 DMDAFormjacobianLocal - This is a universal Jacobian evaluation routine for 1095 a local DM function. 1096 1097 Collective on DMDA 1098 1099 Input Parameters: 1100 + da - the DM context 1101 . func - The local function 1102 . X - input vector 1103 . 
J - Jacobian matrix 1104 - ctx - A user context 1105 1106 Level: intermediate 1107 1108 .seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(), 1109 SNESSetFunction(), SNESSetJacobian() 1110 1111 @*/ 1112 PetscErrorCode DMDAFormJacobianLocal(DM da, DMDALocalFunction1 func, Vec X, Mat J, void *ctx) 1113 { 1114 Vec localX; 1115 DMDALocalInfo info; 1116 void *u; 1117 PetscErrorCode ierr; 1118 1119 PetscFunctionBegin; 1120 ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr); 1121 /* 1122 Scatter ghost points to local vector, using the 2-step process 1123 DMGlobalToLocalBegin(), DMGlobalToLocalEnd(). 1124 */ 1125 ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr); 1126 ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr); 1127 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 1128 ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr); 1129 ierr = (*func)(&info,u,J,ctx);CHKERRQ(ierr); 1130 ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr); 1131 ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr); 1132 PetscFunctionReturn(0); 1133 } 1134 1135 #undef __FUNCT__ 1136 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAD" 1137 /*@C 1138 DMDAMultiplyByJacobian1WithAD - Applies a Jacobian function supplied by ADIFOR or ADIC 1139 to a vector on each processor that shares a DMDA. 1140 1141 Input Parameters: 1142 + da - the DM that defines the grid 1143 . vu - Jacobian is computed at this point (ghosted) 1144 . v - product is done on this vector (ghosted) 1145 . fu - output vector = J(vu)*v (not ghosted) 1146 - w - any user data 1147 1148 Notes: 1149 This routine does NOT do ghost updates on vu and v upon entry. 1150 1151 Automatically calls DMDAMultiplyByJacobian1WithAdifor() or DMDAMultiplyByJacobian1WithAdic() 1152 depending on whether DMDASetLocalAdicMFFunction() or DMDASetLocalAdiforMFFunction() was called. 
1153 1154 Level: advanced 1155 1156 .seealso: DMDAFormFunction1(), DMDAMultiplyByJacobian1WithAdifor(), DMDAMultiplyByJacobian1WithAdic() 1157 1158 @*/ 1159 PetscErrorCode DMDAMultiplyByJacobian1WithAD(DM da,Vec u,Vec v,Vec f,void *w) 1160 { 1161 PetscErrorCode ierr; 1162 DM_DA *dd = (DM_DA*)da->data; 1163 1164 PetscFunctionBegin; 1165 if (dd->adicmf_lf) { 1166 #if defined(PETSC_HAVE_ADIC) 1167 ierr = DMDAMultiplyByJacobian1WithAdic(da,u,v,f,w);CHKERRQ(ierr); 1168 #else 1169 SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP_SYS,"Requires ADIC to be installed and cannot use complex numbers"); 1170 #endif 1171 } else if (dd->adiformf_lf) { 1172 ierr = DMDAMultiplyByJacobian1WithAdifor(da,u,v,f,w);CHKERRQ(ierr); 1173 } else { 1174 SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ORDER,"Must call DMDASetLocalAdiforMFFunction() or DMDASetLocalAdicMFFunction() before using"); 1175 } 1176 PetscFunctionReturn(0); 1177 } 1178 1179 1180 #undef __FUNCT__ 1181 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAdifor" 1182 /*@C 1183 DMDAMultiplyByJacobian1WithAdifor - Applies a ADIFOR provided Jacobian function on each processor that 1184 share a DM to a vector 1185 1186 Input Parameters: 1187 + da - the DM that defines the grid 1188 . vu - Jacobian is computed at this point (ghosted) 1189 . v - product is done on this vector (ghosted) 1190 . 
fu - output vector = J(vu)*v (not ghosted) 1191 - w - any user data 1192 1193 Notes: Does NOT do ghost updates on vu and v upon entry 1194 1195 Level: advanced 1196 1197 .seealso: DMDAFormFunction1() 1198 1199 @*/ 1200 PetscErrorCode DMDAMultiplyByJacobian1WithAdifor(DM da,Vec u,Vec v,Vec f,void *w) 1201 { 1202 PetscErrorCode ierr; 1203 PetscScalar *au,*av,*af,*awork; 1204 Vec work; 1205 DMDALocalInfo info; 1206 DM_DA *dd = (DM_DA*)da->data; 1207 void (*lf)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*) = 1208 (void (*)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*))*dd->adiformf_lf; 1209 1210 PetscFunctionBegin; 1211 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 1212 1213 ierr = DMGetGlobalVector(da,&work);CHKERRQ(ierr); 1214 ierr = VecGetArray(u,&au);CHKERRQ(ierr); 1215 ierr = VecGetArray(v,&av);CHKERRQ(ierr); 1216 ierr = VecGetArray(f,&af);CHKERRQ(ierr); 1217 ierr = VecGetArray(work,&awork);CHKERRQ(ierr); 1218 (lf)(&info,au,av,awork,af,w,&ierr);CHKERRQ(ierr); 1219 ierr = VecRestoreArray(u,&au);CHKERRQ(ierr); 1220 ierr = VecRestoreArray(v,&av);CHKERRQ(ierr); 1221 ierr = VecRestoreArray(f,&af);CHKERRQ(ierr); 1222 ierr = VecRestoreArray(work,&awork);CHKERRQ(ierr); 1223 ierr = DMRestoreGlobalVector(da,&work);CHKERRQ(ierr); 1224 1225 PetscFunctionReturn(0); 1226 } 1227 1228 #undef __FUNCT__ 1229 #define __FUNCT__ "DMSetUp_DA_2D" 1230 PetscErrorCode DMSetUp_DA_2D(DM da) 1231 { 1232 DM_DA *dd = (DM_DA*)da->data; 1233 const PetscInt M = dd->M; 1234 const PetscInt N = dd->N; 1235 PetscInt m = dd->m; 1236 PetscInt n = dd->n; 1237 const PetscInt dof = dd->w; 1238 const PetscInt s = dd->s; 1239 const DMDAPeriodicType wrap = dd->wrap; 1240 const DMDAStencilType stencil_type = dd->stencil_type; 1241 PetscInt *lx = dd->lx; 1242 PetscInt *ly = dd->ly; 1243 MPI_Comm comm; 1244 PetscMPIInt rank,size; 1245 PetscInt xs,xe,ys,ye,x,y,Xs,Xe,Ys,Ye,start,end; 1246 PetscInt 
up,down,left,i,n0,n1,n2,n3,n5,n6,n7,n8,*idx,nn; 1247 PetscInt xbase,*bases,*ldims,j,x_t,y_t,s_t,base,count; 1248 PetscInt s_x,s_y; /* s proportionalized to w */ 1249 PetscInt sn0 = 0,sn2 = 0,sn6 = 0,sn8 = 0; 1250 Vec local,global; 1251 VecScatter ltog,gtol; 1252 IS to,from; 1253 PetscErrorCode ierr; 1254 1255 PetscFunctionBegin; 1256 ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr); 1257 1258 if (dof < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof); 1259 if (s < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s); 1260 1261 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1262 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 1263 1264 dd->dim = 2; 1265 ierr = PetscMalloc(dof*sizeof(char*),&dd->fieldname);CHKERRQ(ierr); 1266 ierr = PetscMemzero(dd->fieldname,dof*sizeof(char*));CHKERRQ(ierr); 1267 1268 if (m != PETSC_DECIDE) { 1269 if (m < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in X direction: %D",m); 1270 else if (m > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in X direction: %D %d",m,size); 1271 } 1272 if (n != PETSC_DECIDE) { 1273 if (n < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in Y direction: %D",n); 1274 else if (n > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in Y direction: %D %d",n,size); 1275 } 1276 1277 if (m == PETSC_DECIDE || n == PETSC_DECIDE) { 1278 if (n != PETSC_DECIDE) { 1279 m = size/n; 1280 } else if (m != PETSC_DECIDE) { 1281 n = size/m; 1282 } else { 1283 /* try for squarish distribution */ 1284 m = (PetscInt)(0.5 + sqrt(((double)M)*((double)size)/((double)N))); 1285 if (!m) m = 1; 1286 while (m > 0) { 1287 n = size/m; 1288 if (m*n == size) break; 1289 m--; 1290 } 1291 if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;} 1292 } 1293 if (m*n != size) SETERRQ(comm,PETSC_ERR_PLIB,"Unable to create partition, check the size of the 
communicator and input m and n "); 1294 } else if (m*n != size) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Given Bad partition"); 1295 1296 if (M < m) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in x direction is too fine! %D %D",M,m); 1297 if (N < n) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in y direction is too fine! %D %D",N,n); 1298 1299 /* 1300 Determine locally owned region 1301 xs is the first local node number, x is the number of local nodes 1302 */ 1303 if (!lx) { 1304 ierr = PetscMalloc(m*sizeof(PetscInt), &dd->lx);CHKERRQ(ierr); 1305 lx = dd->lx; 1306 for (i=0; i<m; i++) { 1307 lx[i] = M/m + ((M % m) > i); 1308 } 1309 } 1310 x = lx[rank % m]; 1311 xs = 0; 1312 for (i=0; i<(rank % m); i++) { 1313 xs += lx[i]; 1314 } 1315 #if defined(PETSC_USE_DEBUG) 1316 left = xs; 1317 for (i=(rank % m); i<m; i++) { 1318 left += lx[i]; 1319 } 1320 if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M: %D %D",left,M); 1321 #endif 1322 1323 /* 1324 Determine locally owned region 1325 ys is the first local node number, y is the number of local nodes 1326 */ 1327 if (!ly) { 1328 ierr = PetscMalloc(n*sizeof(PetscInt), &dd->ly);CHKERRQ(ierr); 1329 ly = dd->ly; 1330 for (i=0; i<n; i++) { 1331 ly[i] = N/n + ((N % n) > i); 1332 } 1333 } 1334 y = ly[rank/m]; 1335 ys = 0; 1336 for (i=0; i<(rank/m); i++) { 1337 ys += ly[i]; 1338 } 1339 #if defined(PETSC_USE_DEBUG) 1340 left = ys; 1341 for (i=(rank/m); i<n; i++) { 1342 left += ly[i]; 1343 } 1344 if (left != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of ly across processors not equal to N: %D %D",left,N); 1345 #endif 1346 1347 if (x < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s); 1348 if (y < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local y-width of domain y %D is smaller than stencil width s %D",y,s); 1349 xe = xs + x; 1350 ye = ys + y; 1351 1352 /* determine 
ghost region */ 1353 /* Assume No Periodicity */ 1354 if (xs-s > 0) Xs = xs - s; else Xs = 0; 1355 if (ys-s > 0) Ys = ys - s; else Ys = 0; 1356 if (xe+s <= M) Xe = xe + s; else Xe = M; 1357 if (ye+s <= N) Ye = ye + s; else Ye = N; 1358 1359 /* X Periodic */ 1360 if (DMDAXPeriodic(wrap)){ 1361 Xs = xs - s; 1362 Xe = xe + s; 1363 } 1364 1365 /* Y Periodic */ 1366 if (DMDAYPeriodic(wrap)){ 1367 Ys = ys - s; 1368 Ye = ye + s; 1369 } 1370 1371 /* Resize all X parameters to reflect w */ 1372 x *= dof; 1373 xs *= dof; 1374 xe *= dof; 1375 Xs *= dof; 1376 Xe *= dof; 1377 s_x = s*dof; 1378 s_y = s; 1379 1380 /* determine starting point of each processor */ 1381 nn = x*y; 1382 ierr = PetscMalloc2(size+1,PetscInt,&bases,size,PetscInt,&ldims);CHKERRQ(ierr); 1383 ierr = MPI_Allgather(&nn,1,MPIU_INT,ldims,1,MPIU_INT,comm);CHKERRQ(ierr); 1384 bases[0] = 0; 1385 for (i=1; i<=size; i++) { 1386 bases[i] = ldims[i-1]; 1387 } 1388 for (i=1; i<=size; i++) { 1389 bases[i] += bases[i-1]; 1390 } 1391 1392 /* allocate the base parallel and sequential vectors */ 1393 dd->Nlocal = x*y; 1394 ierr = VecCreateMPIWithArray(comm,dd->Nlocal,PETSC_DECIDE,0,&global);CHKERRQ(ierr); 1395 ierr = VecSetBlockSize(global,dof);CHKERRQ(ierr); 1396 dd->nlocal = (Xe-Xs)*(Ye-Ys); 1397 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->nlocal,0,&local);CHKERRQ(ierr); 1398 ierr = VecSetBlockSize(local,dof);CHKERRQ(ierr); 1399 1400 1401 /* generate appropriate vector scatters */ 1402 /* local to global inserts non-ghost point region into global */ 1403 ierr = VecGetOwnershipRange(global,&start,&end);CHKERRQ(ierr); 1404 ierr = ISCreateStride(comm,x*y,start,1,&to);CHKERRQ(ierr); 1405 1406 left = xs - Xs; down = ys - Ys; up = down + y; 1407 ierr = PetscMalloc(x*(up - down)*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1408 count = 0; 1409 for (i=down; i<up; i++) { 1410 for (j=0; j<x/dof; j++) { 1411 idx[count++] = (left + i*(Xe-Xs) + j*dof)/dof; 1412 } 1413 } 1414 ierr = 
ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&from);CHKERRQ(ierr); 1415 1416 ierr = VecScatterCreate(local,from,global,to,<og);CHKERRQ(ierr); 1417 ierr = PetscLogObjectParent(dd,ltog);CHKERRQ(ierr); 1418 ierr = ISDestroy(from);CHKERRQ(ierr); 1419 ierr = ISDestroy(to);CHKERRQ(ierr); 1420 1421 /* global to local must include ghost points */ 1422 if (stencil_type == DMDA_STENCIL_BOX) { 1423 ierr = ISCreateStride(comm,(Xe-Xs)*(Ye-Ys),0,1,&to);CHKERRQ(ierr); 1424 } else { 1425 /* must drop into cross shape region */ 1426 /* ---------| 1427 | top | 1428 |--- ---| 1429 | middle | 1430 | | 1431 ---- ---- 1432 | bottom | 1433 ----------- 1434 Xs xs xe Xe */ 1435 /* bottom */ 1436 left = xs - Xs; down = ys - Ys; up = down + y; 1437 count = down*(xe-xs) + (up-down)*(Xe-Xs) + (Ye-Ys-up)*(xe-xs); 1438 ierr = PetscMalloc(count*sizeof(PetscInt)/dof,&idx);CHKERRQ(ierr); 1439 count = 0; 1440 for (i=0; i<down; i++) { 1441 for (j=0; j<xe-xs; j += dof) { 1442 idx[count++] = left + i*(Xe-Xs) + j; 1443 } 1444 } 1445 /* middle */ 1446 for (i=down; i<up; i++) { 1447 for (j=0; j<Xe-Xs; j += dof) { 1448 idx[count++] = i*(Xe-Xs) + j; 1449 } 1450 } 1451 /* top */ 1452 for (i=up; i<Ye-Ys; i++) { 1453 for (j=0; j<xe-xs; j += dof) { 1454 idx[count++] = (left + i*(Xe-Xs) + j); 1455 } 1456 } 1457 for (i=0; i<count; i++) idx[i] = idx[i]/dof; 1458 ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&to);CHKERRQ(ierr); 1459 } 1460 1461 1462 /* determine who lies on each side of us stored in n6 n7 n8 1463 n3 n5 1464 n0 n1 n2 1465 */ 1466 1467 /* Assume the Non-Periodic Case */ 1468 n1 = rank - m; 1469 if (rank % m) { 1470 n0 = n1 - 1; 1471 } else { 1472 n0 = -1; 1473 } 1474 if ((rank+1) % m) { 1475 n2 = n1 + 1; 1476 n5 = rank + 1; 1477 n8 = rank + m + 1; if (n8 >= m*n) n8 = -1; 1478 } else { 1479 n2 = -1; n5 = -1; n8 = -1; 1480 } 1481 if (rank % m) { 1482 n3 = rank - 1; 1483 n6 = n3 + m; if (n6 >= m*n) n6 = -1; 1484 } else { 1485 n3 = -1; n6 = -1; 1486 } 1487 n7 = rank + m; if (n7 >= m*n) 
n7 = -1; 1488 1489 1490 /* Modify for Periodic Cases */ 1491 if (wrap == DMDA_YPERIODIC) { /* Handle Top and Bottom Sides */ 1492 if (n1 < 0) n1 = rank + m * (n-1); 1493 if (n7 < 0) n7 = rank - m * (n-1); 1494 if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1; 1495 if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1; 1496 if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1; 1497 if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1; 1498 } else if (wrap == DMDA_XPERIODIC) { /* Handle Left and Right Sides */ 1499 if (n3 < 0) n3 = rank + (m-1); 1500 if (n5 < 0) n5 = rank - (m-1); 1501 if ((n1 >= 0) && (n0 < 0)) n0 = rank-1; 1502 if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1; 1503 if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1; 1504 if ((n7 >= 0) && (n8 < 0)) n8 = rank+1; 1505 } else if (wrap == DMDA_XYPERIODIC) { 1506 1507 /* Handle all four corners */ 1508 if ((n6 < 0) && (n7 < 0) && (n3 < 0)) n6 = m-1; 1509 if ((n8 < 0) && (n7 < 0) && (n5 < 0)) n8 = 0; 1510 if ((n2 < 0) && (n5 < 0) && (n1 < 0)) n2 = size-m; 1511 if ((n0 < 0) && (n3 < 0) && (n1 < 0)) n0 = size-1; 1512 1513 /* Handle Top and Bottom Sides */ 1514 if (n1 < 0) n1 = rank + m * (n-1); 1515 if (n7 < 0) n7 = rank - m * (n-1); 1516 if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1; 1517 if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1; 1518 if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1; 1519 if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1; 1520 1521 /* Handle Left and Right Sides */ 1522 if (n3 < 0) n3 = rank + (m-1); 1523 if (n5 < 0) n5 = rank - (m-1); 1524 if ((n1 >= 0) && (n0 < 0)) n0 = rank-1; 1525 if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1; 1526 if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1; 1527 if ((n7 >= 0) && (n8 < 0)) n8 = rank+1; 1528 } 1529 ierr = PetscMalloc(9*sizeof(PetscInt),&dd->neighbors);CHKERRQ(ierr); 1530 dd->neighbors[0] = n0; 1531 dd->neighbors[1] = n1; 1532 dd->neighbors[2] = n2; 1533 dd->neighbors[3] = n3; 1534 dd->neighbors[4] = rank; 1535 dd->neighbors[5] = n5; 1536 dd->neighbors[6] = n6; 1537 dd->neighbors[7] = 
n7; 1538 dd->neighbors[8] = n8; 1539 1540 if (stencil_type == DMDA_STENCIL_STAR) { 1541 /* save corner processor numbers */ 1542 sn0 = n0; sn2 = n2; sn6 = n6; sn8 = n8; 1543 n0 = n2 = n6 = n8 = -1; 1544 } 1545 1546 ierr = PetscMalloc((x+2*s_x)*(y+2*s_y)*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1547 ierr = PetscLogObjectMemory(da,(x+2*s_x)*(y+2*s_y)*sizeof(PetscInt));CHKERRQ(ierr); 1548 nn = 0; 1549 1550 xbase = bases[rank]; 1551 for (i=1; i<=s_y; i++) { 1552 if (n0 >= 0) { /* left below */ 1553 x_t = lx[n0 % m]*dof; 1554 y_t = ly[(n0/m)]; 1555 s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x; 1556 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1557 } 1558 if (n1 >= 0) { /* directly below */ 1559 x_t = x; 1560 y_t = ly[(n1/m)]; 1561 s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t; 1562 for (j=0; j<x_t; j++) { idx[nn++] = s_t++;} 1563 } 1564 if (n2 >= 0) { /* right below */ 1565 x_t = lx[n2 % m]*dof; 1566 y_t = ly[(n2/m)]; 1567 s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t; 1568 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1569 } 1570 } 1571 1572 for (i=0; i<y; i++) { 1573 if (n3 >= 0) { /* directly left */ 1574 x_t = lx[n3 % m]*dof; 1575 /* y_t = y; */ 1576 s_t = bases[n3] + (i+1)*x_t - s_x; 1577 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1578 } 1579 1580 for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */ 1581 1582 if (n5 >= 0) { /* directly right */ 1583 x_t = lx[n5 % m]*dof; 1584 /* y_t = y; */ 1585 s_t = bases[n5] + (i)*x_t; 1586 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1587 } 1588 } 1589 1590 for (i=1; i<=s_y; i++) { 1591 if (n6 >= 0) { /* left above */ 1592 x_t = lx[n6 % m]*dof; 1593 /* y_t = ly[(n6/m)]; */ 1594 s_t = bases[n6] + (i)*x_t - s_x; 1595 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1596 } 1597 if (n7 >= 0) { /* directly above */ 1598 x_t = x; 1599 /* y_t = ly[(n7/m)]; */ 1600 s_t = bases[n7] + (i-1)*x_t; 1601 for (j=0; j<x_t; j++) { idx[nn++] = s_t++;} 1602 } 1603 if (n8 >= 0) { /* right above */ 1604 x_t = lx[n8 % m]*dof; 1605 /* y_t = ly[(n8/m)]; */ 1606 
s_t = bases[n8] + (i-1)*x_t; 1607 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1608 } 1609 } 1610 1611 base = bases[rank]; 1612 { 1613 PetscInt nnn = nn/dof,*iidx; 1614 ierr = PetscMalloc(nnn*sizeof(PetscInt),&iidx);CHKERRQ(ierr); 1615 for (i=0; i<nnn; i++) { 1616 iidx[i] = idx[dof*i]/dof; 1617 } 1618 ierr = ISCreateBlock(comm,dof,nnn,iidx,PETSC_OWN_POINTER,&from);CHKERRQ(ierr); 1619 } 1620 ierr = VecScatterCreate(global,from,local,to,>ol);CHKERRQ(ierr); 1621 ierr = PetscLogObjectParent(da,gtol);CHKERRQ(ierr); 1622 ierr = ISDestroy(to);CHKERRQ(ierr); 1623 ierr = ISDestroy(from);CHKERRQ(ierr); 1624 1625 if (stencil_type == DMDA_STENCIL_STAR) { 1626 /* 1627 Recompute the local to global mappings, this time keeping the 1628 information about the cross corner processor numbers. 1629 */ 1630 n0 = sn0; n2 = sn2; n6 = sn6; n8 = sn8; 1631 nn = 0; 1632 xbase = bases[rank]; 1633 for (i=1; i<=s_y; i++) { 1634 if (n0 >= 0) { /* left below */ 1635 x_t = lx[n0 % m]*dof; 1636 y_t = ly[(n0/m)]; 1637 s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x; 1638 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1639 } 1640 if (n1 >= 0) { /* directly below */ 1641 x_t = x; 1642 y_t = ly[(n1/m)]; 1643 s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t; 1644 for (j=0; j<x_t; j++) { idx[nn++] = s_t++;} 1645 } 1646 if (n2 >= 0) { /* right below */ 1647 x_t = lx[n2 % m]*dof; 1648 y_t = ly[(n2/m)]; 1649 s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t; 1650 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1651 } 1652 } 1653 1654 for (i=0; i<y; i++) { 1655 if (n3 >= 0) { /* directly left */ 1656 x_t = lx[n3 % m]*dof; 1657 /* y_t = y; */ 1658 s_t = bases[n3] + (i+1)*x_t - s_x; 1659 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1660 } 1661 1662 for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */ 1663 1664 if (n5 >= 0) { /* directly right */ 1665 x_t = lx[n5 % m]*dof; 1666 /* y_t = y; */ 1667 s_t = bases[n5] + (i)*x_t; 1668 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1669 } 1670 } 1671 1672 for (i=1; i<=s_y; i++) { 1673 if 
(n6 >= 0) { /* left above */ 1674 x_t = lx[n6 % m]*dof; 1675 /* y_t = ly[(n6/m)]; */ 1676 s_t = bases[n6] + (i)*x_t - s_x; 1677 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1678 } 1679 if (n7 >= 0) { /* directly above */ 1680 x_t = x; 1681 /* y_t = ly[(n7/m)]; */ 1682 s_t = bases[n7] + (i-1)*x_t; 1683 for (j=0; j<x_t; j++) { idx[nn++] = s_t++;} 1684 } 1685 if (n8 >= 0) { /* right above */ 1686 x_t = lx[n8 % m]*dof; 1687 /* y_t = ly[(n8/m)]; */ 1688 s_t = bases[n8] + (i-1)*x_t; 1689 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1690 } 1691 } 1692 } 1693 ierr = PetscFree2(bases,ldims);CHKERRQ(ierr); 1694 1695 dd->m = m; dd->n = n; 1696 dd->xs = xs; dd->xe = xe; dd->ys = ys; dd->ye = ye; dd->zs = 0; dd->ze = 1; 1697 dd->Xs = Xs; dd->Xe = Xe; dd->Ys = Ys; dd->Ye = Ye; dd->Zs = 0; dd->Ze = 1; 1698 1699 ierr = VecDestroy(local);CHKERRQ(ierr); 1700 ierr = VecDestroy(global);CHKERRQ(ierr); 1701 1702 dd->gtol = gtol; 1703 dd->ltog = ltog; 1704 dd->idx = idx; 1705 dd->Nl = nn; 1706 dd->base = base; 1707 da->ops->view = DMView_DA_2d; 1708 1709 /* 1710 Set the local to global ordering in the global vector, this allows use 1711 of VecSetValuesLocal(). 1712 */ 1713 ierr = ISLocalToGlobalMappingCreate(comm,nn,idx,PETSC_OWN_POINTER,&da->ltogmap);CHKERRQ(ierr); 1714 ierr = ISLocalToGlobalMappingBlock(da->ltogmap,dd->w,&da->ltogmapb);CHKERRQ(ierr); 1715 ierr = PetscLogObjectParent(da,da->ltogmap);CHKERRQ(ierr); 1716 1717 dd->ltol = PETSC_NULL; 1718 dd->ao = PETSC_NULL; 1719 1720 PetscFunctionReturn(0); 1721 } 1722 1723 #undef __FUNCT__ 1724 #define __FUNCT__ "DMDACreate2d" 1725 /*@C 1726 DMDACreate2d - Creates an object that will manage the communication of two-dimensional 1727 regular array data that is distributed across some processors. 1728 1729 Collective on MPI_Comm 1730 1731 Input Parameters: 1732 + comm - MPI communicator 1733 . wrap - type of periodicity should the array have. 1734 Use one of DMDA_NONPERIODIC, DMDA_XPERIODIC, DMDA_YPERIODIC, or DMDA_XYPERIODIC. 1735 . 
stencil_type - stencil type. Use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR. 1736 . M,N - global dimension in each direction of the array (use -M and or -N to indicate that it may be set to a different value 1737 from the command line with -da_grid_x <M> -da_grid_y <N>) 1738 . m,n - corresponding number of processors in each dimension 1739 (or PETSC_DECIDE to have calculated) 1740 . dof - number of degrees of freedom per node 1741 . s - stencil width 1742 - lx, ly - arrays containing the number of nodes in each cell along 1743 the x and y coordinates, or PETSC_NULL. If non-null, these 1744 must be of length as m and n, and the corresponding 1745 m and n cannot be PETSC_DECIDE. The sum of the lx[] entries 1746 must be M, and the sum of the ly[] entries must be N. 1747 1748 Output Parameter: 1749 . da - the resulting distributed array object 1750 1751 Options Database Key: 1752 + -da_view - Calls DMView() at the conclusion of DMDACreate2d() 1753 . -da_grid_x <nx> - number of grid points in x direction, if M < 0 1754 . -da_grid_y <ny> - number of grid points in y direction, if N < 0 1755 . -da_processors_x <nx> - number of processors in x direction 1756 . -da_processors_y <ny> - number of processors in y direction 1757 . -da_refine_x - refinement ratio in x direction 1758 - -da_refine_y - refinement ratio in y direction 1759 1760 Level: beginner 1761 1762 Notes: 1763 The stencil type DMDA_STENCIL_STAR with width 1 corresponds to the 1764 standard 5-pt stencil, while DMDA_STENCIL_BOX with width 1 denotes 1765 the standard 9-pt stencil. 1766 1767 The array data itself is NOT stored in the DMDA, it is stored in Vec objects; 1768 The appropriate vector objects can be obtained with calls to DMCreateGlobalVector() 1769 and DMCreateLocalVector() and calls to VecDuplicate() if more are needed. 
1770 1771 .keywords: distributed array, create, two-dimensional 1772 1773 .seealso: DMDestroy(), DMView(), DMDACreate1d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDAGetRefinementFactor(), 1774 DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDASetRefinementFactor(), 1775 DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMDALoad(), DMDAGetOwnershipRanges() 1776 1777 @*/ 1778 PetscErrorCode DMDACreate2d(MPI_Comm comm,DMDAPeriodicType wrap,DMDAStencilType stencil_type, 1779 PetscInt M,PetscInt N,PetscInt m,PetscInt n,PetscInt dof,PetscInt s,const PetscInt lx[],const PetscInt ly[],DM *da) 1780 { 1781 PetscErrorCode ierr; 1782 1783 PetscFunctionBegin; 1784 ierr = DMDACreate(comm, da);CHKERRQ(ierr); 1785 ierr = DMDASetDim(*da, 2);CHKERRQ(ierr); 1786 ierr = DMDASetSizes(*da, M, N, 1);CHKERRQ(ierr); 1787 ierr = DMDASetNumProcs(*da, m, n, PETSC_DECIDE);CHKERRQ(ierr); 1788 ierr = DMDASetPeriodicity(*da, wrap);CHKERRQ(ierr); 1789 ierr = DMDASetDof(*da, dof);CHKERRQ(ierr); 1790 ierr = DMDASetStencilType(*da, stencil_type);CHKERRQ(ierr); 1791 ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr); 1792 ierr = DMDASetOwnershipRanges(*da, lx, ly, PETSC_NULL);CHKERRQ(ierr); 1793 /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */ 1794 ierr = DMSetFromOptions(*da);CHKERRQ(ierr); 1795 ierr = DMSetUp(*da);CHKERRQ(ierr); 1796 ierr = DMView_DA_Private(*da);CHKERRQ(ierr); 1797 PetscFunctionReturn(0); 1798 } 1799