1 2 #define PETSCDM_DLL 3 4 #include "private/daimpl.h" /*I "petscdm.h" I*/ 5 6 #undef __FUNCT__ 7 #define __FUNCT__ "DMView_DA_2d" 8 PetscErrorCode DMView_DA_2d(DM da,PetscViewer viewer) 9 { 10 PetscErrorCode ierr; 11 PetscMPIInt rank; 12 PetscBool iascii,isdraw,isbinary; 13 DM_DA *dd = (DM_DA*)da->data; 14 #if defined(PETSC_HAVE_MATLAB_ENGINE) 15 PetscBool ismatlab; 16 #endif 17 18 PetscFunctionBegin; 19 ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr); 20 21 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 22 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 23 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 24 #if defined(PETSC_HAVE_MATLAB_ENGINE) 25 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr); 26 #endif 27 if (iascii) { 28 PetscViewerFormat format; 29 30 ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr); 31 if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) { 32 DMDALocalInfo info; 33 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 34 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D N %D m %D n %D w %D s %D\n",rank,dd->M,dd->N,dd->m,dd->n,dd->w,dd->s);CHKERRQ(ierr); 35 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D, Y range of indices: %D %D\n",info.xs,info.xs+info.xm,info.ys,info.ys+info.ym);CHKERRQ(ierr); 36 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 37 } else { 38 ierr = DMView_DA_VTK(da,viewer);CHKERRQ(ierr); 39 } 40 } else if (isdraw) { 41 PetscDraw draw; 42 double ymin = -1*dd->s-1,ymax = dd->N+dd->s; 43 double xmin = -1*dd->s-1,xmax = dd->M+dd->s; 44 double x,y; 45 PetscInt base,*idx; 46 char node[10]; 47 PetscBool isnull; 48 49 ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 50 ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0); 51 if 
(!dd->coordinates) { 52 ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr); 53 } 54 ierr = PetscDrawSynchronizedClear(draw);CHKERRQ(ierr); 55 56 /* first processor draw all node lines */ 57 if (!rank) { 58 ymin = 0.0; ymax = dd->N - 1; 59 for (xmin=0; xmin<dd->M; xmin++) { 60 ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr); 61 } 62 xmin = 0.0; xmax = dd->M - 1; 63 for (ymin=0; ymin<dd->N; ymin++) { 64 ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr); 65 } 66 } 67 ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr); 68 ierr = PetscDrawPause(draw);CHKERRQ(ierr); 69 70 /* draw my box */ 71 ymin = dd->ys; ymax = dd->ye - 1; xmin = dd->xs/dd->w; 72 xmax =(dd->xe-1)/dd->w; 73 ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr); 74 ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr); 75 ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr); 76 ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr); 77 78 /* put in numbers */ 79 base = (dd->base)/dd->w; 80 for (y=ymin; y<=ymax; y++) { 81 for (x=xmin; x<=xmax; x++) { 82 sprintf(node,"%d",(int)base++); 83 ierr = PetscDrawString(draw,x,y,PETSC_DRAW_BLACK,node);CHKERRQ(ierr); 84 } 85 } 86 87 ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr); 88 ierr = PetscDrawPause(draw);CHKERRQ(ierr); 89 /* overlay ghost numbers, useful for error checking */ 90 /* put in numbers */ 91 92 base = 0; idx = dd->idx; 93 ymin = dd->Ys; ymax = dd->Ye; xmin = dd->Xs; xmax = dd->Xe; 94 for (y=ymin; y<ymax; y++) { 95 for (x=xmin; x<xmax; x++) { 96 if ((base % dd->w) == 0) { 97 sprintf(node,"%d",(int)(idx[base]/dd->w)); 98 ierr = PetscDrawString(draw,x/dd->w,y,PETSC_DRAW_BLUE,node);CHKERRQ(ierr); 99 } 100 base++; 101 } 102 } 103 ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr); 104 ierr = PetscDrawPause(draw);CHKERRQ(ierr); 105 } else if (isbinary){ 106 ierr = 
DMView_DA_Binary(da,viewer);CHKERRQ(ierr); 107 #if defined(PETSC_HAVE_MATLAB_ENGINE) 108 } else if (ismatlab) { 109 ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr); 110 #endif 111 } else SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for DMDA 1d",((PetscObject)viewer)->type_name); 112 PetscFunctionReturn(0); 113 } 114 115 /* 116 M is number of grid points 117 m is number of processors 118 119 */ 120 #undef __FUNCT__ 121 #define __FUNCT__ "DMDASplitComm2d" 122 PetscErrorCode DMDASplitComm2d(MPI_Comm comm,PetscInt M,PetscInt N,PetscInt sw,MPI_Comm *outcomm) 123 { 124 PetscErrorCode ierr; 125 PetscInt m,n = 0,x = 0,y = 0; 126 PetscMPIInt size,csize,rank; 127 128 PetscFunctionBegin; 129 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 130 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 131 132 csize = 4*size; 133 do { 134 if (csize % 4) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot split communicator of size %d tried %d %D %D",size,csize,x,y); 135 csize = csize/4; 136 137 m = (PetscInt)(0.5 + sqrt(((double)M)*((double)csize)/((double)N))); 138 if (!m) m = 1; 139 while (m > 0) { 140 n = csize/m; 141 if (m*n == csize) break; 142 m--; 143 } 144 if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;} 145 146 x = M/m + ((M % m) > ((csize-1) % m)); 147 y = (N + (csize-1)/m)/n; 148 } while ((x < 4 || y < 4) && csize > 1); 149 if (size != csize) { 150 MPI_Group entire_group,sub_group; 151 PetscMPIInt i,*groupies; 152 153 ierr = MPI_Comm_group(comm,&entire_group);CHKERRQ(ierr); 154 ierr = PetscMalloc(csize*sizeof(PetscInt),&groupies);CHKERRQ(ierr); 155 for (i=0; i<csize; i++) { 156 groupies[i] = (rank/csize)*csize + i; 157 } 158 ierr = MPI_Group_incl(entire_group,csize,groupies,&sub_group);CHKERRQ(ierr); 159 ierr = PetscFree(groupies);CHKERRQ(ierr); 160 ierr = MPI_Comm_create(comm,sub_group,outcomm);CHKERRQ(ierr); 161 ierr = MPI_Group_free(&entire_group);CHKERRQ(ierr); 162 ierr = MPI_Group_free(&sub_group);CHKERRQ(ierr); 163 ierr = 
PetscInfo1(0,"DMDASplitComm2d:Creating redundant coarse problems of size %d\n",csize);CHKERRQ(ierr); 164 } else { 165 *outcomm = comm; 166 } 167 PetscFunctionReturn(0); 168 } 169 170 #undef __FUNCT__ 171 #define __FUNCT__ "DMDASetLocalFunction" 172 /*@C 173 DMDASetLocalFunction - Caches in a DM a local function. 174 175 Logically Collective on DMDA 176 177 Input Parameter: 178 + da - initial distributed array 179 - lf - the local function 180 181 Level: intermediate 182 183 Notes: The routine SNESDAFormFunction() uses this the cached function to evaluate the user provided function. 184 185 .keywords: distributed array, refine 186 187 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunctioni() 188 @*/ 189 PetscErrorCode DMDASetLocalFunction(DM da,DMDALocalFunction1 lf) 190 { 191 DM_DA *dd = (DM_DA*)da->data; 192 PetscFunctionBegin; 193 PetscValidHeaderSpecific(da,DM_CLASSID,1); 194 dd->lf = lf; 195 PetscFunctionReturn(0); 196 } 197 198 #undef __FUNCT__ 199 #define __FUNCT__ "DMDASetLocalFunctioni" 200 /*@C 201 DMDASetLocalFunctioni - Caches in a DM a local function that evaluates a single component 202 203 Logically Collective on DMDA 204 205 Input Parameter: 206 + da - initial distributed array 207 - lfi - the local function 208 209 Level: intermediate 210 211 .keywords: distributed array, refine 212 213 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction() 214 @*/ 215 PetscErrorCode DMDASetLocalFunctioni(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*)) 216 { 217 DM_DA *dd = (DM_DA*)da->data; 218 PetscFunctionBegin; 219 PetscValidHeaderSpecific(da,DM_CLASSID,1); 220 dd->lfi = lfi; 221 PetscFunctionReturn(0); 222 } 223 224 #undef __FUNCT__ 225 #define __FUNCT__ "DMDASetLocalFunctionib" 226 /*@C 227 DMDASetLocalFunctionib - Caches in a DM a block local function that evaluates a single component 228 229 Logically 
Collective on DMDA 230 231 Input Parameter: 232 + da - initial distributed array 233 - lfi - the local function 234 235 Level: intermediate 236 237 .keywords: distributed array, refine 238 239 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction() 240 @*/ 241 PetscErrorCode DMDASetLocalFunctionib(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*)) 242 { 243 DM_DA *dd = (DM_DA*)da->data; 244 PetscFunctionBegin; 245 PetscValidHeaderSpecific(da,DM_CLASSID,1); 246 dd->lfib = lfi; 247 PetscFunctionReturn(0); 248 } 249 250 #undef __FUNCT__ 251 #define __FUNCT__ "DMDASetLocalAdicFunction_Private" 252 PetscErrorCode DMDASetLocalAdicFunction_Private(DM da,DMDALocalFunction1 ad_lf) 253 { 254 DM_DA *dd = (DM_DA*)da->data; 255 PetscFunctionBegin; 256 PetscValidHeaderSpecific(da,DM_CLASSID,1); 257 dd->adic_lf = ad_lf; 258 PetscFunctionReturn(0); 259 } 260 261 /*MC 262 DMDASetLocalAdicFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR 263 264 Synopsis: 265 PetscErrorCode DMDASetLocalAdicFunctioni(DM da,PetscInt (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*) 266 267 Logically Collective on DMDA 268 269 Input Parameter: 270 + da - initial distributed array 271 - ad_lfi - the local function as computed by ADIC/ADIFOR 272 273 Level: intermediate 274 275 .keywords: distributed array, refine 276 277 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(), 278 DMDASetLocalJacobian(), DMDASetLocalFunctioni() 279 M*/ 280 281 #undef __FUNCT__ 282 #define __FUNCT__ "DMDASetLocalAdicFunctioni_Private" 283 PetscErrorCode DMDASetLocalAdicFunctioni_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*)) 284 { 285 DM_DA *dd = (DM_DA*)da->data; 286 PetscFunctionBegin; 287 PetscValidHeaderSpecific(da,DM_CLASSID,1); 288 dd->adic_lfi = ad_lfi; 289 PetscFunctionReturn(0); 290 } 291 292 /*MC 
293 DMDASetLocalAdicMFFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR 294 295 Synopsis: 296 PetscErrorCode DMDASetLocalAdicFunctioni(DM da,int (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*) 297 298 Logically Collective on DMDA 299 300 Input Parameter: 301 + da - initial distributed array 302 - admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR 303 304 Level: intermediate 305 306 .keywords: distributed array, refine 307 308 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(), 309 DMDASetLocalJacobian(), DMDASetLocalFunctioni() 310 M*/ 311 312 #undef __FUNCT__ 313 #define __FUNCT__ "DMDASetLocalAdicMFFunctioni_Private" 314 PetscErrorCode DMDASetLocalAdicMFFunctioni_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*)) 315 { 316 DM_DA *dd = (DM_DA*)da->data; 317 PetscFunctionBegin; 318 PetscValidHeaderSpecific(da,DM_CLASSID,1); 319 dd->adicmf_lfi = admf_lfi; 320 PetscFunctionReturn(0); 321 } 322 323 /*MC 324 DMDASetLocalAdicFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR 325 326 Synopsis: 327 PetscErrorCode DMDASetLocalAdicFunctionib(DM da,PetscInt (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*) 328 329 Logically Collective on DMDA 330 331 Input Parameter: 332 + da - initial distributed array 333 - ad_lfi - the local function as computed by ADIC/ADIFOR 334 335 Level: intermediate 336 337 .keywords: distributed array, refine 338 339 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(), 340 DMDASetLocalJacobian(), DMDASetLocalFunctionib() 341 M*/ 342 343 #undef __FUNCT__ 344 #define __FUNCT__ "DMDASetLocalAdicFunctionib_Private" 345 PetscErrorCode DMDASetLocalAdicFunctionib_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*)) 346 { 347 DM_DA *dd = (DM_DA*)da->data; 348 PetscFunctionBegin; 349 
PetscValidHeaderSpecific(da,DM_CLASSID,1); 350 dd->adic_lfib = ad_lfi; 351 PetscFunctionReturn(0); 352 } 353 354 /*MC 355 DMDASetLocalAdicMFFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR 356 357 Synopsis: 358 PetscErrorCode DMDASetLocalAdicFunctionib(DM da,int (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*) 359 360 Logically Collective on DMDA 361 362 Input Parameter: 363 + da - initial distributed array 364 - admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR 365 366 Level: intermediate 367 368 .keywords: distributed array, refine 369 370 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(), 371 DMDASetLocalJacobian(), DMDASetLocalFunctionib() 372 M*/ 373 374 #undef __FUNCT__ 375 #define __FUNCT__ "DMDASetLocalAdicMFFunctionib_Private" 376 PetscErrorCode DMDASetLocalAdicMFFunctionib_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*)) 377 { 378 DM_DA *dd = (DM_DA*)da->data; 379 PetscFunctionBegin; 380 PetscValidHeaderSpecific(da,DM_CLASSID,1); 381 dd->adicmf_lfib = admf_lfi; 382 PetscFunctionReturn(0); 383 } 384 385 /*MC 386 DMDASetLocalAdicMFFunction - Caches in a DM a local function computed by ADIC/ADIFOR 387 388 Synopsis: 389 PetscErrorCode DMDASetLocalAdicMFFunction(DM da,DMDALocalFunction1 ad_lf) 390 391 Logically Collective on DMDA 392 393 Input Parameter: 394 + da - initial distributed array 395 - ad_lf - the local function as computed by ADIC/ADIFOR 396 397 Level: intermediate 398 399 .keywords: distributed array, refine 400 401 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(), 402 DMDASetLocalJacobian() 403 M*/ 404 405 #undef __FUNCT__ 406 #define __FUNCT__ "DMDASetLocalAdicMFFunction_Private" 407 PetscErrorCode DMDASetLocalAdicMFFunction_Private(DM da,DMDALocalFunction1 ad_lf) 408 { 409 DM_DA *dd = (DM_DA*)da->data; 410 
PetscFunctionBegin; 411 PetscValidHeaderSpecific(da,DM_CLASSID,1); 412 dd->adicmf_lf = ad_lf; 413 PetscFunctionReturn(0); 414 } 415 416 /*@C 417 DMDASetLocalJacobian - Caches in a DM a local Jacobian computation function 418 419 Logically Collective on DMDA 420 421 422 Input Parameter: 423 + da - initial distributed array 424 - lj - the local Jacobian 425 426 Level: intermediate 427 428 Notes: The routine SNESDAFormFunction() uses this the cached function to evaluate the user provided function. 429 430 .keywords: distributed array, refine 431 432 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction() 433 @*/ 434 #undef __FUNCT__ 435 #define __FUNCT__ "DMDASetLocalJacobian" 436 PetscErrorCode DMDASetLocalJacobian(DM da,DMDALocalFunction1 lj) 437 { 438 DM_DA *dd = (DM_DA*)da->data; 439 PetscFunctionBegin; 440 PetscValidHeaderSpecific(da,DM_CLASSID,1); 441 dd->lj = lj; 442 PetscFunctionReturn(0); 443 } 444 445 #undef __FUNCT__ 446 #define __FUNCT__ "DMDAGetLocalFunction" 447 /*@C 448 DMDAGetLocalFunction - Gets from a DM a local function and its ADIC/ADIFOR Jacobian 449 450 Note Collective 451 452 Input Parameter: 453 . da - initial distributed array 454 455 Output Parameter: 456 . lf - the local function 457 458 Level: intermediate 459 460 .keywords: distributed array, refine 461 462 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalJacobian(), DMDASetLocalFunction() 463 @*/ 464 PetscErrorCode DMDAGetLocalFunction(DM da,DMDALocalFunction1 *lf) 465 { 466 DM_DA *dd = (DM_DA*)da->data; 467 PetscFunctionBegin; 468 PetscValidHeaderSpecific(da,DM_CLASSID,1); 469 if (lf) *lf = dd->lf; 470 PetscFunctionReturn(0); 471 } 472 473 #undef __FUNCT__ 474 #define __FUNCT__ "DMDAGetLocalJacobian" 475 /*@C 476 DMDAGetLocalJacobian - Gets from a DM a local jacobian 477 478 Not Collective 479 480 Input Parameter: 481 . da - initial distributed array 482 483 Output Parameter: 484 . 
lj - the local jacobian 485 486 Level: intermediate 487 488 .keywords: distributed array, refine 489 490 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalJacobian() 491 @*/ 492 PetscErrorCode DMDAGetLocalJacobian(DM da,DMDALocalFunction1 *lj) 493 { 494 DM_DA *dd = (DM_DA*)da->data; 495 PetscFunctionBegin; 496 PetscValidHeaderSpecific(da,DM_CLASSID,1); 497 if (lj) *lj = dd->lj; 498 PetscFunctionReturn(0); 499 } 500 501 #undef __FUNCT__ 502 #define __FUNCT__ "DMDAFormFunction" 503 /*@ 504 DMDAFormFunction - Evaluates a user provided function on each processor that 505 share a DMDA 506 507 Input Parameters: 508 + da - the DM that defines the grid 509 . vu - input vector 510 . vfu - output vector 511 - w - any user data 512 513 Notes: Does NOT do ghost updates on vu upon entry 514 515 This should eventually replace DMDAFormFunction1 516 517 Level: advanced 518 519 .seealso: DMDAComputeJacobian1WithAdic() 520 521 @*/ 522 PetscErrorCode DMDAFormFunction(DM da,PetscErrorCode (*lf)(void),Vec vu,Vec vfu,void *w) 523 { 524 PetscErrorCode ierr; 525 void *u,*fu; 526 DMDALocalInfo info; 527 PetscErrorCode (*f)(DMDALocalInfo*,void*,void*,void*) = (PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*))lf; 528 529 PetscFunctionBegin; 530 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 531 ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr); 532 ierr = DMDAVecGetArray(da,vfu,&fu);CHKERRQ(ierr); 533 534 ierr = (*f)(&info,u,fu,w);CHKERRQ(ierr); 535 536 ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr); 537 ierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(ierr); 538 PetscFunctionReturn(0); 539 } 540 541 #undef __FUNCT__ 542 #define __FUNCT__ "DMDAFormFunctionLocal" 543 /*@C 544 DMDAFormFunctionLocal - This is a universal function evaluation routine for 545 a local DM function. 546 547 Collective on DMDA 548 549 Input Parameters: 550 + da - the DM context 551 . func - The local function 552 . X - input vector 553 . 
F - function vector 554 - ctx - A user context 555 556 Level: intermediate 557 558 .seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(), 559 SNESSetFunction(), SNESSetJacobian() 560 561 @*/ 562 PetscErrorCode DMDAFormFunctionLocal(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx) 563 { 564 Vec localX; 565 DMDALocalInfo info; 566 void *u; 567 void *fu; 568 PetscErrorCode ierr; 569 570 PetscFunctionBegin; 571 ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr); 572 /* 573 Scatter ghost points to local vector, using the 2-step process 574 DMGlobalToLocalBegin(), DMGlobalToLocalEnd(). 575 */ 576 ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr); 577 ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr); 578 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 579 ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr); 580 ierr = DMDAVecGetArray(da,F,&fu);CHKERRQ(ierr); 581 ierr = (*func)(&info,u,fu,ctx);CHKERRQ(ierr); 582 ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr); 583 ierr = DMDAVecRestoreArray(da,F,&fu);CHKERRQ(ierr); 584 ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr); 585 PetscFunctionReturn(0); 586 } 587 588 #undef __FUNCT__ 589 #define __FUNCT__ "DMDAFormFunctionLocalGhost" 590 /*@C 591 DMDAFormFunctionLocalGhost - This is a universal function evaluation routine for 592 a local DM function, but the ghost values of the output are communicated and added. 593 594 Collective on DMDA 595 596 Input Parameters: 597 + da - the DM context 598 . func - The local function 599 . X - input vector 600 . 
F - function vector 601 - ctx - A user context 602 603 Level: intermediate 604 605 .seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(), 606 SNESSetFunction(), SNESSetJacobian() 607 608 @*/ 609 PetscErrorCode DMDAFormFunctionLocalGhost(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx) 610 { 611 Vec localX, localF; 612 DMDALocalInfo info; 613 void *u; 614 void *fu; 615 PetscErrorCode ierr; 616 617 PetscFunctionBegin; 618 ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr); 619 ierr = DMGetLocalVector(da,&localF);CHKERRQ(ierr); 620 /* 621 Scatter ghost points to local vector, using the 2-step process 622 DMGlobalToLocalBegin(), DMGlobalToLocalEnd(). 623 */ 624 ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr); 625 ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr); 626 ierr = VecSet(F, 0.0);CHKERRQ(ierr); 627 ierr = VecSet(localF, 0.0);CHKERRQ(ierr); 628 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 629 ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr); 630 ierr = DMDAVecGetArray(da,localF,&fu);CHKERRQ(ierr); 631 ierr = (*func)(&info,u,fu,ctx);CHKERRQ(ierr); 632 ierr = DMLocalToGlobalBegin(da,localF,ADD_VALUES,F);CHKERRQ(ierr); 633 ierr = DMLocalToGlobalEnd(da,localF,ADD_VALUES,F);CHKERRQ(ierr); 634 ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr); 635 ierr = DMDAVecRestoreArray(da,localF,&fu);CHKERRQ(ierr); 636 ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr); 637 ierr = DMRestoreLocalVector(da,&localF);CHKERRQ(ierr); 638 PetscFunctionReturn(0); 639 } 640 641 #undef __FUNCT__ 642 #define __FUNCT__ "DMDAFormFunction1" 643 /*@ 644 DMDAFormFunction1 - Evaluates a user provided function on each processor that 645 share a DMDA 646 647 Input Parameters: 648 + da - the DM that defines the grid 649 . vu - input vector 650 . 
vfu - output vector 651 - w - any user data 652 653 Notes: Does NOT do ghost updates on vu upon entry 654 655 Level: advanced 656 657 .seealso: DMDAComputeJacobian1WithAdic() 658 659 @*/ 660 PetscErrorCode DMDAFormFunction1(DM da,Vec vu,Vec vfu,void *w) 661 { 662 PetscErrorCode ierr; 663 void *u,*fu; 664 DMDALocalInfo info; 665 DM_DA *dd = (DM_DA*)da->data; 666 667 PetscFunctionBegin; 668 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 669 ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr); 670 ierr = DMDAVecGetArray(da,vfu,&fu);CHKERRQ(ierr); 671 672 CHKMEMQ; 673 ierr = (*dd->lf)(&info,u,fu,w);CHKERRQ(ierr); 674 CHKMEMQ; 675 676 ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr); 677 ierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(ierr); 678 PetscFunctionReturn(0); 679 } 680 681 #undef __FUNCT__ 682 #define __FUNCT__ "DMDAFormFunctioniTest1" 683 PetscErrorCode DMDAFormFunctioniTest1(DM da,void *w) 684 { 685 Vec vu,fu,fui; 686 PetscErrorCode ierr; 687 PetscInt i,n; 688 PetscScalar *ui; 689 PetscRandom rnd; 690 PetscReal norm; 691 692 PetscFunctionBegin; 693 ierr = DMGetLocalVector(da,&vu);CHKERRQ(ierr); 694 ierr = PetscRandomCreate(PETSC_COMM_SELF,&rnd);CHKERRQ(ierr); 695 ierr = PetscRandomSetFromOptions(rnd);CHKERRQ(ierr); 696 ierr = VecSetRandom(vu,rnd);CHKERRQ(ierr); 697 ierr = PetscRandomDestroy(rnd);CHKERRQ(ierr); 698 699 ierr = DMGetGlobalVector(da,&fu);CHKERRQ(ierr); 700 ierr = DMGetGlobalVector(da,&fui);CHKERRQ(ierr); 701 702 ierr = DMDAFormFunction1(da,vu,fu,w);CHKERRQ(ierr); 703 704 ierr = VecGetArray(fui,&ui);CHKERRQ(ierr); 705 ierr = VecGetLocalSize(fui,&n);CHKERRQ(ierr); 706 for (i=0; i<n; i++) { 707 ierr = DMDAFormFunctioni1(da,i,vu,ui+i,w);CHKERRQ(ierr); 708 } 709 ierr = VecRestoreArray(fui,&ui);CHKERRQ(ierr); 710 711 ierr = VecAXPY(fui,-1.0,fu);CHKERRQ(ierr); 712 ierr = VecNorm(fui,NORM_2,&norm);CHKERRQ(ierr); 713 ierr = PetscPrintf(((PetscObject)da)->comm,"Norm of difference in vectors %G\n",norm);CHKERRQ(ierr); 714 ierr = VecView(fu,0);CHKERRQ(ierr); 
715 ierr = VecView(fui,0);CHKERRQ(ierr); 716 717 ierr = DMRestoreLocalVector(da,&vu);CHKERRQ(ierr); 718 ierr = DMRestoreGlobalVector(da,&fu);CHKERRQ(ierr); 719 ierr = DMRestoreGlobalVector(da,&fui);CHKERRQ(ierr); 720 PetscFunctionReturn(0); 721 } 722 723 #undef __FUNCT__ 724 #define __FUNCT__ "DMDAFormFunctioni1" 725 /*@ 726 DMDAFormFunctioni1 - Evaluates a user provided point-wise function 727 728 Input Parameters: 729 + da - the DM that defines the grid 730 . i - the component of the function we wish to compute (must be local) 731 . vu - input vector 732 . vfu - output value 733 - w - any user data 734 735 Notes: Does NOT do ghost updates on vu upon entry 736 737 Level: advanced 738 739 .seealso: DMDAComputeJacobian1WithAdic() 740 741 @*/ 742 PetscErrorCode DMDAFormFunctioni1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w) 743 { 744 PetscErrorCode ierr; 745 void *u; 746 DMDALocalInfo info; 747 MatStencil stencil; 748 DM_DA *dd = (DM_DA*)da->data; 749 750 PetscFunctionBegin; 751 752 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 753 ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr); 754 755 /* figure out stencil value from i */ 756 stencil.c = i % info.dof; 757 stencil.i = (i % (info.xm*info.dof))/info.dof; 758 stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof); 759 stencil.k = i/(info.xm*info.ym*info.dof); 760 761 ierr = (*dd->lfi)(&info,&stencil,u,vfu,w);CHKERRQ(ierr); 762 763 ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr); 764 PetscFunctionReturn(0); 765 } 766 767 #undef __FUNCT__ 768 #define __FUNCT__ "DMDAFormFunctionib1" 769 /*@ 770 DMDAFormFunctionib1 - Evaluates a user provided point-block function 771 772 Input Parameters: 773 + da - the DM that defines the grid 774 . i - the component of the function we wish to compute (must be local) 775 . vu - input vector 776 . 
vfu - output value 777 - w - any user data 778 779 Notes: Does NOT do ghost updates on vu upon entry 780 781 Level: advanced 782 783 .seealso: DMDAComputeJacobian1WithAdic() 784 785 @*/ 786 PetscErrorCode DMDAFormFunctionib1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w) 787 { 788 PetscErrorCode ierr; 789 void *u; 790 DMDALocalInfo info; 791 MatStencil stencil; 792 DM_DA *dd = (DM_DA*)da->data; 793 794 PetscFunctionBegin; 795 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 796 ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr); 797 798 /* figure out stencil value from i */ 799 stencil.c = i % info.dof; 800 if (stencil.c) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_WRONG,"Point-block functions can only be called for the entire block"); 801 stencil.i = (i % (info.xm*info.dof))/info.dof; 802 stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof); 803 stencil.k = i/(info.xm*info.ym*info.dof); 804 805 ierr = (*dd->lfib)(&info,&stencil,u,vfu,w);CHKERRQ(ierr); 806 807 ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr); 808 PetscFunctionReturn(0); 809 } 810 811 #if defined(new) 812 #undef __FUNCT__ 813 #define __FUNCT__ "DMDAGetDiagonal_MFFD" 814 /* 815 DMDAGetDiagonal_MFFD - Gets the diagonal for a matrix free matrix where local 816 function lives on a DMDA 817 818 y ~= (F(u + ha) - F(u))/h, 819 where F = nonlinear function, as set by SNESSetFunction() 820 u = current iterate 821 h = difference interval 822 */ 823 PetscErrorCode DMDAGetDiagonal_MFFD(DM da,Vec U,Vec a) 824 { 825 PetscScalar h,*aa,*ww,v; 826 PetscReal epsilon = PETSC_SQRT_MACHINE_EPSILON,umin = 100.0*PETSC_SQRT_MACHINE_EPSILON; 827 PetscErrorCode ierr; 828 PetscInt gI,nI; 829 MatStencil stencil; 830 DMDALocalInfo info; 831 832 PetscFunctionBegin; 833 ierr = (*ctx->func)(0,U,a,ctx->funcctx);CHKERRQ(ierr); 834 ierr = (*ctx->funcisetbase)(U,ctx->funcctx);CHKERRQ(ierr); 835 836 ierr = VecGetArray(U,&ww);CHKERRQ(ierr); 837 ierr = VecGetArray(a,&aa);CHKERRQ(ierr); 838 839 nI = 0; 840 h = ww[gI]; 841 if 
(h == 0.0) h = 1.0; 842 #if !defined(PETSC_USE_COMPLEX) 843 if (h < umin && h >= 0.0) h = umin; 844 else if (h < 0.0 && h > -umin) h = -umin; 845 #else 846 if (PetscAbsScalar(h) < umin && PetscRealPart(h) >= 0.0) h = umin; 847 else if (PetscRealPart(h) < 0.0 && PetscAbsScalar(h) < umin) h = -umin; 848 #endif 849 h *= epsilon; 850 851 ww[gI] += h; 852 ierr = (*ctx->funci)(i,w,&v,ctx->funcctx);CHKERRQ(ierr); 853 aa[nI] = (v - aa[nI])/h; 854 ww[gI] -= h; 855 nI++; 856 } 857 ierr = VecRestoreArray(U,&ww);CHKERRQ(ierr); 858 ierr = VecRestoreArray(a,&aa);CHKERRQ(ierr); 859 PetscFunctionReturn(0); 860 } 861 #endif 862 863 #if defined(PETSC_HAVE_ADIC) 864 EXTERN_C_BEGIN 865 #include "adic/ad_utils.h" 866 EXTERN_C_END 867 868 #undef __FUNCT__ 869 #define __FUNCT__ "DMDAComputeJacobian1WithAdic" 870 /*@C 871 DMDAComputeJacobian1WithAdic - Evaluates a adiC provided Jacobian function on each processor that 872 share a DMDA 873 874 Input Parameters: 875 + da - the DM that defines the grid 876 . vu - input vector (ghosted) 877 . J - output matrix 878 - w - any user data 879 880 Level: advanced 881 882 Notes: Does NOT do ghost updates on vu upon entry 883 884 .seealso: DMDAFormFunction1() 885 886 @*/ 887 PetscErrorCode DMDAComputeJacobian1WithAdic(DM da,Vec vu,Mat J,void *w) 888 { 889 PetscErrorCode ierr; 890 PetscInt gtdof,tdof; 891 PetscScalar *ustart; 892 DMDALocalInfo info; 893 void *ad_u,*ad_f,*ad_ustart,*ad_fstart; 894 ISColoring iscoloring; 895 896 PetscFunctionBegin; 897 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 898 899 PetscADResetIndep(); 900 901 /* get space for derivative objects. 
*/ 902 ierr = DMDAGetAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,>dof);CHKERRQ(ierr); 903 ierr = DMDAGetAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr); 904 ierr = VecGetArray(vu,&ustart);CHKERRQ(ierr); 905 ierr = DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);CHKERRQ(ierr); 906 907 PetscADSetValueAndColor(ad_ustart,gtdof,iscoloring->colors,ustart); 908 909 ierr = VecRestoreArray(vu,&ustart);CHKERRQ(ierr); 910 ierr = ISColoringDestroy(iscoloring);CHKERRQ(ierr); 911 ierr = PetscADIncrementTotalGradSize(iscoloring->n);CHKERRQ(ierr); 912 PetscADSetIndepDone(); 913 914 ierr = PetscLogEventBegin(DMDA_LocalADFunction,0,0,0,0);CHKERRQ(ierr); 915 ierr = (*dd->adic_lf)(&info,ad_u,ad_f,w);CHKERRQ(ierr); 916 ierr = PetscLogEventEnd(DMDA_LocalADFunction,0,0,0,0);CHKERRQ(ierr); 917 918 /* stick the values into the matrix */ 919 ierr = MatSetValuesAdic(J,(PetscScalar**)ad_fstart);CHKERRQ(ierr); 920 921 /* return space for derivative objects. */ 922 ierr = DMDARestoreAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,>dof);CHKERRQ(ierr); 923 ierr = DMDARestoreAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr); 924 PetscFunctionReturn(0); 925 } 926 927 #undef __FUNCT__ 928 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAdic" 929 /*@C 930 DMDAMultiplyByJacobian1WithAdic - Applies an ADIC-provided Jacobian function to a vector on 931 each processor that shares a DMDA. 932 933 Input Parameters: 934 + da - the DM that defines the grid 935 . vu - Jacobian is computed at this point (ghosted) 936 . v - product is done on this vector (ghosted) 937 . fu - output vector = J(vu)*v (not ghosted) 938 - w - any user data 939 940 Notes: 941 This routine does NOT do ghost updates on vu upon entry. 
942 943 Level: advanced 944 945 .seealso: DMDAFormFunction1() 946 947 @*/ 948 PetscErrorCode DMDAMultiplyByJacobian1WithAdic(DM da,Vec vu,Vec v,Vec f,void *w) 949 { 950 PetscErrorCode ierr; 951 PetscInt i,gtdof,tdof; 952 PetscScalar *avu,*av,*af,*ad_vustart,*ad_fstart; 953 DMDALocalInfo info; 954 void *ad_vu,*ad_f; 955 956 PetscFunctionBegin; 957 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 958 959 /* get space for derivative objects. */ 960 ierr = DMDAGetAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,>dof);CHKERRQ(ierr); 961 ierr = DMDAGetAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr); 962 963 /* copy input vector into derivative object */ 964 ierr = VecGetArray(vu,&avu);CHKERRQ(ierr); 965 ierr = VecGetArray(v,&av);CHKERRQ(ierr); 966 for (i=0; i<gtdof; i++) { 967 ad_vustart[2*i] = avu[i]; 968 ad_vustart[2*i+1] = av[i]; 969 } 970 ierr = VecRestoreArray(vu,&avu);CHKERRQ(ierr); 971 ierr = VecRestoreArray(v,&av);CHKERRQ(ierr); 972 973 PetscADResetIndep(); 974 ierr = PetscADIncrementTotalGradSize(1);CHKERRQ(ierr); 975 PetscADSetIndepDone(); 976 977 ierr = (*dd->adicmf_lf)(&info,ad_vu,ad_f,w);CHKERRQ(ierr); 978 979 /* stick the values into the vector */ 980 ierr = VecGetArray(f,&af);CHKERRQ(ierr); 981 for (i=0; i<tdof; i++) { 982 af[i] = ad_fstart[2*i+1]; 983 } 984 ierr = VecRestoreArray(f,&af);CHKERRQ(ierr); 985 986 /* return space for derivative objects. */ 987 ierr = DMDARestoreAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,>dof);CHKERRQ(ierr); 988 ierr = DMDARestoreAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr); 989 PetscFunctionReturn(0); 990 } 991 #endif 992 993 #undef __FUNCT__ 994 #define __FUNCT__ "DMDAComputeJacobian1" 995 /*@ 996 DMDAComputeJacobian1 - Evaluates a local Jacobian function on each processor that 997 share a DMDA 998 999 Input Parameters: 1000 + da - the DM that defines the grid 1001 . vu - input vector (ghosted) 1002 . 
  J - output matrix
-  w - any user data

   Notes: Does NOT do ghost updates on vu upon entry

   Level: advanced

.seealso: DMDAFormFunction1()

@*/
PetscErrorCode DMDAComputeJacobian1(DM da,Vec vu,Mat J,void *w)
{
  PetscErrorCode ierr;
  void           *u;
  DMDALocalInfo  info;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  /* expose vu as a multidimensional array and hand it to the user's local Jacobian routine */
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
  ierr = (*dd->lj)(&info,u,J,w);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMDAComputeJacobian1WithAdifor"
/*
   DMDAComputeJacobian1WithAdifor - Evaluates a ADIFOR provided Jacobian local function on each processor that
   share a DMDA

   Input Parameters:
+  da - the DM that defines the grid
.  vu - input vector (ghosted)
.  J - output matrix
-  w - any user data

   Notes: Does NOT do ghost updates on vu upon entry

.seealso: DMDAFormFunction1()

*/
PetscErrorCode DMDAComputeJacobian1WithAdifor(DM da,Vec vu,Mat J,void *w)
{
  PetscErrorCode  ierr;
  PetscInt        i,Nc,N;
  ISColoringValue *color;
  DMDALocalInfo   info;
  PetscScalar     *u,*g_u,*g_f,*f = 0,*p_u;
  ISColoring      iscoloring;
  DM_DA           *dd = (DM_DA*)da->data;
  void (*lf)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*) =
    (void (*)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*))*dd->adifor_lf;

  PetscFunctionBegin;
  ierr = DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);CHKERRQ(ierr);
  Nc   = iscoloring->n;                          /* number of colors */
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  N    = info.gxm*info.gym*info.gzm*info.dof;    /* ghosted local size */

  /* get space for derivative objects. */
  ierr = PetscMalloc(Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar),&g_u);CHKERRQ(ierr);
  ierr = PetscMemzero(g_u,Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar));CHKERRQ(ierr);
  /* Seed the input array g_u with coloring information: each unknown gets a
     unit seed in the slot of its color (comment moved here; it previously sat
     after this loop, where it described nothing) */
  p_u   = g_u;
  color = iscoloring->colors;
  for (i=0; i<N; i++) {
    p_u[*color++] = 1.0;
    p_u          += Nc;
  }
  ierr = ISColoringDestroy(iscoloring);CHKERRQ(ierr);
  ierr = PetscMalloc2(Nc*info.xm*info.ym*info.zm*info.dof,PetscScalar,&g_f,info.xm*info.ym*info.zm*info.dof,PetscScalar,&f);CHKERRQ(ierr);

  /* run the ADIFOR-differentiated local function; it reports errors via the trailing ierr argument */
  ierr = VecGetArray(vu,&u);CHKERRQ(ierr);
  (lf)(&Nc,&info,u,g_u,&Nc,f,g_f,&Nc,w,&ierr);CHKERRQ(ierr);
  ierr = VecRestoreArray(vu,&u);CHKERRQ(ierr);

  /* stick the values into the matrix */
  /* PetscScalarView(Nc*info.xm*info.ym,g_f,0); */
  ierr = MatSetValuesAdifor(J,Nc,g_f);CHKERRQ(ierr);

  /* return space for derivative objects. */
  ierr = PetscFree(g_u);CHKERRQ(ierr);
  ierr = PetscFree2(g_f,f);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAFormJacobianLocal"
/*@C
   DMDAFormJacobianLocal - This is a universal Jacobian evaluation routine for
   a local DM function.

   Collective on DMDA

   Input Parameters:
+  da - the DM context
.  func - The local function
.  X - input vector
.
J - Jacobian matrix 1104 - ctx - A user context 1105 1106 Level: intermediate 1107 1108 .seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(), 1109 SNESSetFunction(), SNESSetJacobian() 1110 1111 @*/ 1112 PetscErrorCode DMDAFormJacobianLocal(DM da, DMDALocalFunction1 func, Vec X, Mat J, void *ctx) 1113 { 1114 Vec localX; 1115 DMDALocalInfo info; 1116 void *u; 1117 PetscErrorCode ierr; 1118 1119 PetscFunctionBegin; 1120 ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr); 1121 /* 1122 Scatter ghost points to local vector, using the 2-step process 1123 DMGlobalToLocalBegin(), DMGlobalToLocalEnd(). 1124 */ 1125 ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr); 1126 ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr); 1127 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 1128 ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr); 1129 ierr = (*func)(&info,u,J,ctx);CHKERRQ(ierr); 1130 ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr); 1131 ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr); 1132 PetscFunctionReturn(0); 1133 } 1134 1135 #undef __FUNCT__ 1136 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAD" 1137 /*@C 1138 DMDAMultiplyByJacobian1WithAD - Applies a Jacobian function supplied by ADIFOR or ADIC 1139 to a vector on each processor that shares a DMDA. 1140 1141 Input Parameters: 1142 + da - the DM that defines the grid 1143 . vu - Jacobian is computed at this point (ghosted) 1144 . v - product is done on this vector (ghosted) 1145 . fu - output vector = J(vu)*v (not ghosted) 1146 - w - any user data 1147 1148 Notes: 1149 This routine does NOT do ghost updates on vu and v upon entry. 1150 1151 Automatically calls DMDAMultiplyByJacobian1WithAdifor() or DMDAMultiplyByJacobian1WithAdic() 1152 depending on whether DMDASetLocalAdicMFFunction() or DMDASetLocalAdiforMFFunction() was called. 

   Level: advanced

.seealso: DMDAFormFunction1(), DMDAMultiplyByJacobian1WithAdifor(), DMDAMultiplyByJacobian1WithAdic()

@*/
PetscErrorCode DMDAMultiplyByJacobian1WithAD(DM da,Vec u,Vec v,Vec f,void *w)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  /* dispatch to whichever AD-generated matrix-free routine the user registered */
  if (dd->adicmf_lf) {
#if defined(PETSC_HAVE_ADIC)
    ierr = DMDAMultiplyByJacobian1WithAdic(da,u,v,f,w);CHKERRQ(ierr);
#else
    /* ADIC routine was registered but this build has no ADIC support */
    SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP_SYS,"Requires ADIC to be installed and cannot use complex numbers");
#endif
  } else if (dd->adiformf_lf) {
    ierr = DMDAMultiplyByJacobian1WithAdifor(da,u,v,f,w);CHKERRQ(ierr);
  } else {
    SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ORDER,"Must call DMDASetLocalAdiforMFFunction() or DMDASetLocalAdicMFFunction() before using");
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMDAMultiplyByJacobian1WithAdifor"
/*@C
   DMDAMultiplyByJacobian1WithAdifor - Applies a ADIFOR provided Jacobian function on each processor that
   share a DM to a vector

   Input Parameters:
+  da - the DM that defines the grid
.  vu - Jacobian is computed at this point (ghosted)
.  v - product is done on this vector (ghosted)
.
fu - output vector = J(vu)*v (not ghosted) 1191 - w - any user data 1192 1193 Notes: Does NOT do ghost updates on vu and v upon entry 1194 1195 Level: advanced 1196 1197 .seealso: DMDAFormFunction1() 1198 1199 @*/ 1200 PetscErrorCode DMDAMultiplyByJacobian1WithAdifor(DM da,Vec u,Vec v,Vec f,void *w) 1201 { 1202 PetscErrorCode ierr; 1203 PetscScalar *au,*av,*af,*awork; 1204 Vec work; 1205 DMDALocalInfo info; 1206 DM_DA *dd = (DM_DA*)da->data; 1207 void (*lf)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*) = 1208 (void (*)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*))*dd->adiformf_lf; 1209 1210 PetscFunctionBegin; 1211 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 1212 1213 ierr = DMGetGlobalVector(da,&work);CHKERRQ(ierr); 1214 ierr = VecGetArray(u,&au);CHKERRQ(ierr); 1215 ierr = VecGetArray(v,&av);CHKERRQ(ierr); 1216 ierr = VecGetArray(f,&af);CHKERRQ(ierr); 1217 ierr = VecGetArray(work,&awork);CHKERRQ(ierr); 1218 (lf)(&info,au,av,awork,af,w,&ierr);CHKERRQ(ierr); 1219 ierr = VecRestoreArray(u,&au);CHKERRQ(ierr); 1220 ierr = VecRestoreArray(v,&av);CHKERRQ(ierr); 1221 ierr = VecRestoreArray(f,&af);CHKERRQ(ierr); 1222 ierr = VecRestoreArray(work,&awork);CHKERRQ(ierr); 1223 ierr = DMRestoreGlobalVector(da,&work);CHKERRQ(ierr); 1224 1225 PetscFunctionReturn(0); 1226 } 1227 1228 #undef __FUNCT__ 1229 #define __FUNCT__ "DMSetUp_DA_2D" 1230 PetscErrorCode DMSetUp_DA_2D(DM da) 1231 { 1232 DM_DA *dd = (DM_DA*)da->data; 1233 const PetscInt M = dd->M; 1234 const PetscInt N = dd->N; 1235 PetscInt m = dd->m; 1236 PetscInt n = dd->n; 1237 const PetscInt dof = dd->w; 1238 const PetscInt s = dd->s; 1239 const DMDABoundaryType wrap = dd->wrap; 1240 const DMDAStencilType stencil_type = dd->stencil_type; 1241 PetscInt *lx = dd->lx; 1242 PetscInt *ly = dd->ly; 1243 MPI_Comm comm; 1244 PetscMPIInt rank,size; 1245 PetscInt xs,xe,ys,ye,x,y,Xs,Xe,Ys,Ye,start,end,IXs,IXe,IYs,IYe; 1246 PetscInt 
up,down,left,right,i,n0,n1,n2,n3,n5,n6,n7,n8,*idx,nn,*idx_cpy; 1247 const PetscInt *idx_full; 1248 PetscInt xbase,*bases,*ldims,j,x_t,y_t,s_t,base,count; 1249 PetscInt s_x,s_y; /* s proportionalized to w */ 1250 PetscInt sn0 = 0,sn2 = 0,sn6 = 0,sn8 = 0; 1251 Vec local,global; 1252 VecScatter ltog,gtol; 1253 IS to,from,ltogis; 1254 PetscErrorCode ierr; 1255 1256 PetscFunctionBegin; 1257 ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr); 1258 1259 if (dof < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof); 1260 if (s < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s); 1261 1262 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1263 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 1264 1265 dd->dim = 2; 1266 ierr = PetscMalloc(dof*sizeof(char*),&dd->fieldname);CHKERRQ(ierr); 1267 ierr = PetscMemzero(dd->fieldname,dof*sizeof(char*));CHKERRQ(ierr); 1268 1269 if (m != PETSC_DECIDE) { 1270 if (m < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in X direction: %D",m); 1271 else if (m > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in X direction: %D %d",m,size); 1272 } 1273 if (n != PETSC_DECIDE) { 1274 if (n < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in Y direction: %D",n); 1275 else if (n > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in Y direction: %D %d",n,size); 1276 } 1277 1278 if (m == PETSC_DECIDE || n == PETSC_DECIDE) { 1279 if (n != PETSC_DECIDE) { 1280 m = size/n; 1281 } else if (m != PETSC_DECIDE) { 1282 n = size/m; 1283 } else { 1284 /* try for squarish distribution */ 1285 m = (PetscInt)(0.5 + sqrt(((double)M)*((double)size)/((double)N))); 1286 if (!m) m = 1; 1287 while (m > 0) { 1288 n = size/m; 1289 if (m*n == size) break; 1290 m--; 1291 } 1292 if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;} 1293 } 1294 if (m*n != size) 
SETERRQ(comm,PETSC_ERR_PLIB,"Unable to create partition, check the size of the communicator and input m and n "); 1295 } else if (m*n != size) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Given Bad partition"); 1296 1297 if (M < m) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in x direction is too fine! %D %D",M,m); 1298 if (N < n) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in y direction is too fine! %D %D",N,n); 1299 1300 /* 1301 Determine locally owned region 1302 xs is the first local node number, x is the number of local nodes 1303 */ 1304 if (!lx) { 1305 ierr = PetscMalloc(m*sizeof(PetscInt), &dd->lx);CHKERRQ(ierr); 1306 lx = dd->lx; 1307 for (i=0; i<m; i++) { 1308 lx[i] = M/m + ((M % m) > i); 1309 } 1310 } 1311 x = lx[rank % m]; 1312 xs = 0; 1313 for (i=0; i<(rank % m); i++) { 1314 xs += lx[i]; 1315 } 1316 #if defined(PETSC_USE_DEBUG) 1317 left = xs; 1318 for (i=(rank % m); i<m; i++) { 1319 left += lx[i]; 1320 } 1321 if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M: %D %D",left,M); 1322 #endif 1323 1324 /* 1325 Determine locally owned region 1326 ys is the first local node number, y is the number of local nodes 1327 */ 1328 if (!ly) { 1329 ierr = PetscMalloc(n*sizeof(PetscInt), &dd->ly);CHKERRQ(ierr); 1330 ly = dd->ly; 1331 for (i=0; i<n; i++) { 1332 ly[i] = N/n + ((N % n) > i); 1333 } 1334 } 1335 y = ly[rank/m]; 1336 ys = 0; 1337 for (i=0; i<(rank/m); i++) { 1338 ys += ly[i]; 1339 } 1340 #if defined(PETSC_USE_DEBUG) 1341 left = ys; 1342 for (i=(rank/m); i<n; i++) { 1343 left += ly[i]; 1344 } 1345 if (left != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of ly across processors not equal to N: %D %D",left,N); 1346 #endif 1347 1348 if (x < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s); 1349 if (y < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local y-width of domain y %D is smaller than stencil 
width s %D",y,s); 1350 xe = xs + x; 1351 ye = ys + y; 1352 1353 /* determine ghost region (Xs) and region scattered into (IXs) */ 1354 /* Assume No Periodicity */ 1355 if (xs-s > 0) { Xs = xs - s; IXs = xs - s; } else { Xs = 0; IXs = 0; } 1356 if (xe+s <= M) { Xe = xe + s; IXe = xe + s; } else { Xe = M; IXe = M; } 1357 if (ys-s > 0) { Ys = ys - s; IYs = ys - s; } else { Ys = 0; IYs = 0; } 1358 if (ye+s <= N) { Ye = ye + s; IYe = ye + s; } else { Ye = N; IYe = N; } 1359 1360 /* fix for periodicity/ghosted */ 1361 if (DMDAXGhosted(wrap)) { Xs = xs - s; Xe = xe + s; } 1362 if (DMDAXPeriodic(wrap)) { IXs = xs - s; IXe = xe + s; } 1363 if (DMDAYGhosted(wrap)) { Ys = ys - s; Ye = ye + s; } 1364 if (DMDAYPeriodic(wrap)) { IYs = ys - s; IYe = ye + s; } 1365 1366 /* Resize all X parameters to reflect w */ 1367 s_x = s; 1368 s_y = s; 1369 1370 /* determine starting point of each processor */ 1371 nn = x*y; 1372 ierr = PetscMalloc2(size+1,PetscInt,&bases,size,PetscInt,&ldims);CHKERRQ(ierr); 1373 ierr = MPI_Allgather(&nn,1,MPIU_INT,ldims,1,MPIU_INT,comm);CHKERRQ(ierr); 1374 bases[0] = 0; 1375 for (i=1; i<=size; i++) { 1376 bases[i] = ldims[i-1]; 1377 } 1378 for (i=1; i<=size; i++) { 1379 bases[i] += bases[i-1]; 1380 } 1381 base = bases[rank]*dof; 1382 1383 /* allocate the base parallel and sequential vectors */ 1384 dd->Nlocal = x*y*dof; 1385 ierr = VecCreateMPIWithArray(comm,dd->Nlocal,PETSC_DECIDE,0,&global);CHKERRQ(ierr); 1386 ierr = VecSetBlockSize(global,dof);CHKERRQ(ierr); 1387 dd->nlocal = (Xe-Xs)*(Ye-Ys)*dof; 1388 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->nlocal,0,&local);CHKERRQ(ierr); 1389 ierr = VecSetBlockSize(local,dof);CHKERRQ(ierr); 1390 1391 /* generate appropriate vector scatters */ 1392 /* local to global inserts non-ghost point region into global */ 1393 ierr = VecGetOwnershipRange(global,&start,&end);CHKERRQ(ierr); 1394 ierr = ISCreateStride(comm,x*y*dof,start,1,&to);CHKERRQ(ierr); 1395 1396 count = x*y; 1397 ierr = 
PetscMalloc(x*y*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1398 left = xs - Xs; right = left + x; 1399 down = ys - Ys; up = down + y; 1400 count = 0; 1401 for (i=down; i<up; i++) { 1402 for (j=left; j<right; j++) { 1403 idx[count++] = i*(Xe-Xs) + j; 1404 } 1405 } 1406 1407 ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&from);CHKERRQ(ierr); 1408 ierr = VecScatterCreate(local,from,global,to,<og);CHKERRQ(ierr); 1409 ierr = PetscLogObjectParent(dd,ltog);CHKERRQ(ierr); 1410 ierr = ISDestroy(from);CHKERRQ(ierr); 1411 ierr = ISDestroy(to);CHKERRQ(ierr); 1412 1413 /* global to local must include ghost points within the domain, 1414 but not ghost points outside the domain that aren't periodic */ 1415 if (stencil_type == DMDA_STENCIL_BOX) { 1416 count = (IXe-IXs)*(IYe-IYs); 1417 ierr = PetscMalloc(count*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1418 1419 left = IXs - Xs; right = left + (IXe-IXs); 1420 down = IYs - Ys; up = down + (IYe-IYs); 1421 count = 0; 1422 for (i=down; i<up; i++) { 1423 for (j=left; j<right; j++) { 1424 idx[count++] = j + i*(Xe-Xs); 1425 } 1426 } 1427 ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&to);CHKERRQ(ierr); 1428 1429 } else { 1430 /* must drop into cross shape region */ 1431 /* ---------| 1432 | top | 1433 |--- ---| up 1434 | middle | 1435 | | 1436 ---- ---- down 1437 | bottom | 1438 ----------- 1439 Xs xs xe Xe */ 1440 count = (ys-IYs)*x + y*(IXe-IXs) + (IYe-ye)*x; 1441 ierr = PetscMalloc(count*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1442 1443 left = xs - Xs; right = left + x; 1444 down = ys - Ys; up = down + y; 1445 count = 0; 1446 /* bottom */ 1447 for (i=(IYs-Ys); i<down; i++) { 1448 for (j=left; j<right; j++) { 1449 idx[count++] = j + i*(Xe-Xs); 1450 } 1451 } 1452 /* middle */ 1453 for (i=down; i<up; i++) { 1454 for (j=(IXs-Xs); j<(IXe-Xs); j++) { 1455 idx[count++] = j + i*(Xe-Xs); 1456 } 1457 } 1458 /* top */ 1459 for (i=up; i<up+IYe-ye; i++) { 1460 for (j=left; j<right; j++) { 1461 idx[count++] = j + i*(Xe-Xs); 1462 } 1463 } 
1464 ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&to);CHKERRQ(ierr); 1465 } 1466 1467 1468 /* determine who lies on each side of us stored in n6 n7 n8 1469 n3 n5 1470 n0 n1 n2 1471 */ 1472 1473 /* Assume the Non-Periodic Case */ 1474 n1 = rank - m; 1475 if (rank % m) { 1476 n0 = n1 - 1; 1477 } else { 1478 n0 = -1; 1479 } 1480 if ((rank+1) % m) { 1481 n2 = n1 + 1; 1482 n5 = rank + 1; 1483 n8 = rank + m + 1; if (n8 >= m*n) n8 = -1; 1484 } else { 1485 n2 = -1; n5 = -1; n8 = -1; 1486 } 1487 if (rank % m) { 1488 n3 = rank - 1; 1489 n6 = n3 + m; if (n6 >= m*n) n6 = -1; 1490 } else { 1491 n3 = -1; n6 = -1; 1492 } 1493 n7 = rank + m; if (n7 >= m*n) n7 = -1; 1494 1495 if (DMDAXPeriodic(wrap) && DMDAYPeriodic(wrap)) { 1496 /* Modify for Periodic Cases */ 1497 /* Handle all four corners */ 1498 if ((n6 < 0) && (n7 < 0) && (n3 < 0)) n6 = m-1; 1499 if ((n8 < 0) && (n7 < 0) && (n5 < 0)) n8 = 0; 1500 if ((n2 < 0) && (n5 < 0) && (n1 < 0)) n2 = size-m; 1501 if ((n0 < 0) && (n3 < 0) && (n1 < 0)) n0 = size-1; 1502 1503 /* Handle Top and Bottom Sides */ 1504 if (n1 < 0) n1 = rank + m * (n-1); 1505 if (n7 < 0) n7 = rank - m * (n-1); 1506 if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1; 1507 if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1; 1508 if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1; 1509 if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1; 1510 1511 /* Handle Left and Right Sides */ 1512 if (n3 < 0) n3 = rank + (m-1); 1513 if (n5 < 0) n5 = rank - (m-1); 1514 if ((n1 >= 0) && (n0 < 0)) n0 = rank-1; 1515 if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1; 1516 if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1; 1517 if ((n7 >= 0) && (n8 < 0)) n8 = rank+1; 1518 } else if (DMDAYPeriodic(wrap)) { /* Handle Top and Bottom Sides */ 1519 if (n1 < 0) n1 = rank + m * (n-1); 1520 if (n7 < 0) n7 = rank - m * (n-1); 1521 if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1; 1522 if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1; 1523 if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1; 1524 if ((n5 >= 0) 
&& (n8 < 0)) n8 = (rank%m)+1; 1525 } else if (DMDAXPeriodic(wrap)) { /* Handle Left and Right Sides */ 1526 if (n3 < 0) n3 = rank + (m-1); 1527 if (n5 < 0) n5 = rank - (m-1); 1528 if ((n1 >= 0) && (n0 < 0)) n0 = rank-1; 1529 if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1; 1530 if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1; 1531 if ((n7 >= 0) && (n8 < 0)) n8 = rank+1; 1532 } 1533 1534 ierr = PetscMalloc(9*sizeof(PetscInt),&dd->neighbors);CHKERRQ(ierr); 1535 dd->neighbors[0] = n0; 1536 dd->neighbors[1] = n1; 1537 dd->neighbors[2] = n2; 1538 dd->neighbors[3] = n3; 1539 dd->neighbors[4] = rank; 1540 dd->neighbors[5] = n5; 1541 dd->neighbors[6] = n6; 1542 dd->neighbors[7] = n7; 1543 dd->neighbors[8] = n8; 1544 1545 if (stencil_type == DMDA_STENCIL_STAR) { 1546 /* save corner processor numbers */ 1547 sn0 = n0; sn2 = n2; sn6 = n6; sn8 = n8; 1548 n0 = n2 = n6 = n8 = -1; 1549 } 1550 1551 ierr = PetscMalloc((Xe-Xs)*(Ye-Ys)*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1552 ierr = PetscLogObjectMemory(da,(Xe-Xs)*(Ye-Ys)*sizeof(PetscInt));CHKERRQ(ierr); 1553 1554 nn = 0; 1555 xbase = bases[rank]; 1556 for (i=1; i<=s_y; i++) { 1557 if (n0 >= 0) { /* left below */ 1558 x_t = lx[n0 % m]; 1559 y_t = ly[(n0/m)]; 1560 s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x; 1561 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1562 } 1563 if (n1 >= 0) { /* directly below */ 1564 x_t = x; 1565 y_t = ly[(n1/m)]; 1566 s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t; 1567 for (j=0; j<x_t; j++) { idx[nn++] = s_t++;} 1568 } 1569 if (n2 >= 0) { /* right below */ 1570 x_t = lx[n2 % m]; 1571 y_t = ly[(n2/m)]; 1572 s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t; 1573 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1574 } 1575 } 1576 1577 for (i=0; i<y; i++) { 1578 if (n3 >= 0) { /* directly left */ 1579 x_t = lx[n3 % m]; 1580 /* y_t = y; */ 1581 s_t = bases[n3] + (i+1)*x_t - s_x; 1582 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1583 } 1584 1585 for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */ 1586 1587 if (n5 >= 0) { /* 
directly right */ 1588 x_t = lx[n5 % m]; 1589 /* y_t = y; */ 1590 s_t = bases[n5] + (i)*x_t; 1591 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1592 } 1593 } 1594 1595 for (i=1; i<=s_y; i++) { 1596 if (n6 >= 0) { /* left above */ 1597 x_t = lx[n6 % m]; 1598 /* y_t = ly[(n6/m)]; */ 1599 s_t = bases[n6] + (i)*x_t - s_x; 1600 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1601 } 1602 if (n7 >= 0) { /* directly above */ 1603 x_t = x; 1604 /* y_t = ly[(n7/m)]; */ 1605 s_t = bases[n7] + (i-1)*x_t; 1606 for (j=0; j<x_t; j++) { idx[nn++] = s_t++;} 1607 } 1608 if (n8 >= 0) { /* right above */ 1609 x_t = lx[n8 % m]; 1610 /* y_t = ly[(n8/m)]; */ 1611 s_t = bases[n8] + (i-1)*x_t; 1612 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1613 } 1614 } 1615 1616 ierr = ISCreateBlock(comm,dof,nn,idx,PETSC_COPY_VALUES,&from);CHKERRQ(ierr); 1617 ierr = VecScatterCreate(global,from,local,to,>ol);CHKERRQ(ierr); 1618 ierr = PetscLogObjectParent(da,gtol);CHKERRQ(ierr); 1619 ierr = ISDestroy(to);CHKERRQ(ierr); 1620 ierr = ISDestroy(from);CHKERRQ(ierr); 1621 1622 if (stencil_type == DMDA_STENCIL_STAR) { 1623 n0 = sn0; n2 = sn2; n6 = sn6; n8 = sn8; 1624 } 1625 1626 if ((stencil_type == DMDA_STENCIL_STAR) || 1627 (!DMDAXPeriodic(wrap) && DMDAXGhosted(wrap)) || 1628 (!DMDAYPeriodic(wrap) && DMDAYGhosted(wrap))) { 1629 /* 1630 Recompute the local to global mappings, this time keeping the 1631 information about the cross corner processor numbers and any ghosted 1632 but not periodic indices. 
1633 */ 1634 nn = 0; 1635 xbase = bases[rank]; 1636 for (i=1; i<=s_y; i++) { 1637 if (n0 >= 0) { /* left below */ 1638 x_t = lx[n0 % m]; 1639 y_t = ly[(n0/m)]; 1640 s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x; 1641 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1642 } else if (xs-Xs > 0 && ys-Ys > 0) { 1643 for (j=0; j<s_x; j++) { idx[nn++] = -1;} 1644 } 1645 if (n1 >= 0) { /* directly below */ 1646 x_t = x; 1647 y_t = ly[(n1/m)]; 1648 s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t; 1649 for (j=0; j<x_t; j++) { idx[nn++] = s_t++;} 1650 } else if (ys-Ys > 0) { 1651 for (j=0; j<x; j++) { idx[nn++] = -1;} 1652 } 1653 if (n2 >= 0) { /* right below */ 1654 x_t = lx[n2 % m]; 1655 y_t = ly[(n2/m)]; 1656 s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t; 1657 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1658 } else if (Xe-xe> 0 && ys-Ys > 0) { 1659 for (j=0; j<s_x; j++) { idx[nn++] = -1;} 1660 } 1661 } 1662 1663 for (i=0; i<y; i++) { 1664 if (n3 >= 0) { /* directly left */ 1665 x_t = lx[n3 % m]; 1666 /* y_t = y; */ 1667 s_t = bases[n3] + (i+1)*x_t - s_x; 1668 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1669 } else if (xs-Xs > 0) { 1670 for (j=0; j<s_x; j++) { idx[nn++] = -1;} 1671 } 1672 1673 for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */ 1674 1675 if (n5 >= 0) { /* directly right */ 1676 x_t = lx[n5 % m]; 1677 /* y_t = y; */ 1678 s_t = bases[n5] + (i)*x_t; 1679 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1680 } else if (Xe-xe > 0) { 1681 for (j=0; j<s_x; j++) { idx[nn++] = -1;} 1682 } 1683 } 1684 1685 for (i=1; i<=s_y; i++) { 1686 if (n6 >= 0) { /* left above */ 1687 x_t = lx[n6 % m]; 1688 /* y_t = ly[(n6/m)]; */ 1689 s_t = bases[n6] + (i)*x_t - s_x; 1690 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1691 } else if (xs-Xs > 0 && Ye-ye > 0) { 1692 for (j=0; j<s_x; j++) { idx[nn++] = -1;} 1693 } 1694 if (n7 >= 0) { /* directly above */ 1695 x_t = x; 1696 /* y_t = ly[(n7/m)]; */ 1697 s_t = bases[n7] + (i-1)*x_t; 1698 for (j=0; j<x_t; j++) { idx[nn++] = s_t++;} 1699 } else if 
(Ye-ye > 0) { 1700 for (j=0; j<x; j++) { idx[nn++] = -1;} 1701 } 1702 if (n8 >= 0) { /* right above */ 1703 x_t = lx[n8 % m]; 1704 /* y_t = ly[(n8/m)]; */ 1705 s_t = bases[n8] + (i-1)*x_t; 1706 for (j=0; j<s_x; j++) { idx[nn++] = s_t++;} 1707 } else if (Xe-xe > 0 && Ye-ye > 0) { 1708 for (j=0; j<s_x; j++) { idx[nn++] = -1;} 1709 } 1710 } 1711 } 1712 /* 1713 Set the local to global ordering in the global vector, this allows use 1714 of VecSetValuesLocal(). 1715 */ 1716 ierr = ISCreateBlock(comm,dof,nn,idx,PETSC_OWN_POINTER,<ogis);CHKERRQ(ierr); 1717 ierr = PetscMalloc(nn*dof*sizeof(PetscInt),&idx_cpy);CHKERRQ(ierr); 1718 ierr = PetscLogObjectMemory(da,nn*dof*sizeof(PetscInt));CHKERRQ(ierr); 1719 ierr = ISGetIndices(ltogis, &idx_full); 1720 ierr = PetscMemcpy(idx_cpy,idx_full,nn*dof*sizeof(PetscInt));CHKERRQ(ierr); 1721 ierr = ISRestoreIndices(ltogis, &idx_full); 1722 ierr = ISLocalToGlobalMappingCreateIS(ltogis,&da->ltogmap);CHKERRQ(ierr); 1723 ierr = PetscLogObjectParent(da,da->ltogmap);CHKERRQ(ierr); 1724 ierr = ISDestroy(ltogis);CHKERRQ(ierr); 1725 ierr = ISLocalToGlobalMappingBlock(da->ltogmap,dd->w,&da->ltogmapb);CHKERRQ(ierr); 1726 ierr = PetscLogObjectParent(da,da->ltogmap);CHKERRQ(ierr); 1727 1728 ierr = PetscFree2(bases,ldims);CHKERRQ(ierr); 1729 dd->m = m; dd->n = n; 1730 /* note petsc expects xs/xe/Xs/Xe to be multiplied by #dofs in many places */ 1731 dd->xs = xs*dof; dd->xe = xe*dof; dd->ys = ys; dd->ye = ye; dd->zs = 0; dd->ze = 1; 1732 dd->Xs = Xs*dof; dd->Xe = Xe*dof; dd->Ys = Ys; dd->Ye = Ye; dd->Zs = 0; dd->Ze = 1; 1733 1734 ierr = VecDestroy(local);CHKERRQ(ierr); 1735 ierr = VecDestroy(global);CHKERRQ(ierr); 1736 1737 dd->gtol = gtol; 1738 dd->ltog = ltog; 1739 dd->idx = idx_cpy; 1740 dd->Nl = nn*dof; 1741 dd->base = base; 1742 da->ops->view = DMView_DA_2d; 1743 dd->ltol = PETSC_NULL; 1744 dd->ao = PETSC_NULL; 1745 1746 PetscFunctionReturn(0); 1747 } 1748 1749 #undef __FUNCT__ 1750 #define __FUNCT__ "DMDACreate2d" 1751 /*@C 1752 DMDACreate2d - 
Creates an object that will manage the communication of two-dimensional
   regular array data that is distributed across some processors.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  wrap - type of periodicity the array should have.
          Use one of DMDA_NONPERIODIC, DMDA_XPERIODIC, DMDA_YPERIODIC, or DMDA_XYPERIODIC.
.  stencil_type - stencil type. Use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
.  M,N - global dimension in each direction of the array (use -M and or -N to indicate that it may be set to a different value
         from the command line with -da_grid_x <M> -da_grid_y <N>)
.  m,n - corresponding number of processors in each dimension
         (or PETSC_DECIDE to have them calculated)
.  dof - number of degrees of freedom per node
.  s - stencil width
-  lx, ly - arrays containing the number of nodes in each cell along
            the x and y coordinates, or PETSC_NULL. If non-null, these
            must have lengths m and n respectively, and the corresponding
            m and n cannot be PETSC_DECIDE. The sum of the lx[] entries
            must be M, and the sum of the ly[] entries must be N.

   Output Parameter:
.  da - the resulting distributed array object

   Options Database Key:
+  -da_view - Calls DMView() at the conclusion of DMDACreate2d()
.  -da_grid_x <nx> - number of grid points in x direction, if M < 0
.  -da_grid_y <ny> - number of grid points in y direction, if N < 0
.  -da_processors_x <nx> - number of processors in x direction
.  -da_processors_y <ny> - number of processors in y direction
.  -da_refine_x - refinement ratio in x direction
-  -da_refine_y - refinement ratio in y direction

   Level: beginner

   Notes:
   The stencil type DMDA_STENCIL_STAR with width 1 corresponds to the
   standard 5-pt stencil, while DMDA_STENCIL_BOX with width 1 denotes
   the standard 9-pt stencil.

   The array data itself is NOT stored in the DMDA, it is stored in Vec objects;
   The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
   and DMCreateLocalVector() and calls to VecDuplicate() if more are needed.

.keywords: distributed array, create, two-dimensional

.seealso: DMDestroy(), DMView(), DMDACreate1d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDAGetRefinementFactor(),
          DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDASetRefinementFactor(),
          DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMDALoad(), DMDAGetOwnershipRanges()

@*/
PetscErrorCode DMDACreate2d(MPI_Comm comm,DMDABoundaryType wrap,DMDAStencilType stencil_type,
                            PetscInt M,PetscInt N,PetscInt m,PetscInt n,PetscInt dof,PetscInt s,const PetscInt lx[],const PetscInt ly[],DM *da)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* staged construction: create the bare DMDA object, then set each property before setup */
  ierr = DMDACreate(comm, da);CHKERRQ(ierr);
  ierr = DMDASetDim(*da, 2);CHKERRQ(ierr);
  ierr = DMDASetSizes(*da, M, N, 1);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(*da, m, n, PETSC_DECIDE);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(*da, wrap);CHKERRQ(ierr);
  ierr = DMDASetDof(*da, dof);CHKERRQ(ierr);
  ierr = DMDASetStencilType(*da, stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr);
  ierr = DMDASetOwnershipRanges(*da, lx, ly, PETSC_NULL);CHKERRQ(ierr);
  /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
  ierr = DMSetFromOptions(*da);CHKERRQ(ierr);
  ierr = DMSetUp(*da);CHKERRQ(ierr);
  ierr = DMView_DA_Private(*da);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}