1 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 2 #include <petscsf.h> 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal; 6 7 const char *const DMBoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","TWIST","DM_BOUNDARY_",0}; 8 9 #undef __FUNCT__ 10 #define __FUNCT__ "DMCreate" 11 /*@ 12 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 13 14 If you never call DMSetType() it will generate an 15 error when you try to use the vector. 16 17 Collective on MPI_Comm 18 19 Input Parameter: 20 . comm - The communicator for the DM object 21 22 Output Parameter: 23 . dm - The DM object 24 25 Level: beginner 26 27 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 28 @*/ 29 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 30 { 31 DM v; 32 PetscErrorCode ierr; 33 34 PetscFunctionBegin; 35 PetscValidPointer(dm,2); 36 *dm = NULL; 37 ierr = PetscSysInitializePackage();CHKERRQ(ierr); 38 ierr = VecInitializePackage();CHKERRQ(ierr); 39 ierr = MatInitializePackage();CHKERRQ(ierr); 40 ierr = DMInitializePackage();CHKERRQ(ierr); 41 42 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 43 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 44 45 46 v->ltogmap = NULL; 47 v->ltogmapb = NULL; 48 v->bs = 1; 49 v->coloringtype = IS_COLORING_GLOBAL; 50 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 51 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 52 v->defaultSection = NULL; 53 v->defaultGlobalSection = NULL; 54 { 55 PetscInt i; 56 for (i = 0; i < 10; ++i) { 57 v->nullspaceConstructors[i] = NULL; 58 } 59 } 60 v->numFields = 0; 61 v->fields = NULL; 62 v->dmBC = NULL; 63 ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr); 64 ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr); 65 *dm = v; 66 PetscFunctionReturn(0); 67 } 68 69 #undef __FUNCT__ 70 #define __FUNCT__ "DMClone" 71 /*@ 72 DMClone - 
Creates a DM object with the same topology as the original. 73 74 Collective on MPI_Comm 75 76 Input Parameter: 77 . dm - The original DM object 78 79 Output Parameter: 80 . newdm - The new DM object 81 82 Level: beginner 83 84 .keywords: DM, topology, create 85 @*/ 86 PetscErrorCode DMClone(DM dm, DM *newdm) 87 { 88 PetscSF sf; 89 Vec coords; 90 void *ctx; 91 PetscErrorCode ierr; 92 93 PetscFunctionBegin; 94 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 95 PetscValidPointer(newdm,2); 96 ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr); 97 if (dm->ops->clone) { 98 ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr); 99 } 100 (*newdm)->setupcalled = PETSC_TRUE; 101 ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr); 102 ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr); 103 ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr); 104 ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr); 105 ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr); 106 if (coords) { 107 ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr); 108 } else { 109 ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr); 110 if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);} 111 } 112 PetscFunctionReturn(0); 113 } 114 115 #undef __FUNCT__ 116 #define __FUNCT__ "DMSetVecType" 117 /*@C 118 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 119 120 Logically Collective on DMDA 121 122 Input Parameter: 123 + da - initial distributed array 124 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 125 126 Options Database: 127 . 
-dm_vec_type ctype 128 129 Level: intermediate 130 131 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType() 132 @*/ 133 PetscErrorCode DMSetVecType(DM da,VecType ctype) 134 { 135 PetscErrorCode ierr; 136 137 PetscFunctionBegin; 138 PetscValidHeaderSpecific(da,DM_CLASSID,1); 139 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 140 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 141 PetscFunctionReturn(0); 142 } 143 144 #undef __FUNCT__ 145 #define __FUNCT__ "DMGetVecType" 146 /*@C 147 DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 148 149 Logically Collective on DMDA 150 151 Input Parameter: 152 . da - initial distributed array 153 154 Output Parameter: 155 . ctype - the vector type 156 157 Level: intermediate 158 159 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 160 @*/ 161 PetscErrorCode DMGetVecType(DM da,VecType *ctype) 162 { 163 PetscFunctionBegin; 164 PetscValidHeaderSpecific(da,DM_CLASSID,1); 165 *ctype = da->vectype; 166 PetscFunctionReturn(0); 167 } 168 169 #undef __FUNCT__ 170 #define __FUNCT__ "VecGetDM" 171 /*@ 172 VecGetDM - Gets the DM defining the data layout of the vector 173 174 Not collective 175 176 Input Parameter: 177 . v - The Vec 178 179 Output Parameter: 180 . 
dm - The DM 181 182 Level: intermediate 183 184 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 185 @*/ 186 PetscErrorCode VecGetDM(Vec v, DM *dm) 187 { 188 PetscErrorCode ierr; 189 190 PetscFunctionBegin; 191 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 192 PetscValidPointer(dm,2); 193 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 194 PetscFunctionReturn(0); 195 } 196 197 #undef __FUNCT__ 198 #define __FUNCT__ "VecSetDM" 199 /*@ 200 VecSetDM - Sets the DM defining the data layout of the vector 201 202 Not collective 203 204 Input Parameters: 205 + v - The Vec 206 - dm - The DM 207 208 Level: intermediate 209 210 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 211 @*/ 212 PetscErrorCode VecSetDM(Vec v, DM dm) 213 { 214 PetscErrorCode ierr; 215 216 PetscFunctionBegin; 217 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 218 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 219 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 220 PetscFunctionReturn(0); 221 } 222 223 #undef __FUNCT__ 224 #define __FUNCT__ "DMSetMatType" 225 /*@C 226 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 227 228 Logically Collective on DM 229 230 Input Parameter: 231 + dm - the DM context 232 . ctype - the matrix type 233 234 Options Database: 235 . 
-dm_mat_type ctype 236 237 Level: intermediate 238 239 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType() 240 @*/ 241 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 242 { 243 PetscErrorCode ierr; 244 245 PetscFunctionBegin; 246 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 247 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 248 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 249 PetscFunctionReturn(0); 250 } 251 252 #undef __FUNCT__ 253 #define __FUNCT__ "DMGetMatType" 254 /*@C 255 DMGetMatType - Gets the type of matrix created with DMCreateMatrix() 256 257 Logically Collective on DM 258 259 Input Parameter: 260 . dm - the DM context 261 262 Output Parameter: 263 . ctype - the matrix type 264 265 Options Database: 266 . -dm_mat_type ctype 267 268 Level: intermediate 269 270 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType() 271 @*/ 272 PetscErrorCode DMGetMatType(DM dm,MatType *ctype) 273 { 274 PetscFunctionBegin; 275 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 276 *ctype = dm->mattype; 277 PetscFunctionReturn(0); 278 } 279 280 #undef __FUNCT__ 281 #define __FUNCT__ "MatGetDM" 282 /*@ 283 MatGetDM - Gets the DM defining the data layout of the matrix 284 285 Not collective 286 287 Input Parameter: 288 . A - The Mat 289 290 Output Parameter: 291 . 
dm - The DM 292 293 Level: intermediate 294 295 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 296 @*/ 297 PetscErrorCode MatGetDM(Mat A, DM *dm) 298 { 299 PetscErrorCode ierr; 300 301 PetscFunctionBegin; 302 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 303 PetscValidPointer(dm,2); 304 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 305 PetscFunctionReturn(0); 306 } 307 308 #undef __FUNCT__ 309 #define __FUNCT__ "MatSetDM" 310 /*@ 311 MatSetDM - Sets the DM defining the data layout of the matrix 312 313 Not collective 314 315 Input Parameters: 316 + A - The Mat 317 - dm - The DM 318 319 Level: intermediate 320 321 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 322 @*/ 323 PetscErrorCode MatSetDM(Mat A, DM dm) 324 { 325 PetscErrorCode ierr; 326 327 PetscFunctionBegin; 328 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 329 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 330 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 331 PetscFunctionReturn(0); 332 } 333 334 #undef __FUNCT__ 335 #define __FUNCT__ "DMSetOptionsPrefix" 336 /*@C 337 DMSetOptionsPrefix - Sets the prefix used for searching for all 338 DMDA options in the database. 339 340 Logically Collective on DMDA 341 342 Input Parameter: 343 + da - the DMDA context 344 - prefix - the prefix to prepend to all option names 345 346 Notes: 347 A hyphen (-) must NOT be given at the beginning of the prefix name. 348 The first character of all runtime options is AUTOMATICALLY the hyphen. 
349 350 Level: advanced 351 352 .keywords: DMDA, set, options, prefix, database 353 354 .seealso: DMSetFromOptions() 355 @*/ 356 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 357 { 358 PetscErrorCode ierr; 359 360 PetscFunctionBegin; 361 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 362 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 363 PetscFunctionReturn(0); 364 } 365 366 #undef __FUNCT__ 367 #define __FUNCT__ "DMDestroy" 368 /*@ 369 DMDestroy - Destroys a vector packer or DMDA. 370 371 Collective on DM 372 373 Input Parameter: 374 . dm - the DM object to destroy 375 376 Level: developer 377 378 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 379 380 @*/ 381 PetscErrorCode DMDestroy(DM *dm) 382 { 383 PetscInt i, cnt = 0, f; 384 DMNamedVecLink nlink,nnext; 385 PetscErrorCode ierr; 386 387 PetscFunctionBegin; 388 if (!*dm) PetscFunctionReturn(0); 389 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 390 391 /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */ 392 for (f = 0; f < (*dm)->numFields; ++f) { 393 ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr); 394 ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr); 395 ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr); 396 } 397 /* count all the circular references of DM and its contained Vecs */ 398 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 399 if ((*dm)->localin[i]) cnt++; 400 if ((*dm)->globalin[i]) cnt++; 401 } 402 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 403 for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++; 404 if ((*dm)->x) { 405 DM obj; 406 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 407 if (obj == *dm) cnt++; 408 } 409 410 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; 
PetscFunctionReturn(0);} 411 /* 412 Need this test because the dm references the vectors that 413 reference the dm, so destroying the dm calls destroy on the 414 vectors that cause another destroy on the dm 415 */ 416 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 417 ((PetscObject) (*dm))->refct = 0; 418 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 419 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 420 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 421 } 422 nnext=(*dm)->namedglobal; 423 (*dm)->namedglobal = NULL; 424 for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named vectors */ 425 nnext = nlink->next; 426 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 427 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 428 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 429 ierr = PetscFree(nlink);CHKERRQ(ierr); 430 } 431 nnext=(*dm)->namedlocal; 432 (*dm)->namedlocal = NULL; 433 for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named local vectors */ 434 nnext = nlink->next; 435 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 436 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 437 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 438 ierr = PetscFree(nlink);CHKERRQ(ierr); 439 } 440 441 /* Destroy the list of hooks */ 442 { 443 DMCoarsenHookLink link,next; 444 for (link=(*dm)->coarsenhook; link; link=next) { 445 next = link->next; 446 ierr = PetscFree(link);CHKERRQ(ierr); 447 } 448 (*dm)->coarsenhook = NULL; 449 } 450 { 451 DMRefineHookLink link,next; 452 for (link=(*dm)->refinehook; link; link=next) { 453 next = link->next; 454 ierr = PetscFree(link);CHKERRQ(ierr); 455 } 456 (*dm)->refinehook = NULL; 457 } 458 { 459 DMSubDomainHookLink link,next; 460 for 
(link=(*dm)->subdomainhook; link; link=next) { 461 next = link->next; 462 ierr = PetscFree(link);CHKERRQ(ierr); 463 } 464 (*dm)->subdomainhook = NULL; 465 } 466 { 467 DMGlobalToLocalHookLink link,next; 468 for (link=(*dm)->gtolhook; link; link=next) { 469 next = link->next; 470 ierr = PetscFree(link);CHKERRQ(ierr); 471 } 472 (*dm)->gtolhook = NULL; 473 } 474 /* Destroy the work arrays */ 475 { 476 DMWorkLink link,next; 477 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 478 for (link=(*dm)->workin; link; link=next) { 479 next = link->next; 480 ierr = PetscFree(link->mem);CHKERRQ(ierr); 481 ierr = PetscFree(link);CHKERRQ(ierr); 482 } 483 (*dm)->workin = NULL; 484 } 485 486 ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr); 487 ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr); 488 ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr); 489 490 if ((*dm)->ctx && (*dm)->ctxdestroy) { 491 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 492 } 493 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 494 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 495 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 496 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 497 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 498 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 499 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 500 501 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 502 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 503 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 504 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 505 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 506 507 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 508 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 509 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 510 511 for (f = 0; f < (*dm)->numFields; ++f) { 512 ierr = 
PetscObjectDestroy((PetscObject *) &(*dm)->fields[f]);CHKERRQ(ierr); 513 } 514 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 515 ierr = DMDestroy(&(*dm)->dmBC);CHKERRQ(ierr); 516 /* if memory was published with SAWs then destroy it */ 517 ierr = PetscObjectSAWsViewOff((PetscObject)*dm);CHKERRQ(ierr); 518 519 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 520 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 521 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 522 PetscFunctionReturn(0); 523 } 524 525 #undef __FUNCT__ 526 #define __FUNCT__ "DMSetUp" 527 /*@ 528 DMSetUp - sets up the data structures inside a DM object 529 530 Collective on DM 531 532 Input Parameter: 533 . dm - the DM object to setup 534 535 Level: developer 536 537 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 538 539 @*/ 540 PetscErrorCode DMSetUp(DM dm) 541 { 542 PetscErrorCode ierr; 543 544 PetscFunctionBegin; 545 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 546 if (dm->setupcalled) PetscFunctionReturn(0); 547 if (dm->ops->setup) { 548 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 549 } 550 dm->setupcalled = PETSC_TRUE; 551 PetscFunctionReturn(0); 552 } 553 554 #undef __FUNCT__ 555 #define __FUNCT__ "DMSetFromOptions" 556 /*@ 557 DMSetFromOptions - sets parameters in a DM from the options database 558 559 Collective on DM 560 561 Input Parameter: 562 . dm - the DM object to set options for 563 564 Options Database: 565 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 566 . -dm_vec_type <type> type of vector to create inside DM 567 . 
-dm_mat_type <type> type of matrix to create inside DM 568 - -dm_coloring_type <global or ghosted> 569 570 Level: developer 571 572 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 573 574 @*/ 575 PetscErrorCode DMSetFromOptions(DM dm) 576 { 577 char typeName[256]; 578 PetscBool flg; 579 PetscErrorCode ierr; 580 581 PetscFunctionBegin; 582 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 583 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 584 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 585 ierr = PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 586 if (flg) { 587 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 588 } 589 ierr = PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 590 if (flg) { 591 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 592 } 593 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 594 if (dm->ops->setfromoptions) { 595 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 596 } 597 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 598 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 599 ierr = PetscOptionsEnd();CHKERRQ(ierr); 600 PetscFunctionReturn(0); 601 } 602 603 #undef __FUNCT__ 604 #define __FUNCT__ "DMView" 605 /*@C 606 DMView - Views a vector packer or DMDA. 
607 608 Collective on DM 609 610 Input Parameter: 611 + dm - the DM object to view 612 - v - the viewer 613 614 Level: developer 615 616 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 617 618 @*/ 619 PetscErrorCode DMView(DM dm,PetscViewer v) 620 { 621 PetscErrorCode ierr; 622 PetscBool isbinary; 623 624 PetscFunctionBegin; 625 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 626 if (!v) { 627 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr); 628 } 629 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 630 if (isbinary) { 631 PetscInt classid = DM_FILE_CLASSID; 632 char type[256]; 633 634 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 635 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 636 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 637 } 638 if (dm->ops->view) { 639 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 640 } 641 PetscFunctionReturn(0); 642 } 643 644 #undef __FUNCT__ 645 #define __FUNCT__ "DMCreateGlobalVector" 646 /*@ 647 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 648 649 Collective on DM 650 651 Input Parameter: 652 . dm - the DM object 653 654 Output Parameter: 655 . vec - the global vector 656 657 Level: beginner 658 659 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 660 661 @*/ 662 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 663 { 664 PetscErrorCode ierr; 665 666 PetscFunctionBegin; 667 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 668 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 669 PetscFunctionReturn(0); 670 } 671 672 #undef __FUNCT__ 673 #define __FUNCT__ "DMCreateLocalVector" 674 /*@ 675 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 676 677 Not Collective 678 679 Input Parameter: 680 . 
dm - the DM object 681 682 Output Parameter: 683 . vec - the local vector 684 685 Level: beginner 686 687 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 688 689 @*/ 690 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 691 { 692 PetscErrorCode ierr; 693 694 PetscFunctionBegin; 695 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 696 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 697 PetscFunctionReturn(0); 698 } 699 700 #undef __FUNCT__ 701 #define __FUNCT__ "DMGetLocalToGlobalMapping" 702 /*@ 703 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 704 705 Collective on DM 706 707 Input Parameter: 708 . dm - the DM that provides the mapping 709 710 Output Parameter: 711 . ltog - the mapping 712 713 Level: intermediate 714 715 Notes: 716 This mapping can then be used by VecSetLocalToGlobalMapping() or 717 MatSetLocalToGlobalMapping(). 718 719 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 720 @*/ 721 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 722 { 723 PetscErrorCode ierr; 724 725 PetscFunctionBegin; 726 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 727 PetscValidPointer(ltog,2); 728 if (!dm->ltogmap) { 729 PetscSection section, sectionGlobal; 730 731 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 732 if (section) { 733 PetscInt *ltog; 734 PetscInt pStart, pEnd, size, p, l; 735 736 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 737 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 738 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 739 ierr = PetscMalloc1(size, <og);CHKERRQ(ierr); /* We want the local+overlap size */ 740 for (p = pStart, l = 0; p < pEnd; ++p) { 741 PetscInt dof, off, c; 742 743 /* Should probably use constrained dofs */ 744 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 745 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 746 for (c = 0; 
c < dof; ++c, ++l) { 747 ltog[l] = off+c; 748 } 749 } 750 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 751 ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr); 752 } else { 753 if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 754 ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr); 755 } 756 } 757 *ltog = dm->ltogmap; 758 PetscFunctionReturn(0); 759 } 760 761 #undef __FUNCT__ 762 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 763 /*@ 764 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 765 766 Collective on DM 767 768 Input Parameter: 769 . da - the distributed array that provides the mapping 770 771 Output Parameter: 772 . ltog - the block mapping 773 774 Level: intermediate 775 776 Notes: 777 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 778 MatSetLocalToGlobalMappingBlock(). 
779 780 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 781 @*/ 782 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 783 { 784 PetscErrorCode ierr; 785 786 PetscFunctionBegin; 787 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 788 PetscValidPointer(ltog,2); 789 if (!dm->ltogmapb) { 790 PetscInt bs; 791 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 792 if (bs > 1) { 793 if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 794 ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 795 } else { 796 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 797 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 798 } 799 } 800 *ltog = dm->ltogmapb; 801 PetscFunctionReturn(0); 802 } 803 804 #undef __FUNCT__ 805 #define __FUNCT__ "DMGetBlockSize" 806 /*@ 807 DMGetBlockSize - Gets the inherent block size associated with a DM 808 809 Not Collective 810 811 Input Parameter: 812 . dm - the DM with block structure 813 814 Output Parameter: 815 . 
bs - the block size, 1 implies no exploitable block structure 816 817 Level: intermediate 818 819 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 820 @*/ 821 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 822 { 823 PetscFunctionBegin; 824 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 825 PetscValidPointer(bs,2); 826 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 827 *bs = dm->bs; 828 PetscFunctionReturn(0); 829 } 830 831 #undef __FUNCT__ 832 #define __FUNCT__ "DMCreateInterpolation" 833 /*@ 834 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 835 836 Collective on DM 837 838 Input Parameter: 839 + dm1 - the DM object 840 - dm2 - the second, finer DM object 841 842 Output Parameter: 843 + mat - the interpolation 844 - vec - the scaling (optional) 845 846 Level: developer 847 848 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 849 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 850 851 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 852 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
853 854 855 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 856 857 @*/ 858 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 859 { 860 PetscErrorCode ierr; 861 862 PetscFunctionBegin; 863 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 864 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 865 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 866 PetscFunctionReturn(0); 867 } 868 869 #undef __FUNCT__ 870 #define __FUNCT__ "DMCreateInjection" 871 /*@ 872 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 873 874 Collective on DM 875 876 Input Parameter: 877 + dm1 - the DM object 878 - dm2 - the second, finer DM object 879 880 Output Parameter: 881 . ctx - the injection 882 883 Level: developer 884 885 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 886 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 887 888 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 889 890 @*/ 891 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 892 { 893 PetscErrorCode ierr; 894 895 PetscFunctionBegin; 896 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 897 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 898 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 899 PetscFunctionReturn(0); 900 } 901 902 #undef __FUNCT__ 903 #define __FUNCT__ "DMCreateColoring" 904 /*@ 905 DMCreateColoring - Gets coloring for a DM 906 907 Collective on DM 908 909 Input Parameter: 910 + dm - the DM object 911 - ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 912 913 Output Parameter: 914 . 
coloring - the coloring 915 916 Level: developer 917 918 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType() 919 920 @*/ 921 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring) 922 { 923 PetscErrorCode ierr; 924 925 PetscFunctionBegin; 926 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 927 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 928 ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr); 929 PetscFunctionReturn(0); 930 } 931 932 #undef __FUNCT__ 933 #define __FUNCT__ "DMCreateMatrix" 934 /*@ 935 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 936 937 Collective on DM 938 939 Input Parameter: 940 . dm - the DM object 941 942 Output Parameter: 943 . mat - the empty Jacobian 944 945 Level: beginner 946 947 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 948 do not need to do it yourself. 949 950 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 951 the nonzero pattern call DMDASetMatPreallocateOnly() 952 953 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 954 internally by PETSc. 955 956 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 957 the indices for the global numbering for DMDAs which is complicated. 
958 959 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType() 960 961 @*/ 962 PetscErrorCode DMCreateMatrix(DM dm,Mat *mat) 963 { 964 PetscErrorCode ierr; 965 966 PetscFunctionBegin; 967 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 968 ierr = MatInitializePackage();CHKERRQ(ierr); 969 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 970 PetscValidPointer(mat,3); 971 ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr); 972 PetscFunctionReturn(0); 973 } 974 975 #undef __FUNCT__ 976 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 977 /*@ 978 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 979 preallocated but the nonzero structure and zero values will not be set. 980 981 Logically Collective on DMDA 982 983 Input Parameter: 984 + dm - the DM 985 - only - PETSC_TRUE if only want preallocation 986 987 Level: developer 988 .seealso DMCreateMatrix() 989 @*/ 990 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 991 { 992 PetscFunctionBegin; 993 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 994 dm->prealloc_only = only; 995 PetscFunctionReturn(0); 996 } 997 998 #undef __FUNCT__ 999 #define __FUNCT__ "DMGetWorkArray" 1000 /*@C 1001 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1002 1003 Not Collective 1004 1005 Input Parameters: 1006 + dm - the DM object 1007 . count - The minium size 1008 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1009 1010 Output Parameter: 1011 . 
array - the work array 1012 1013 Level: developer 1014 1015 .seealso DMDestroy(), DMCreate() 1016 @*/ 1017 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1018 { 1019 PetscErrorCode ierr; 1020 DMWorkLink link; 1021 size_t size; 1022 1023 PetscFunctionBegin; 1024 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1025 PetscValidPointer(mem,4); 1026 if (dm->workin) { 1027 link = dm->workin; 1028 dm->workin = dm->workin->next; 1029 } else { 1030 ierr = PetscNewLog(dm,&link);CHKERRQ(ierr); 1031 } 1032 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 1033 if (size*count > link->bytes) { 1034 ierr = PetscFree(link->mem);CHKERRQ(ierr); 1035 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 1036 link->bytes = size*count; 1037 } 1038 link->next = dm->workout; 1039 dm->workout = link; 1040 *(void**)mem = link->mem; 1041 PetscFunctionReturn(0); 1042 } 1043 1044 #undef __FUNCT__ 1045 #define __FUNCT__ "DMRestoreWorkArray" 1046 /*@C 1047 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1048 1049 Not Collective 1050 1051 Input Parameters: 1052 + dm - the DM object 1053 . count - The minium size 1054 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1055 1056 Output Parameter: 1057 . 
array - the work array 1058 1059 Level: developer 1060 1061 .seealso DMDestroy(), DMCreate() 1062 @*/ 1063 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1064 { 1065 DMWorkLink *p,link; 1066 1067 PetscFunctionBegin; 1068 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1069 PetscValidPointer(mem,4); 1070 for (p=&dm->workout; (link=*p); p=&link->next) { 1071 if (link->mem == *(void**)mem) { 1072 *p = link->next; 1073 link->next = dm->workin; 1074 dm->workin = link; 1075 *(void**)mem = NULL; 1076 PetscFunctionReturn(0); 1077 } 1078 } 1079 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1080 PetscFunctionReturn(0); 1081 } 1082 1083 #undef __FUNCT__ 1084 #define __FUNCT__ "DMSetNullSpaceConstructor" 1085 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1086 { 1087 PetscFunctionBegin; 1088 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1089 if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1090 dm->nullspaceConstructors[field] = nullsp; 1091 PetscFunctionReturn(0); 1092 } 1093 1094 #undef __FUNCT__ 1095 #define __FUNCT__ "DMCreateFieldIS" 1096 /*@C 1097 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1098 1099 Not collective 1100 1101 Input Parameter: 1102 . dm - the DM object 1103 1104 Output Parameters: 1105 + numFields - The number of fields (or NULL if not requested) 1106 . fieldNames - The name for each field (or NULL if not requested) 1107 - fields - The global indices for each field (or NULL if not requested) 1108 1109 Level: intermediate 1110 1111 Notes: 1112 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1113 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1114 PetscFree(). 1115 1116 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1117 @*/ 1118 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1119 { 1120 PetscSection section, sectionGlobal; 1121 PetscErrorCode ierr; 1122 1123 PetscFunctionBegin; 1124 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1125 if (numFields) { 1126 PetscValidPointer(numFields,2); 1127 *numFields = 0; 1128 } 1129 if (fieldNames) { 1130 PetscValidPointer(fieldNames,3); 1131 *fieldNames = NULL; 1132 } 1133 if (fields) { 1134 PetscValidPointer(fields,4); 1135 *fields = NULL; 1136 } 1137 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1138 if (section) { 1139 PetscInt *fieldSizes, **fieldIndices; 1140 PetscInt nF, f, pStart, pEnd, p; 1141 1142 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1143 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1144 ierr = PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);CHKERRQ(ierr); 1145 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1146 for (f = 0; f < nF; ++f) { 1147 fieldSizes[f] = 0; 1148 } 1149 for (p = pStart; p < pEnd; ++p) { 1150 PetscInt gdof; 1151 1152 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1153 if (gdof > 0) { 1154 for (f = 0; f < nF; ++f) { 1155 PetscInt fdof, fcdof; 1156 1157 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1158 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1159 fieldSizes[f] += fdof-fcdof; 1160 } 1161 } 1162 } 1163 for (f = 0; f < nF; ++f) { 1164 ierr = PetscMalloc1(fieldSizes[f], &fieldIndices[f]);CHKERRQ(ierr); 1165 fieldSizes[f] = 0; 1166 } 1167 for (p = pStart; p < pEnd; ++p) { 1168 PetscInt gdof, goff; 1169 1170 ierr = 
PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1171 if (gdof > 0) { 1172 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1173 for (f = 0; f < nF; ++f) { 1174 PetscInt fdof, fcdof, fc; 1175 1176 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1177 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1178 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1179 fieldIndices[f][fieldSizes[f]] = goff++; 1180 } 1181 } 1182 } 1183 } 1184 if (numFields) *numFields = nF; 1185 if (fieldNames) { 1186 ierr = PetscMalloc1(nF, fieldNames);CHKERRQ(ierr); 1187 for (f = 0; f < nF; ++f) { 1188 const char *fieldName; 1189 1190 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1191 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1192 } 1193 } 1194 if (fields) { 1195 ierr = PetscMalloc1(nF, fields);CHKERRQ(ierr); 1196 for (f = 0; f < nF; ++f) { 1197 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1198 } 1199 } 1200 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1201 } else if (dm->ops->createfieldis) { 1202 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1203 } 1204 PetscFunctionReturn(0); 1205 } 1206 1207 1208 #undef __FUNCT__ 1209 #define __FUNCT__ "DMCreateFieldDecomposition" 1210 /*@C 1211 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1212 corresponding to different fields: each IS contains the global indices of the dofs of the 1213 corresponding field. The optional list of DMs define the DM for each subproblem. 1214 Generalizes DMCreateFieldIS(). 1215 1216 Not collective 1217 1218 Input Parameter: 1219 . dm - the DM object 1220 1221 Output Parameters: 1222 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1223 . 
namelist - The name for each field (or NULL if not requested) 1224 . islist - The global indices for each field (or NULL if not requested) 1225 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1226 1227 Level: intermediate 1228 1229 Notes: 1230 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1231 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1232 and all of the arrays should be freed with PetscFree(). 1233 1234 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1235 @*/ 1236 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1237 { 1238 PetscErrorCode ierr; 1239 1240 PetscFunctionBegin; 1241 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1242 if (len) { 1243 PetscValidPointer(len,2); 1244 *len = 0; 1245 } 1246 if (namelist) { 1247 PetscValidPointer(namelist,3); 1248 *namelist = 0; 1249 } 1250 if (islist) { 1251 PetscValidPointer(islist,4); 1252 *islist = 0; 1253 } 1254 if (dmlist) { 1255 PetscValidPointer(dmlist,5); 1256 *dmlist = 0; 1257 } 1258 /* 1259 Is it a good idea to apply the following check across all impls? 1260 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1261 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1262 */ 1263 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1264 if (!dm->ops->createfielddecomposition) { 1265 PetscSection section; 1266 PetscInt numFields, f; 1267 1268 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1269 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1270 if (section && numFields && dm->ops->createsubdm) { 1271 *len = numFields; 1272 if (namelist) {ierr = PetscMalloc1(numFields,namelist);CHKERRQ(ierr);} 1273 if (islist) {ierr = PetscMalloc1(numFields,islist);CHKERRQ(ierr);} 1274 if (dmlist) {ierr = PetscMalloc1(numFields,dmlist);CHKERRQ(ierr);} 1275 for (f = 0; f < numFields; ++f) { 1276 const char *fieldName; 1277 1278 ierr = DMCreateSubDM(dm, 1, &f, islist ? &(*islist)[f] : NULL, dmlist ? &(*dmlist)[f] : NULL);CHKERRQ(ierr); 1279 if (namelist) { 1280 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1281 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1282 } 1283 } 1284 } else { 1285 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1286 /* By default there are no DMs associated with subproblems. */ 1287 if (dmlist) *dmlist = NULL; 1288 } 1289 } else { 1290 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1291 } 1292 PetscFunctionReturn(0); 1293 } 1294 1295 #undef __FUNCT__ 1296 #define __FUNCT__ "DMCreateSubDM" 1297 /*@C 1298 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1299 The fields are defined by DMCreateFieldIS(). 1300 1301 Not collective 1302 1303 Input Parameters: 1304 + dm - the DM object 1305 . numFields - number of fields in this subproblem 1306 - len - The number of subproblems in the decomposition (or NULL if not requested) 1307 1308 Output Parameters: 1309 . 
is - The global indices for the subproblem 1310 - dm - The DM for the subproblem 1311 1312 Level: intermediate 1313 1314 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1315 @*/ 1316 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1317 { 1318 PetscErrorCode ierr; 1319 1320 PetscFunctionBegin; 1321 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1322 PetscValidPointer(fields,3); 1323 if (is) PetscValidPointer(is,4); 1324 if (subdm) PetscValidPointer(subdm,5); 1325 if (dm->ops->createsubdm) { 1326 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr); 1327 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1328 PetscFunctionReturn(0); 1329 } 1330 1331 1332 #undef __FUNCT__ 1333 #define __FUNCT__ "DMCreateDomainDecomposition" 1334 /*@C 1335 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1336 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1337 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1338 define a nonoverlapping covering, while outer subdomains can overlap. 1339 The optional list of DMs define the DM for each subproblem. 1340 1341 Not collective 1342 1343 Input Parameter: 1344 . dm - the DM object 1345 1346 Output Parameters: 1347 + len - The number of subproblems in the domain decomposition (or NULL if not requested) 1348 . namelist - The name for each subdomain (or NULL if not requested) 1349 . innerislist - The global indices for each inner subdomain (or NULL, if not requested) 1350 . 
outerislist - The global indices for each outer subdomain (or NULL, if not requested) 1351 - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1352 1353 Level: intermediate 1354 1355 Notes: 1356 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1357 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1358 and all of the arrays should be freed with PetscFree(). 1359 1360 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1361 @*/ 1362 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1363 { 1364 PetscErrorCode ierr; 1365 DMSubDomainHookLink link; 1366 PetscInt i,l; 1367 1368 PetscFunctionBegin; 1369 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1370 if (len) {PetscValidPointer(len,2); *len = 0;} 1371 if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;} 1372 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;} 1373 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;} 1374 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;} 1375 /* 1376 Is it a good idea to apply the following check across all impls? 1377 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1378 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1379 */ 1380 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1381 if (dm->ops->createdomaindecomposition) { 1382 ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr); 1383 /* copy subdomain hooks and context over to the subdomain DMs */ 1384 if (dmlist) { 1385 for (i = 0; i < l; i++) { 1386 for (link=dm->subdomainhook; link; link=link->next) { 1387 if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);} 1388 } 1389 (*dmlist)[i]->ctx = dm->ctx; 1390 } 1391 } 1392 if (len) *len = l; 1393 } 1394 PetscFunctionReturn(0); 1395 } 1396 1397 1398 #undef __FUNCT__ 1399 #define __FUNCT__ "DMCreateDomainDecompositionScatters" 1400 /*@C 1401 DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector 1402 1403 Not collective 1404 1405 Input Parameters: 1406 + dm - the DM object 1407 . n - the number of subdomain scatters 1408 - subdms - the local subdomains 1409 1410 Output Parameters: 1411 + n - the number of scatters returned 1412 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain 1413 . oscat - scatter from global vector to overlapping global vector entries on subdomain 1414 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts) 1415 1416 Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution 1417 of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets 1418 of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of 1419 solution and residual data. 
1420 1421 Level: developer 1422 1423 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1424 @*/ 1425 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1426 { 1427 PetscErrorCode ierr; 1428 1429 PetscFunctionBegin; 1430 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1431 PetscValidPointer(subdms,3); 1432 if (dm->ops->createddscatters) { 1433 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1434 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1435 PetscFunctionReturn(0); 1436 } 1437 1438 #undef __FUNCT__ 1439 #define __FUNCT__ "DMRefine" 1440 /*@ 1441 DMRefine - Refines a DM object 1442 1443 Collective on DM 1444 1445 Input Parameter: 1446 + dm - the DM object 1447 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1448 1449 Output Parameter: 1450 . 
dmf - the refined DM, or NULL 1451 1452 Note: If no refinement was done, the return value is NULL 1453 1454 Level: developer 1455 1456 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1457 @*/ 1458 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1459 { 1460 PetscErrorCode ierr; 1461 DMRefineHookLink link; 1462 1463 PetscFunctionBegin; 1464 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1465 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1466 if (*dmf) { 1467 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1468 1469 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1470 1471 (*dmf)->ctx = dm->ctx; 1472 (*dmf)->leveldown = dm->leveldown; 1473 (*dmf)->levelup = dm->levelup + 1; 1474 1475 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1476 for (link=dm->refinehook; link; link=link->next) { 1477 if (link->refinehook) { 1478 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1479 } 1480 } 1481 } 1482 PetscFunctionReturn(0); 1483 } 1484 1485 #undef __FUNCT__ 1486 #define __FUNCT__ "DMRefineHookAdd" 1487 /*@C 1488 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1489 1490 Logically Collective 1491 1492 Input Arguments: 1493 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1494 . refinehook - function to run when setting up a coarser level 1495 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1496 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1497 1498 Calling sequence of refinehook: 1499 $ refinehook(DM coarse,DM fine,void *ctx); 1500 1501 + coarse - coarse level DM 1502 . 
fine - fine level DM to interpolate problem to 1503 - ctx - optional user-defined function context 1504 1505 Calling sequence for interphook: 1506 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1507 1508 + coarse - coarse level DM 1509 . interp - matrix interpolating a coarse-level solution to the finer grid 1510 . fine - fine level DM to update 1511 - ctx - optional user-defined function context 1512 1513 Level: advanced 1514 1515 Notes: 1516 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1517 1518 If this function is called multiple times, the hooks will be run in the order they are added. 1519 1520 This function is currently not available from Fortran. 1521 1522 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1523 @*/ 1524 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1525 { 1526 PetscErrorCode ierr; 1527 DMRefineHookLink link,*p; 1528 1529 PetscFunctionBegin; 1530 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1531 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1532 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1533 link->refinehook = refinehook; 1534 link->interphook = interphook; 1535 link->ctx = ctx; 1536 link->next = NULL; 1537 *p = link; 1538 PetscFunctionReturn(0); 1539 } 1540 1541 #undef __FUNCT__ 1542 #define __FUNCT__ "DMInterpolate" 1543 /*@ 1544 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1545 1546 Collective if any hooks are 1547 1548 Input Arguments: 1549 + coarse - coarser DM to use as a base 1550 . 
restrct - interpolation matrix, apply using MatInterpolate() 1551 - fine - finer DM to update 1552 1553 Level: developer 1554 1555 .seealso: DMRefineHookAdd(), MatInterpolate() 1556 @*/ 1557 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1558 { 1559 PetscErrorCode ierr; 1560 DMRefineHookLink link; 1561 1562 PetscFunctionBegin; 1563 for (link=fine->refinehook; link; link=link->next) { 1564 if (link->interphook) { 1565 ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr); 1566 } 1567 } 1568 PetscFunctionReturn(0); 1569 } 1570 1571 #undef __FUNCT__ 1572 #define __FUNCT__ "DMGetRefineLevel" 1573 /*@ 1574 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1575 1576 Not Collective 1577 1578 Input Parameter: 1579 . dm - the DM object 1580 1581 Output Parameter: 1582 . level - number of refinements 1583 1584 Level: developer 1585 1586 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1587 1588 @*/ 1589 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1590 { 1591 PetscFunctionBegin; 1592 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1593 *level = dm->levelup; 1594 PetscFunctionReturn(0); 1595 } 1596 1597 #undef __FUNCT__ 1598 #define __FUNCT__ "DMGlobalToLocalHookAdd" 1599 /*@C 1600 DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called 1601 1602 Logically Collective 1603 1604 Input Arguments: 1605 + dm - the DM 1606 . beginhook - function to run at the beginning of DMGlobalToLocalBegin() 1607 . endhook - function to run after DMGlobalToLocalEnd() has completed 1608 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1609 1610 Calling sequence for beginhook: 1611 $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1612 1613 + dm - global DM 1614 . g - global vector 1615 . mode - mode 1616 . 
l - local vector 1617 - ctx - optional user-defined function context 1618 1619 1620 Calling sequence for endhook: 1621 $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1622 1623 + global - global DM 1624 - ctx - optional user-defined function context 1625 1626 Level: advanced 1627 1628 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1629 @*/ 1630 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx) 1631 { 1632 PetscErrorCode ierr; 1633 DMGlobalToLocalHookLink link,*p; 1634 1635 PetscFunctionBegin; 1636 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1637 for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1638 ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr); 1639 link->beginhook = beginhook; 1640 link->endhook = endhook; 1641 link->ctx = ctx; 1642 link->next = NULL; 1643 *p = link; 1644 PetscFunctionReturn(0); 1645 } 1646 1647 #undef __FUNCT__ 1648 #define __FUNCT__ "DMGlobalToLocalBegin" 1649 /*@ 1650 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1651 1652 Neighbor-wise Collective on DM 1653 1654 Input Parameters: 1655 + dm - the DM object 1656 . g - the global vector 1657 . 
mode - INSERT_VALUES or ADD_VALUES 1658 - l - the local vector 1659 1660 1661 Level: beginner 1662 1663 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1664 1665 @*/ 1666 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1667 { 1668 PetscSF sf; 1669 PetscErrorCode ierr; 1670 DMGlobalToLocalHookLink link; 1671 1672 PetscFunctionBegin; 1673 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1674 for (link=dm->gtolhook; link; link=link->next) { 1675 if (link->beginhook) { 1676 ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr); 1677 } 1678 } 1679 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1680 if (sf) { 1681 PetscScalar *lArray, *gArray; 1682 1683 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1684 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1685 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1686 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1687 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1688 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1689 } else { 1690 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1691 } 1692 PetscFunctionReturn(0); 1693 } 1694 1695 #undef __FUNCT__ 1696 #define __FUNCT__ "DMGlobalToLocalEnd" 1697 /*@ 1698 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1699 1700 Neighbor-wise Collective on DM 1701 1702 Input Parameters: 1703 + dm - the DM object 1704 . g - the global vector 1705 . 
mode - INSERT_VALUES or ADD_VALUES 1706 - l - the local vector 1707 1708 1709 Level: beginner 1710 1711 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1712 1713 @*/ 1714 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1715 { 1716 PetscSF sf; 1717 PetscErrorCode ierr; 1718 PetscScalar *lArray, *gArray; 1719 DMGlobalToLocalHookLink link; 1720 1721 PetscFunctionBegin; 1722 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1723 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1724 if (sf) { 1725 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1726 1727 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1728 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1729 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1730 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1731 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1732 } else { 1733 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1734 } 1735 for (link=dm->gtolhook; link; link=link->next) { 1736 if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);} 1737 } 1738 PetscFunctionReturn(0); 1739 } 1740 1741 #undef __FUNCT__ 1742 #define __FUNCT__ "DMLocalToGlobalBegin" 1743 /*@ 1744 DMLocalToGlobalBegin - updates global vectors from local vectors 1745 1746 Neighbor-wise Collective on DM 1747 1748 Input Parameters: 1749 + dm - the DM object 1750 . l - the local vector 1751 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1752 base point. 1753 - - the global vector 1754 1755 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1756 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1757 global array to the final global array with VecAXPY(). 1758 1759 Level: beginner 1760 1761 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1762 1763 @*/ 1764 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1765 { 1766 PetscSF sf; 1767 PetscErrorCode ierr; 1768 1769 PetscFunctionBegin; 1770 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1771 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1772 if (sf) { 1773 MPI_Op op; 1774 PetscScalar *lArray, *gArray; 1775 1776 switch (mode) { 1777 case INSERT_VALUES: 1778 case INSERT_ALL_VALUES: 1779 op = MPIU_REPLACE; break; 1780 case ADD_VALUES: 1781 case ADD_ALL_VALUES: 1782 op = MPI_SUM; break; 1783 default: 1784 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1785 } 1786 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1787 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1788 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1789 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1790 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1791 } else { 1792 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1793 } 1794 PetscFunctionReturn(0); 1795 } 1796 1797 #undef __FUNCT__ 1798 #define __FUNCT__ "DMLocalToGlobalEnd" 1799 /*@ 1800 DMLocalToGlobalEnd - updates global vectors from local vectors 1801 1802 Neighbor-wise Collective on DM 1803 1804 Input Parameters: 1805 + dm - the DM object 1806 . l - the local vector 1807 . 
mode - INSERT_VALUES or ADD_VALUES 1808 - g - the global vector 1809 1810 1811 Level: beginner 1812 1813 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1814 1815 @*/ 1816 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1817 { 1818 PetscSF sf; 1819 PetscErrorCode ierr; 1820 1821 PetscFunctionBegin; 1822 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1823 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1824 if (sf) { 1825 MPI_Op op; 1826 PetscScalar *lArray, *gArray; 1827 1828 switch (mode) { 1829 case INSERT_VALUES: 1830 case INSERT_ALL_VALUES: 1831 op = MPIU_REPLACE; break; 1832 case ADD_VALUES: 1833 case ADD_ALL_VALUES: 1834 op = MPI_SUM; break; 1835 default: 1836 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1837 } 1838 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1839 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1840 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1841 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1842 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1843 } else { 1844 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1845 } 1846 PetscFunctionReturn(0); 1847 } 1848 1849 #undef __FUNCT__ 1850 #define __FUNCT__ "DMLocalToLocalBegin" 1851 /*@ 1852 DMLocalToLocalBegin - Maps from a local vector (including ghost points 1853 that contain irrelevant values) to another local vector where the ghost 1854 points in the second are set correctly. Must be followed by DMLocalToLocalEnd(). 1855 1856 Neighbor-wise Collective on DM and Vec 1857 1858 Input Parameters: 1859 + dm - the DM object 1860 . g - the original local vector 1861 - mode - one of INSERT_VALUES or ADD_VALUES 1862 1863 Output Parameter: 1864 . 
l - the local vector with correct ghost values 1865 1866 Level: intermediate 1867 1868 Notes: 1869 The local vectors used here need not be the same as those 1870 obtained from DMCreateLocalVector(), BUT they 1871 must have the same parallel data layout; they could, for example, be 1872 obtained with VecDuplicate() from the DM originating vectors. 1873 1874 .keywords: DM, local-to-local, begin 1875 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1876 1877 @*/ 1878 PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1879 { 1880 PetscErrorCode ierr; 1881 1882 PetscFunctionBegin; 1883 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1884 ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1885 PetscFunctionReturn(0); 1886 } 1887 1888 #undef __FUNCT__ 1889 #define __FUNCT__ "DMLocalToLocalEnd" 1890 /*@ 1891 DMLocalToLocalEnd - Maps from a local vector (including ghost points 1892 that contain irrelevant values) to another local vector where the ghost 1893 points in the second are set correctly. Must be preceded by DMLocalToLocalBegin(). 1894 1895 Neighbor-wise Collective on DM and Vec 1896 1897 Input Parameters: 1898 + da - the DM object 1899 . g - the original local vector 1900 - mode - one of INSERT_VALUES or ADD_VALUES 1901 1902 Output Parameter: 1903 . l - the local vector with correct ghost values 1904 1905 Level: intermediate 1906 1907 Notes: 1908 The local vectors used here need not be the same as those 1909 obtained from DMCreateLocalVector(), BUT they 1910 must have the same parallel data layout; they could, for example, be 1911 obtained with VecDuplicate() from the DM originating vectors. 
1912 1913 .keywords: DM, local-to-local, end 1914 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1915 1916 @*/ 1917 PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1918 { 1919 PetscErrorCode ierr; 1920 1921 PetscFunctionBegin; 1922 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1923 ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1924 PetscFunctionReturn(0); 1925 } 1926 1927 1928 #undef __FUNCT__ 1929 #define __FUNCT__ "DMCoarsen" 1930 /*@ 1931 DMCoarsen - Coarsens a DM object 1932 1933 Collective on DM 1934 1935 Input Parameter: 1936 + dm - the DM object 1937 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1938 1939 Output Parameter: 1940 . dmc - the coarsened DM 1941 1942 Level: developer 1943 1944 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1945 1946 @*/ 1947 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1948 { 1949 PetscErrorCode ierr; 1950 DMCoarsenHookLink link; 1951 1952 PetscFunctionBegin; 1953 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1954 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1955 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1956 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1957 (*dmc)->ctx = dm->ctx; 1958 (*dmc)->levelup = dm->levelup; 1959 (*dmc)->leveldown = dm->leveldown + 1; 1960 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1961 for (link=dm->coarsenhook; link; link=link->next) { 1962 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1963 } 1964 PetscFunctionReturn(0); 1965 } 1966 1967 #undef __FUNCT__ 1968 #define __FUNCT__ "DMCoarsenHookAdd" 1969 /*@C 1970 DMCoarsenHookAdd - adds a callback to be 
run when restricting a nonlinear problem to the coarse grid

   Logically Collective

   Input Arguments:
+  fine - DM on which to run a hook when restricting to a coarser level
.  coarsenhook - function to run when setting up a coarser level
.  restricthook - function to run to update data on coarser levels (once per SNESSolve())
-  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

   Calling sequence of coarsenhook:
$    coarsenhook(DM fine,DM coarse,void *ctx);

+  fine - fine level DM
.  coarse - coarse level DM to restrict problem to
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

+  fine - fine level DM
.  mrestrict - matrix restricting a fine-level solution to the coarse grid
.  rscale - scaling vector for restriction
.  inject - matrix restricting by injection
.  coarse - coarse level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fine,DM_CLASSID,1);
  for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
  link->coarsenhook  = coarsenhook;
  link->restricthook = restricthook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link; /* append, preserving registration order */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestrict"
/*@
   DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

   Collective if any hooks are

   Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.  rscale - scaling vector for restriction
. 
inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update

   Level: developer

.seealso: DMCoarsenHookAdd(), MatRestrict()
@*/
PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  /* Run only the restrict hooks, in the order they were registered. */
  for (link=fine->coarsenhook; link; link=link->next) {
    if (link->restricthook) {
      ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHookAdd"
/*@C
   DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid

   Logically Collective

   Input Arguments:
+  global - global DM
.  ddhook - function to run to pass data to the decomposition DM upon its creation
.  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)


   Calling sequence for ddhook:
$    ddhook(DM global,DM block,void *ctx)

+  global - global DM
.  block - block DM
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

+  global - global DM
.  out - scatter to the outer (with ghost and overlap points) block vector
.  in - scatter to block vector values only owned locally
.  block - block DM
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on subdomain DMs.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the global information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook       = ddhook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link; /* append, preserving registration order */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainRestrict"
/*@
   DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

   Collective if any hooks are

   Input Arguments:
+  global - global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
. 
gscatter - scatter from domain global vector filling subdomain local vector with ghosts
-  subdm - subdomain DM to update

   Level: developer

.seealso: DMSubDomainHookAdd(), DMCoarsenHookAdd(), MatRestrict()
@*/
PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;

  PetscFunctionBegin;
  /* Run only the restrict hooks, in the order they were registered. */
  for (link=global->subdomainhook; link; link=link->next) {
    if (link->restricthook) {
      ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoarsenLevel"
/*@
   DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.  level - number of coarsenings

   Level: developer

.seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *level = dm->leveldown;
  PetscFunctionReturn(0);
}



#undef __FUNCT__
#define __FUNCT__ "DMRefineHierarchy"
/*@C
   DMRefineHierarchy - Refines a DM object, all levels at once

   Collective on DM

   Input Parameters:
+  dm - the DM object
-  nlevels - the number of levels of refinement

   Output Parameter:
. 
dmf - the refined DM hierarchy

   Level: developer

.seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  if (dm->ops->refinehierarchy) {
    /* The implementation provides specialized all-at-once refinement. */
    ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
  } else if (dm->ops->refine) {
    /* Fall back to repeated single-level refinement. */
    PetscInt i;

    ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
    }
  } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsenHierarchy"
/*@C
   DMCoarsenHierarchy - Coarsens a DM object, all levels at once

   Collective on DM

   Input Parameters:
+  dm - the DM object
-  nlevels - the number of levels of coarsening

   Output Parameter:
. 
dmc - the coarsened DM hierarchy

   Level: developer

.seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dmc,3);
  if (dm->ops->coarsenhierarchy) {
    /* The implementation provides specialized all-at-once coarsening. */
    ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
  } else if (dm->ops->coarsen) {
    /* Fall back to repeated single-level coarsening. */
    PetscInt i;

    ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
    }
  } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateAggregates"
/*@
   DMCreateAggregates - Gets the aggregates that map between
   grids associated with two DMs.

   Collective on DM

   Input Parameters:
+  dmc - the coarse grid DM
-  dmf - the fine grid DM

   Output Parameters:
. 
rest - the restriction matrix (transpose of the projection matrix)

   Level: intermediate

.keywords: interpolation, restriction, multigrid

.seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
@*/
PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
  PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
  ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
   DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

   Not Collective

   Input Parameters:
+  dm - the DM object
-  destroy - the destroy function

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
   DMSetApplicationContext - Set a user context into a DM object

   Not Collective

   Input Parameters:
+  dm - the DM object
-  ctx - the user context

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM does not take ownership; ctx is destroyed only if a destroy
     routine was registered with DMSetApplicationContextDestroy(). */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2340 #undef __FUNCT__ 2341 #define __FUNCT__ "DMGetApplicationContext" 2342 /*@ 2343 DMGetApplicationContext - Gets a user context from a DM object 2344 2345 Not Collective 2346 2347 Input Parameter: 2348 . dm - the DM object 2349 2350 Output Parameter: 2351 . ctx - the user context 2352 2353 Level: intermediate 2354 2355 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2356 2357 @*/ 2358 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2359 { 2360 PetscFunctionBegin; 2361 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2362 *(void**)ctx = dm->ctx; 2363 PetscFunctionReturn(0); 2364 } 2365 2366 #undef __FUNCT__ 2367 #define __FUNCT__ "DMSetVariableBounds" 2368 /*@C 2369 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2370 2371 Logically Collective on DM 2372 2373 Input Parameter: 2374 + dm - the DM object 2375 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2376 2377 Level: intermediate 2378 2379 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2380 DMSetJacobian() 2381 2382 @*/ 2383 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2384 { 2385 PetscFunctionBegin; 2386 dm->ops->computevariablebounds = f; 2387 PetscFunctionReturn(0); 2388 } 2389 2390 #undef __FUNCT__ 2391 #define __FUNCT__ "DMHasVariableBounds" 2392 /*@ 2393 DMHasVariableBounds - does the DM object have a variable bounds function? 2394 2395 Not Collective 2396 2397 Input Parameter: 2398 . dm - the DM object to destroy 2399 2400 Output Parameter: 2401 . 
flg - PETSC_TRUE if the variable bounds function exists 2402 2403 Level: developer 2404 2405 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2406 2407 @*/ 2408 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2409 { 2410 PetscFunctionBegin; 2411 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2412 PetscFunctionReturn(0); 2413 } 2414 2415 #undef __FUNCT__ 2416 #define __FUNCT__ "DMComputeVariableBounds" 2417 /*@C 2418 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2419 2420 Logically Collective on DM 2421 2422 Input Parameters: 2423 + dm - the DM object to destroy 2424 - x - current solution at which the bounds are computed 2425 2426 Output parameters: 2427 + xl - lower bound 2428 - xu - upper bound 2429 2430 Level: intermediate 2431 2432 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2433 2434 @*/ 2435 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2436 { 2437 PetscErrorCode ierr; 2438 2439 PetscFunctionBegin; 2440 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2441 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2442 if (dm->ops->computevariablebounds) { 2443 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2444 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2445 PetscFunctionReturn(0); 2446 } 2447 2448 #undef __FUNCT__ 2449 #define __FUNCT__ "DMHasColoring" 2450 /*@ 2451 DMHasColoring - does the DM object have a method of providing a coloring? 2452 2453 Not Collective 2454 2455 Input Parameter: 2456 . dm - the DM object 2457 2458 Output Parameter: 2459 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2460 2461 Level: developer 2462 2463 .seealso DMHasFunction(), DMCreateColoring() 2464 2465 @*/ 2466 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2467 { 2468 PetscFunctionBegin; 2469 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2470 PetscFunctionReturn(0); 2471 } 2472 2473 #undef __FUNCT__ 2474 #define __FUNCT__ "DMSetVec" 2475 /*@C 2476 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2477 2478 Collective on DM 2479 2480 Input Parameter: 2481 + dm - the DM object 2482 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2483 2484 Level: developer 2485 2486 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2487 2488 @*/ 2489 PetscErrorCode DMSetVec(DM dm,Vec x) 2490 { 2491 PetscErrorCode ierr; 2492 2493 PetscFunctionBegin; 2494 if (x) { 2495 if (!dm->x) { 2496 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2497 } 2498 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2499 } else if (dm->x) { 2500 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2501 } 2502 PetscFunctionReturn(0); 2503 } 2504 2505 PetscFunctionList DMList = NULL; 2506 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2507 2508 #undef __FUNCT__ 2509 #define __FUNCT__ "DMSetType" 2510 /*@C 2511 DMSetType - Builds a DM, for a particular DM implementation. 2512 2513 Collective on DM 2514 2515 Input Parameters: 2516 + dm - The DM object 2517 - method - The name of the DM type 2518 2519 Options Database Key: 2520 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2521 2522 Notes: 2523 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* Already of the requested type: nothing to do. */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down the previous implementation (if any) before building the new one. */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll();CHKERRQ(ierr);
  }
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.
2590 2591 Collective on DM 2592 2593 Input Parameters: 2594 + dm - the DM 2595 - newtype - new DM type (use "same" for the same type) 2596 2597 Output Parameter: 2598 . M - pointer to new DM 2599 2600 Notes: 2601 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2602 the MPI communicator of the generated DM is always the same as the communicator 2603 of the input DM. 2604 2605 Level: intermediate 2606 2607 .seealso: DMCreate() 2608 @*/ 2609 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2610 { 2611 DM B; 2612 char convname[256]; 2613 PetscBool sametype, issame; 2614 PetscErrorCode ierr; 2615 2616 PetscFunctionBegin; 2617 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2618 PetscValidType(dm,1); 2619 PetscValidPointer(M,3); 2620 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2621 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2622 { 2623 PetscErrorCode (*conv)(DM, DMType, DM*) = NULL; 2624 2625 /* 2626 Order of precedence: 2627 1) See if a specialized converter is known to the current DM. 2628 2) See if a specialized converter is known to the desired DM class. 2629 3) See if a good general converter is registered for the desired class 2630 4) See if a good general converter is known for the current matrix. 2631 5) Use a really basic converter. 2632 */ 2633 2634 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2635 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2636 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2637 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2638 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2639 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2640 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr); 2641 if (conv) goto foundconv; 2642 2643 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2644 ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr); 2645 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2646 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2647 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2648 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2649 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2650 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2651 ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr); 2652 if (conv) { 2653 ierr = DMDestroy(&B);CHKERRQ(ierr); 2654 goto foundconv; 2655 } 2656 2657 #if 0 2658 /* 3) See if a good general converter is registered for the desired class */ 2659 conv = B->ops->convertfrom; 2660 ierr = DMDestroy(&B);CHKERRQ(ierr); 2661 if (conv) goto foundconv; 2662 2663 /* 4) See if a good general converter is known for the current matrix */ 2664 if (dm->ops->convert) { 2665 conv = dm->ops->convert; 2666 } 2667 if (conv) goto foundconv; 2668 #endif 2669 2670 /* 5) Use a really basic converter. 
*/ 2671 SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2672 2673 foundconv: 2674 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2675 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2676 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2677 } 2678 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2679 PetscFunctionReturn(0); 2680 } 2681 2682 /*--------------------------------------------------------------------------------------------------------------------*/ 2683 2684 #undef __FUNCT__ 2685 #define __FUNCT__ "DMRegister" 2686 /*@C 2687 DMRegister - Adds a new DM component implementation 2688 2689 Not Collective 2690 2691 Input Parameters: 2692 + name - The name of a new user-defined creation routine 2693 - create_func - The creation routine itself 2694 2695 Notes: 2696 DMRegister() may be called multiple times to add several user-defined DMs 2697 2698 2699 Sample usage: 2700 .vb 2701 DMRegister("my_da", MyDMCreate); 2702 .ve 2703 2704 Then, your DM type can be chosen with the procedural interface via 2705 .vb 2706 DMCreate(MPI_Comm, DM *); 2707 DMSetType(DM,"my_da"); 2708 .ve 2709 or at runtime via the option 2710 .vb 2711 -da_type my_da 2712 .ve 2713 2714 Level: advanced 2715 2716 .keywords: DM, register 2717 .seealso: DMRegisterAll(), DMRegisterDestroy() 2718 2719 @*/ 2720 PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM)) 2721 { 2722 PetscErrorCode ierr; 2723 2724 PetscFunctionBegin; 2725 ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr); 2726 PetscFunctionReturn(0); 2727 } 2728 2729 #undef __FUNCT__ 2730 #define __FUNCT__ "DMLoad" 2731 /*@C 2732 DMLoad - Loads a DM that has been stored in binary with DMView(). 

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

  Level: intermediate

  Notes:
  The type is determined by the data in the file, any type set into the DM before this call is ignored.
  NOTE(review): only PETSCVIEWERBINARY viewers are accepted by the code below;
  the HDF5 mention above does not match the implementation -- confirm.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The stream starts with a classid followed by the type name as a string. */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Let the implementation read its own data, if it supports loading. */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/******************************** FEM Support 
**********************************/ 2782 2783 #undef __FUNCT__ 2784 #define __FUNCT__ "DMPrintCellVector" 2785 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) 2786 { 2787 PetscInt f; 2788 PetscErrorCode ierr; 2789 2790 PetscFunctionBegin; 2791 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2792 for (f = 0; f < len; ++f) { 2793 ierr = PetscPrintf(PETSC_COMM_SELF, " | %g |\n", (double)PetscRealPart(x[f]));CHKERRQ(ierr); 2794 } 2795 PetscFunctionReturn(0); 2796 } 2797 2798 #undef __FUNCT__ 2799 #define __FUNCT__ "DMPrintCellMatrix" 2800 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) 2801 { 2802 PetscInt f, g; 2803 PetscErrorCode ierr; 2804 2805 PetscFunctionBegin; 2806 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2807 for (f = 0; f < rows; ++f) { 2808 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2809 for (g = 0; g < cols; ++g) { 2810 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5g", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2811 } 2812 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2813 } 2814 PetscFunctionReturn(0); 2815 } 2816 2817 #undef __FUNCT__ 2818 #define __FUNCT__ "DMPrintLocalVec" 2819 PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X) 2820 { 2821 PetscMPIInt rank, numProcs; 2822 PetscInt p; 2823 PetscErrorCode ierr; 2824 2825 PetscFunctionBegin; 2826 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 2827 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr); 2828 ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr); 2829 for (p = 0; p < numProcs; ++p) { 2830 if (p == rank) { 2831 Vec x; 2832 2833 ierr = VecDuplicate(X, &x);CHKERRQ(ierr); 2834 ierr = VecCopy(X, x);CHKERRQ(ierr); 2835 ierr = VecChop(x, tol);CHKERRQ(ierr); 2836 ierr 
= VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr); 2837 ierr = VecDestroy(&x);CHKERRQ(ierr); 2838 ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr); 2839 } 2840 ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr); 2841 } 2842 PetscFunctionReturn(0); 2843 } 2844 2845 #undef __FUNCT__ 2846 #define __FUNCT__ "DMGetDefaultSection" 2847 /*@ 2848 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 2849 2850 Input Parameter: 2851 . dm - The DM 2852 2853 Output Parameter: 2854 . section - The PetscSection 2855 2856 Level: intermediate 2857 2858 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 2859 2860 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2861 @*/ 2862 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) 2863 { 2864 PetscErrorCode ierr; 2865 2866 PetscFunctionBegin; 2867 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2868 PetscValidPointer(section, 2); 2869 if (!dm->defaultSection && dm->ops->createdefaultsection) {ierr = (*dm->ops->createdefaultsection)(dm);CHKERRQ(ierr);} 2870 *section = dm->defaultSection; 2871 PetscFunctionReturn(0); 2872 } 2873 2874 #undef __FUNCT__ 2875 #define __FUNCT__ "DMSetDefaultSection" 2876 /*@ 2877 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroying the old one so passing the currently set
     section is safe. */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Propagate field names from the section onto the DM's field objects. */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName((PetscObject) dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    PetscSection s;

    /* Derive the global section lazily from the local section and the point SF. */
    ierr = DMGetDefaultSection(dm, &s);CHKERRQ(ierr);
    if (!s) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection in order to create a global PetscSection");
    if (!dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(s, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Keep the value layout in sync with the freshly built global section. */
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm), dm->defaultGlobalSection, &dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2960 2961 Input Parameters: 2962 + dm - The DM 2963 - section - The PetscSection, or NULL 2964 2965 Level: intermediate 2966 2967 Note: Any existing Section will be destroyed 2968 2969 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2970 @*/ 2971 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2972 { 2973 PetscErrorCode ierr; 2974 2975 PetscFunctionBegin; 2976 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2977 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2978 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2979 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2980 dm->defaultGlobalSection = section; 2981 PetscFunctionReturn(0); 2982 } 2983 2984 #undef __FUNCT__ 2985 #define __FUNCT__ "DMGetDefaultSF" 2986 /*@ 2987 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2988 it is created from the default PetscSection layouts in the DM. 2989 2990 Input Parameter: 2991 . dm - The DM 2992 2993 Output Parameter: 2994 . sf - The PetscSF 2995 2996 Level: intermediate 2997 2998 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
2999 3000 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3001 @*/ 3002 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) 3003 { 3004 PetscInt nroots; 3005 PetscErrorCode ierr; 3006 3007 PetscFunctionBegin; 3008 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3009 PetscValidPointer(sf, 2); 3010 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 3011 if (nroots < 0) { 3012 PetscSection section, gSection; 3013 3014 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3015 if (section) { 3016 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3017 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3018 } else { 3019 *sf = NULL; 3020 PetscFunctionReturn(0); 3021 } 3022 } 3023 *sf = dm->defaultSF; 3024 PetscFunctionReturn(0); 3025 } 3026 3027 #undef __FUNCT__ 3028 #define __FUNCT__ "DMSetDefaultSF" 3029 /*@ 3030 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3031 3032 Input Parameters: 3033 + dm - The DM 3034 - sf - The PetscSF 3035 3036 Level: intermediate 3037 3038 Note: Any previous SF is destroyed 3039 3040 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3041 @*/ 3042 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) 3043 { 3044 PetscErrorCode ierr; 3045 3046 PetscFunctionBegin; 3047 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3048 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3049 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3050 dm->defaultSF = sf; 3051 PetscFunctionReturn(0); 3052 } 3053 3054 #undef __FUNCT__ 3055 #define __FUNCT__ "DMCreateDefaultSF" 3056 /*@C 3057 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3058 describing the data layout. 3059 3060 Input Parameters: 3061 + dm - The DM 3062 . 
localSection - PetscSection describing the local data layout 3063 - globalSection - PetscSection describing the global data layout 3064 3065 Level: intermediate 3066 3067 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3068 @*/ 3069 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3070 { 3071 MPI_Comm comm; 3072 PetscLayout layout; 3073 const PetscInt *ranges; 3074 PetscInt *local; 3075 PetscSFNode *remote; 3076 PetscInt pStart, pEnd, p, nroots, nleaves = 0, l; 3077 PetscMPIInt size, rank; 3078 PetscErrorCode ierr; 3079 3080 PetscFunctionBegin; 3081 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 3082 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3083 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3084 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3085 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3086 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3087 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3088 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3089 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3090 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3091 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3092 for (p = pStart; p < pEnd; ++p) { 3093 PetscInt gdof, gcdof; 3094 3095 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3096 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3097 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3098 } 3099 ierr = PetscMalloc1(nleaves, &local);CHKERRQ(ierr); 3100 ierr = PetscMalloc1(nleaves, &remote);CHKERRQ(ierr); 3101 for (p = pStart, l = 0; p < pEnd; ++p) { 3102 const PetscInt *cind; 3103 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3104 3105 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3106 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3107 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3108 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3109 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3110 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3111 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3112 if (!gdof) continue; /* Censored point */ 3113 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3114 if (gsize != dof-cdof) { 3115 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 3116 cdof = 0; /* Ignore constraints */ 3117 } 3118 for (d = 0, c = 0; d < dof; ++d) { 3119 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3120 local[l+d-c] = off+d; 3121 } 3122 if (gdof < 0) { 3123 for (d = 0; d < gsize; ++d, ++l) { 3124 PetscInt offset = -(goff+1) + d, r; 3125 3126 ierr = PetscFindInt(offset,size+1,ranges,&r);CHKERRQ(ierr); 3127 if (r < 0) r = -(r+2); 3128 if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff); 3129 remote[l].rank = r; 3130 remote[l].index = offset - ranges[r]; 3131 } 3132 } else { 3133 for (d = 0; d < gsize; ++d, ++l) { 3134 remote[l].rank = rank; 3135 remote[l].index = goff+d - ranges[rank]; 3136 } 3137 } 3138 } 3139 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3140 
ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3141 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3142 PetscFunctionReturn(0); 3143 } 3144 3145 #undef __FUNCT__ 3146 #define __FUNCT__ "DMGetPointSF" 3147 /*@ 3148 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3149 3150 Input Parameter: 3151 . dm - The DM 3152 3153 Output Parameter: 3154 . sf - The PetscSF 3155 3156 Level: intermediate 3157 3158 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3159 3160 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3161 @*/ 3162 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 3163 { 3164 PetscFunctionBegin; 3165 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3166 PetscValidPointer(sf, 2); 3167 *sf = dm->sf; 3168 PetscFunctionReturn(0); 3169 } 3170 3171 #undef __FUNCT__ 3172 #define __FUNCT__ "DMSetPointSF" 3173 /*@ 3174 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 
3175 3176 Input Parameters: 3177 + dm - The DM 3178 - sf - The PetscSF 3179 3180 Level: intermediate 3181 3182 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3183 @*/ 3184 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3185 { 3186 PetscErrorCode ierr; 3187 3188 PetscFunctionBegin; 3189 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3190 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3191 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3192 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3193 dm->sf = sf; 3194 PetscFunctionReturn(0); 3195 } 3196 3197 #undef __FUNCT__ 3198 #define __FUNCT__ "DMGetNumFields" 3199 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3200 { 3201 PetscFunctionBegin; 3202 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3203 PetscValidPointer(numFields, 2); 3204 *numFields = dm->numFields; 3205 PetscFunctionReturn(0); 3206 } 3207 3208 #undef __FUNCT__ 3209 #define __FUNCT__ "DMSetNumFields" 3210 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields) 3211 { 3212 PetscInt f; 3213 PetscErrorCode ierr; 3214 3215 PetscFunctionBegin; 3216 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3217 if (dm->numFields == numFields) PetscFunctionReturn(0); 3218 for (f = 0; f < dm->numFields; ++f) { 3219 ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr); 3220 } 3221 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3222 dm->numFields = numFields; 3223 ierr = PetscMalloc1(dm->numFields, &dm->fields);CHKERRQ(ierr); 3224 for (f = 0; f < dm->numFields; ++f) { 3225 ierr = PetscContainerCreate(PetscObjectComm((PetscObject) dm), (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3226 } 3227 PetscFunctionReturn(0); 3228 } 3229 3230 #undef __FUNCT__ 3231 #define __FUNCT__ "DMGetField" 3232 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3233 { 3234 PetscFunctionBegin; 3235 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3236 PetscValidPointer(field, 3); 3237 if (!dm->fields) 
SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3238 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3239 *field = (PetscObject) dm->fields[f]; 3240 PetscFunctionReturn(0); 3241 } 3242 3243 #undef __FUNCT__ 3244 #define __FUNCT__ "DMSetField" 3245 PetscErrorCode DMSetField(DM dm, PetscInt f, PetscObject field) 3246 { 3247 PetscErrorCode ierr; 3248 3249 PetscFunctionBegin; 3250 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3251 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3252 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3253 if (((PetscObject) dm->fields[f]) == field) PetscFunctionReturn(0); 3254 ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr); 3255 dm->fields[f] = (PetscFE) field; 3256 ierr = PetscObjectReference((PetscObject) dm->fields[f]);CHKERRQ(ierr); 3257 PetscFunctionReturn(0); 3258 } 3259 3260 #undef __FUNCT__ 3261 #define __FUNCT__ "DMRestrictHook_Coordinates" 3262 PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx) 3263 { 3264 DM dm_coord,dmc_coord; 3265 PetscErrorCode ierr; 3266 Vec coords,ccoords; 3267 VecScatter scat; 3268 PetscFunctionBegin; 3269 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3270 ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr); 3271 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3272 ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr); 3273 if (coords && !ccoords) { 3274 ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr); 3275 ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr); 3276 ierr = 
VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3277 ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3278 ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr); 3279 ierr = VecScatterDestroy(&scat);CHKERRQ(ierr); 3280 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3281 } 3282 PetscFunctionReturn(0); 3283 } 3284 3285 #undef __FUNCT__ 3286 #define __FUNCT__ "DMSubDomainHook_Coordinates" 3287 static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx) 3288 { 3289 DM dm_coord,subdm_coord; 3290 PetscErrorCode ierr; 3291 Vec coords,ccoords,clcoords; 3292 VecScatter *scat_i,*scat_g; 3293 PetscFunctionBegin; 3294 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3295 ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr); 3296 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3297 ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr); 3298 if (coords && !ccoords) { 3299 ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr); 3300 ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr); 3301 ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr); 3302 ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3303 ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3304 ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3305 ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3306 ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr); 3307 ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr); 3308 ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr); 3309 ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr); 3310 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3311 ierr = VecDestroy(&clcoords);CHKERRQ(ierr); 3312 ierr = PetscFree(scat_i);CHKERRQ(ierr); 3313 ierr = 
PetscFree(scat_g);CHKERRQ(ierr); 3314 } 3315 PetscFunctionReturn(0); 3316 } 3317 3318 #undef __FUNCT__ 3319 #define __FUNCT__ "DMSetCoordinates" 3320 /*@ 3321 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3322 3323 Collective on DM 3324 3325 Input Parameters: 3326 + dm - the DM 3327 - c - coordinate vector 3328 3329 Note: 3330 The coordinates do include those for ghost points, which are in the local vector 3331 3332 Level: intermediate 3333 3334 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3335 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3336 @*/ 3337 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3338 { 3339 PetscErrorCode ierr; 3340 3341 PetscFunctionBegin; 3342 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3343 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3344 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3345 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3346 dm->coordinates = c; 3347 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3348 ierr = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3349 ierr = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3350 PetscFunctionReturn(0); 3351 } 3352 3353 #undef __FUNCT__ 3354 #define __FUNCT__ "DMSetCoordinatesLocal" 3355 /*@ 3356 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3357 3358 Collective on DM 3359 3360 Input Parameters: 3361 + dm - the DM 3362 - c - coordinate vector 3363 3364 Note: 3365 The coordinates of ghost points can be set using DMSetCoordinates() 3366 followed by DMGetCoordinatesLocal(). This is intended to enable the 3367 setting of ghost coordinates outside of the domain. 
3368 3369 Level: intermediate 3370 3371 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3372 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3373 @*/ 3374 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3375 { 3376 PetscErrorCode ierr; 3377 3378 PetscFunctionBegin; 3379 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3380 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3381 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3382 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3383 3384 dm->coordinatesLocal = c; 3385 3386 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3387 PetscFunctionReturn(0); 3388 } 3389 3390 #undef __FUNCT__ 3391 #define __FUNCT__ "DMGetCoordinates" 3392 /*@ 3393 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3394 3395 Not Collective 3396 3397 Input Parameter: 3398 . dm - the DM 3399 3400 Output Parameter: 3401 . c - global coordinate vector 3402 3403 Note: 3404 This is a borrowed reference, so the user should NOT destroy this vector 3405 3406 Each process has only the local coordinates (does NOT have the ghost coordinates). 3407 3408 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3409 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3410 3411 Level: intermediate 3412 3413 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3414 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3415 @*/ 3416 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3417 { 3418 PetscErrorCode ierr; 3419 3420 PetscFunctionBegin; 3421 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3422 PetscValidPointer(c,2); 3423 if (!dm->coordinates && dm->coordinatesLocal) { 3424 DM cdm = NULL; 3425 3426 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3427 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3428 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3429 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3430 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3431 } 3432 *c = dm->coordinates; 3433 PetscFunctionReturn(0); 3434 } 3435 3436 #undef __FUNCT__ 3437 #define __FUNCT__ "DMGetCoordinatesLocal" 3438 /*@ 3439 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3440 3441 Collective on DM 3442 3443 Input Parameter: 3444 . dm - the DM 3445 3446 Output Parameter: 3447 . c - coordinate vector 3448 3449 Note: 3450 This is a borrowed reference, so the user should NOT destroy this vector 3451 3452 Each process has the local and ghost coordinates 3453 3454 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3455 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3456 3457 Level: intermediate 3458 3459 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3460 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3461 @*/ 3462 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3463 { 3464 PetscErrorCode ierr; 3465 3466 PetscFunctionBegin; 3467 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3468 PetscValidPointer(c,2); 3469 if (!dm->coordinatesLocal && dm->coordinates) { 3470 DM cdm = NULL; 3471 3472 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3473 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3474 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3475 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3476 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3477 } 3478 *c = dm->coordinatesLocal; 3479 PetscFunctionReturn(0); 3480 } 3481 3482 #undef __FUNCT__ 3483 #define __FUNCT__ "DMGetCoordinateDM" 3484 /*@ 3485 DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates 3486 3487 Collective on DM 3488 3489 Input Parameter: 3490 . dm - the DM 3491 3492 Output Parameter: 3493 . 
cdm - coordinate DM 3494 3495 Level: intermediate 3496 3497 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3498 .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3499 @*/ 3500 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3501 { 3502 PetscErrorCode ierr; 3503 3504 PetscFunctionBegin; 3505 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3506 PetscValidPointer(cdm,2); 3507 if (!dm->coordinateDM) { 3508 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3509 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3510 } 3511 *cdm = dm->coordinateDM; 3512 PetscFunctionReturn(0); 3513 } 3514 3515 #undef __FUNCT__ 3516 #define __FUNCT__ "DMSetCoordinateDM" 3517 /*@ 3518 DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates 3519 3520 Logically Collective on DM 3521 3522 Input Parameters: 3523 + dm - the DM 3524 - cdm - coordinate DM 3525 3526 Level: intermediate 3527 3528 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3529 .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3530 @*/ 3531 PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm) 3532 { 3533 PetscErrorCode ierr; 3534 3535 PetscFunctionBegin; 3536 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3537 PetscValidHeaderSpecific(cdm,DM_CLASSID,2); 3538 ierr = DMDestroy(&dm->coordinateDM);CHKERRQ(ierr); 3539 dm->coordinateDM = cdm; 3540 ierr = PetscObjectReference((PetscObject) dm->coordinateDM);CHKERRQ(ierr); 3541 PetscFunctionReturn(0); 3542 } 3543 3544 #undef __FUNCT__ 3545 #define __FUNCT__ "DMGetCoordinateSection" 3546 /*@ 3547 DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh. 3548 3549 Not Collective 3550 3551 Input Parameter: 3552 . 
dm - The DM object 3553 3554 Output Parameter: 3555 . section - The PetscSection object 3556 3557 Level: intermediate 3558 3559 .keywords: mesh, coordinates 3560 .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection() 3561 @*/ 3562 PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section) 3563 { 3564 DM cdm; 3565 PetscErrorCode ierr; 3566 3567 PetscFunctionBegin; 3568 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3569 PetscValidPointer(section, 2); 3570 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3571 ierr = DMGetDefaultSection(cdm, section);CHKERRQ(ierr); 3572 PetscFunctionReturn(0); 3573 } 3574 3575 #undef __FUNCT__ 3576 #define __FUNCT__ "DMSetCoordinateSection" 3577 /*@ 3578 DMSetCoordinateSection - Set the layout of coordinate values over the mesh. 3579 3580 Not Collective 3581 3582 Input Parameters: 3583 + dm - The DM object 3584 - section - The PetscSection object 3585 3586 Level: intermediate 3587 3588 .keywords: mesh, coordinates 3589 .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection() 3590 @*/ 3591 PetscErrorCode DMSetCoordinateSection(DM dm, PetscSection section) 3592 { 3593 DM cdm; 3594 PetscErrorCode ierr; 3595 3596 PetscFunctionBegin; 3597 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3598 PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 3599 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3600 ierr = DMSetDefaultSection(cdm, section);CHKERRQ(ierr); 3601 PetscFunctionReturn(0); 3602 } 3603 3604 #undef __FUNCT__ 3605 #define __FUNCT__ "DMLocatePoints" 3606 /*@ 3607 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3608 3609 Not collective 3610 3611 Input Parameters: 3612 + dm - The DM 3613 - v - The Vec of points 3614 3615 Output Parameter: 3616 . 
cells - The local cell numbers for cells which contain the points 3617 3618 Level: developer 3619 3620 .keywords: point location, mesh 3621 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3622 @*/ 3623 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells) 3624 { 3625 PetscErrorCode ierr; 3626 3627 PetscFunctionBegin; 3628 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3629 PetscValidHeaderSpecific(v,VEC_CLASSID,2); 3630 PetscValidPointer(cells,3); 3631 if (dm->ops->locatepoints) { 3632 ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr); 3633 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM"); 3634 PetscFunctionReturn(0); 3635 } 3636 3637 #undef __FUNCT__ 3638 #define __FUNCT__ "DMGetOutputDM" 3639 PetscErrorCode DMGetOutputDM(DM dm, DM *odm) 3640 { 3641 PetscErrorCode ierr; 3642 3643 PetscFunctionBegin; 3644 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3645 PetscValidPointer(odm,2); 3646 if (!dm->dmBC) { 3647 PetscSection section, newSection, gsection; 3648 PetscSF sf; 3649 3650 ierr = DMClone(dm, &dm->dmBC);CHKERRQ(ierr); 3651 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3652 ierr = PetscSectionClone(section, &newSection);CHKERRQ(ierr); 3653 ierr = DMSetDefaultSection(dm->dmBC, newSection);CHKERRQ(ierr); 3654 ierr = PetscSectionDestroy(&newSection);CHKERRQ(ierr); 3655 ierr = DMGetPointSF(dm->dmBC, &sf);CHKERRQ(ierr); 3656 ierr = PetscSectionCreateGlobalSection(section, sf, PETSC_TRUE, &gsection);CHKERRQ(ierr); 3657 ierr = DMSetDefaultGlobalSection(dm->dmBC, gsection);CHKERRQ(ierr); 3658 ierr = PetscSectionDestroy(&gsection);CHKERRQ(ierr); 3659 } 3660 *odm = dm->dmBC; 3661 PetscFunctionReturn(0); 3662 } 3663