1 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 2 #include <petscsf.h> 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal; 6 7 const char *const DMBoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","TWIST","DM_BOUNDARY_",0}; 8 9 #undef __FUNCT__ 10 #define __FUNCT__ "DMCreate" 11 /*@ 12 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 13 14 If you never call DMSetType() it will generate an 15 error when you try to use the vector. 16 17 Collective on MPI_Comm 18 19 Input Parameter: 20 . comm - The communicator for the DM object 21 22 Output Parameter: 23 . dm - The DM object 24 25 Level: beginner 26 27 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 28 @*/ 29 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 30 { 31 DM v; 32 PetscErrorCode ierr; 33 34 PetscFunctionBegin; 35 PetscValidPointer(dm,2); 36 *dm = NULL; 37 ierr = PetscSysInitializePackage();CHKERRQ(ierr); 38 ierr = VecInitializePackage();CHKERRQ(ierr); 39 ierr = MatInitializePackage();CHKERRQ(ierr); 40 ierr = DMInitializePackage();CHKERRQ(ierr); 41 42 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 43 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 44 45 46 v->ltogmap = NULL; 47 v->ltogmapb = NULL; 48 v->bs = 1; 49 v->coloringtype = IS_COLORING_GLOBAL; 50 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 51 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 52 v->defaultSection = NULL; 53 v->defaultGlobalSection = NULL; 54 { 55 PetscInt i; 56 for (i = 0; i < 10; ++i) { 57 v->nullspaceConstructors[i] = NULL; 58 } 59 } 60 v->numFields = 0; 61 v->fields = NULL; 62 ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr); 63 ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr); 64 *dm = v; 65 PetscFunctionReturn(0); 66 } 67 68 #undef __FUNCT__ 69 #define __FUNCT__ "DMClone" 70 /*@ 71 DMClone - Creates a DM object 
with the same topology as the original. 72 73 Collective on MPI_Comm 74 75 Input Parameter: 76 . dm - The original DM object 77 78 Output Parameter: 79 . newdm - The new DM object 80 81 Level: beginner 82 83 .keywords: DM, topology, create 84 @*/ 85 PetscErrorCode DMClone(DM dm, DM *newdm) 86 { 87 PetscSF sf; 88 Vec coords; 89 void *ctx; 90 PetscErrorCode ierr; 91 92 PetscFunctionBegin; 93 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 94 PetscValidPointer(newdm,2); 95 ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr); 96 if (dm->ops->clone) { 97 ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr); 98 } 99 (*newdm)->setupcalled = PETSC_TRUE; 100 ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr); 101 ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr); 102 ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr); 103 ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr); 104 ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr); 105 if (coords) { 106 ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr); 107 } else { 108 ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr); 109 if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);} 110 } 111 PetscFunctionReturn(0); 112 } 113 114 #undef __FUNCT__ 115 #define __FUNCT__ "DMSetVecType" 116 /*@C 117 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 118 119 Logically Collective on DMDA 120 121 Input Parameter: 122 + da - initial distributed array 123 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 124 125 Options Database: 126 . 
-dm_vec_type ctype 127 128 Level: intermediate 129 130 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType() 131 @*/ 132 PetscErrorCode DMSetVecType(DM da,VecType ctype) 133 { 134 PetscErrorCode ierr; 135 136 PetscFunctionBegin; 137 PetscValidHeaderSpecific(da,DM_CLASSID,1); 138 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 139 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 140 PetscFunctionReturn(0); 141 } 142 143 #undef __FUNCT__ 144 #define __FUNCT__ "DMGetVecType" 145 /*@C 146 DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 147 148 Logically Collective on DMDA 149 150 Input Parameter: 151 . da - initial distributed array 152 153 Output Parameter: 154 . ctype - the vector type 155 156 Level: intermediate 157 158 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 159 @*/ 160 PetscErrorCode DMGetVecType(DM da,VecType *ctype) 161 { 162 PetscFunctionBegin; 163 PetscValidHeaderSpecific(da,DM_CLASSID,1); 164 *ctype = da->vectype; 165 PetscFunctionReturn(0); 166 } 167 168 #undef __FUNCT__ 169 #define __FUNCT__ "VecGetDM" 170 /*@ 171 VecGetDM - Gets the DM defining the data layout of the vector 172 173 Not collective 174 175 Input Parameter: 176 . v - The Vec 177 178 Output Parameter: 179 . 
dm - The DM 180 181 Level: intermediate 182 183 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 184 @*/ 185 PetscErrorCode VecGetDM(Vec v, DM *dm) 186 { 187 PetscErrorCode ierr; 188 189 PetscFunctionBegin; 190 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 191 PetscValidPointer(dm,2); 192 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 193 PetscFunctionReturn(0); 194 } 195 196 #undef __FUNCT__ 197 #define __FUNCT__ "VecSetDM" 198 /*@ 199 VecSetDM - Sets the DM defining the data layout of the vector 200 201 Not collective 202 203 Input Parameters: 204 + v - The Vec 205 - dm - The DM 206 207 Level: intermediate 208 209 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 210 @*/ 211 PetscErrorCode VecSetDM(Vec v, DM dm) 212 { 213 PetscErrorCode ierr; 214 215 PetscFunctionBegin; 216 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 217 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 218 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 219 PetscFunctionReturn(0); 220 } 221 222 #undef __FUNCT__ 223 #define __FUNCT__ "DMSetMatType" 224 /*@C 225 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 226 227 Logically Collective on DM 228 229 Input Parameter: 230 + dm - the DM context 231 . ctype - the matrix type 232 233 Options Database: 234 . 
-dm_mat_type ctype 235 236 Level: intermediate 237 238 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType() 239 @*/ 240 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 241 { 242 PetscErrorCode ierr; 243 244 PetscFunctionBegin; 245 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 246 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 247 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 248 PetscFunctionReturn(0); 249 } 250 251 #undef __FUNCT__ 252 #define __FUNCT__ "DMGetMatType" 253 /*@C 254 DMGetMatType - Gets the type of matrix created with DMCreateMatrix() 255 256 Logically Collective on DM 257 258 Input Parameter: 259 . dm - the DM context 260 261 Output Parameter: 262 . ctype - the matrix type 263 264 Options Database: 265 . -dm_mat_type ctype 266 267 Level: intermediate 268 269 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType() 270 @*/ 271 PetscErrorCode DMGetMatType(DM dm,MatType *ctype) 272 { 273 PetscFunctionBegin; 274 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 275 *ctype = dm->mattype; 276 PetscFunctionReturn(0); 277 } 278 279 #undef __FUNCT__ 280 #define __FUNCT__ "MatGetDM" 281 /*@ 282 MatGetDM - Gets the DM defining the data layout of the matrix 283 284 Not collective 285 286 Input Parameter: 287 . A - The Mat 288 289 Output Parameter: 290 . 
dm - The DM 291 292 Level: intermediate 293 294 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 295 @*/ 296 PetscErrorCode MatGetDM(Mat A, DM *dm) 297 { 298 PetscErrorCode ierr; 299 300 PetscFunctionBegin; 301 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 302 PetscValidPointer(dm,2); 303 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 304 PetscFunctionReturn(0); 305 } 306 307 #undef __FUNCT__ 308 #define __FUNCT__ "MatSetDM" 309 /*@ 310 MatSetDM - Sets the DM defining the data layout of the matrix 311 312 Not collective 313 314 Input Parameters: 315 + A - The Mat 316 - dm - The DM 317 318 Level: intermediate 319 320 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 321 @*/ 322 PetscErrorCode MatSetDM(Mat A, DM dm) 323 { 324 PetscErrorCode ierr; 325 326 PetscFunctionBegin; 327 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 328 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 329 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 330 PetscFunctionReturn(0); 331 } 332 333 #undef __FUNCT__ 334 #define __FUNCT__ "DMSetOptionsPrefix" 335 /*@C 336 DMSetOptionsPrefix - Sets the prefix used for searching for all 337 DMDA options in the database. 338 339 Logically Collective on DMDA 340 341 Input Parameter: 342 + da - the DMDA context 343 - prefix - the prefix to prepend to all option names 344 345 Notes: 346 A hyphen (-) must NOT be given at the beginning of the prefix name. 347 The first character of all runtime options is AUTOMATICALLY the hyphen. 
348 349 Level: advanced 350 351 .keywords: DMDA, set, options, prefix, database 352 353 .seealso: DMSetFromOptions() 354 @*/ 355 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 356 { 357 PetscErrorCode ierr; 358 359 PetscFunctionBegin; 360 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 361 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 362 PetscFunctionReturn(0); 363 } 364 365 #undef __FUNCT__ 366 #define __FUNCT__ "DMDestroy" 367 /*@ 368 DMDestroy - Destroys a vector packer or DMDA. 369 370 Collective on DM 371 372 Input Parameter: 373 . dm - the DM object to destroy 374 375 Level: developer 376 377 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 378 379 @*/ 380 PetscErrorCode DMDestroy(DM *dm) 381 { 382 PetscInt i, cnt = 0, f; 383 DMNamedVecLink nlink,nnext; 384 PetscErrorCode ierr; 385 386 PetscFunctionBegin; 387 if (!*dm) PetscFunctionReturn(0); 388 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 389 390 /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */ 391 for (f = 0; f < (*dm)->numFields; ++f) { 392 ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr); 393 ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr); 394 ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr); 395 } 396 /* count all the circular references of DM and its contained Vecs */ 397 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 398 if ((*dm)->localin[i]) cnt++; 399 if ((*dm)->globalin[i]) cnt++; 400 } 401 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 402 for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++; 403 if ((*dm)->x) { 404 DM obj; 405 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 406 if (obj == *dm) cnt++; 407 } 408 409 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; 
PetscFunctionReturn(0);} 410 /* 411 Need this test because the dm references the vectors that 412 reference the dm, so destroying the dm calls destroy on the 413 vectors that cause another destroy on the dm 414 */ 415 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 416 ((PetscObject) (*dm))->refct = 0; 417 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 418 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 419 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 420 } 421 nnext=(*dm)->namedglobal; 422 (*dm)->namedglobal = NULL; 423 for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named vectors */ 424 nnext = nlink->next; 425 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 426 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 427 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 428 ierr = PetscFree(nlink);CHKERRQ(ierr); 429 } 430 nnext=(*dm)->namedlocal; 431 (*dm)->namedlocal = NULL; 432 for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named local vectors */ 433 nnext = nlink->next; 434 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 435 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 436 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 437 ierr = PetscFree(nlink);CHKERRQ(ierr); 438 } 439 440 /* Destroy the list of hooks */ 441 { 442 DMCoarsenHookLink link,next; 443 for (link=(*dm)->coarsenhook; link; link=next) { 444 next = link->next; 445 ierr = PetscFree(link);CHKERRQ(ierr); 446 } 447 (*dm)->coarsenhook = NULL; 448 } 449 { 450 DMRefineHookLink link,next; 451 for (link=(*dm)->refinehook; link; link=next) { 452 next = link->next; 453 ierr = PetscFree(link);CHKERRQ(ierr); 454 } 455 (*dm)->refinehook = NULL; 456 } 457 { 458 DMSubDomainHookLink link,next; 459 for 
(link=(*dm)->subdomainhook; link; link=next) { 460 next = link->next; 461 ierr = PetscFree(link);CHKERRQ(ierr); 462 } 463 (*dm)->subdomainhook = NULL; 464 } 465 { 466 DMGlobalToLocalHookLink link,next; 467 for (link=(*dm)->gtolhook; link; link=next) { 468 next = link->next; 469 ierr = PetscFree(link);CHKERRQ(ierr); 470 } 471 (*dm)->gtolhook = NULL; 472 } 473 /* Destroy the work arrays */ 474 { 475 DMWorkLink link,next; 476 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 477 for (link=(*dm)->workin; link; link=next) { 478 next = link->next; 479 ierr = PetscFree(link->mem);CHKERRQ(ierr); 480 ierr = PetscFree(link);CHKERRQ(ierr); 481 } 482 (*dm)->workin = NULL; 483 } 484 485 ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr); 486 ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr); 487 ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr); 488 489 if ((*dm)->ctx && (*dm)->ctxdestroy) { 490 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 491 } 492 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 493 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 494 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 495 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 496 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 497 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 498 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 499 500 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 501 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 502 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 503 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 504 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 505 506 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 507 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 508 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 509 510 for (f = 0; f < (*dm)->numFields; ++f) { 511 ierr = 
PetscObjectDestroy((PetscObject *) &(*dm)->fields[f]);CHKERRQ(ierr); 512 } 513 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 514 /* if memory was published with SAWs then destroy it */ 515 ierr = PetscObjectSAWsViewOff((PetscObject)*dm);CHKERRQ(ierr); 516 517 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 518 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 519 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 520 PetscFunctionReturn(0); 521 } 522 523 #undef __FUNCT__ 524 #define __FUNCT__ "DMSetUp" 525 /*@ 526 DMSetUp - sets up the data structures inside a DM object 527 528 Collective on DM 529 530 Input Parameter: 531 . dm - the DM object to setup 532 533 Level: developer 534 535 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 536 537 @*/ 538 PetscErrorCode DMSetUp(DM dm) 539 { 540 PetscErrorCode ierr; 541 542 PetscFunctionBegin; 543 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 544 if (dm->setupcalled) PetscFunctionReturn(0); 545 if (dm->ops->setup) { 546 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 547 } 548 dm->setupcalled = PETSC_TRUE; 549 PetscFunctionReturn(0); 550 } 551 552 #undef __FUNCT__ 553 #define __FUNCT__ "DMSetFromOptions" 554 /*@ 555 DMSetFromOptions - sets parameters in a DM from the options database 556 557 Collective on DM 558 559 Input Parameter: 560 . dm - the DM object to set options for 561 562 Options Database: 563 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 564 . -dm_vec_type <type> type of vector to create inside DM 565 . 
-dm_mat_type <type> type of matrix to create inside DM 566 - -dm_coloring_type <global or ghosted> 567 568 Level: developer 569 570 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 571 572 @*/ 573 PetscErrorCode DMSetFromOptions(DM dm) 574 { 575 char typeName[256]; 576 PetscBool flg; 577 PetscErrorCode ierr; 578 579 PetscFunctionBegin; 580 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 581 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 582 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 583 ierr = PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 584 if (flg) { 585 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 586 } 587 ierr = PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 588 if (flg) { 589 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 590 } 591 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 592 if (dm->ops->setfromoptions) { 593 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 594 } 595 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 596 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 597 ierr = PetscOptionsEnd();CHKERRQ(ierr); 598 PetscFunctionReturn(0); 599 } 600 601 #undef __FUNCT__ 602 #define __FUNCT__ "DMView" 603 /*@C 604 DMView - Views a vector packer or DMDA. 
605 606 Collective on DM 607 608 Input Parameter: 609 + dm - the DM object to view 610 - v - the viewer 611 612 Level: developer 613 614 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 615 616 @*/ 617 PetscErrorCode DMView(DM dm,PetscViewer v) 618 { 619 PetscErrorCode ierr; 620 PetscBool isbinary; 621 622 PetscFunctionBegin; 623 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 624 if (!v) { 625 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr); 626 } 627 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)dm,v);CHKERRQ(ierr); 628 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 629 if (isbinary) { 630 PetscInt classid = DM_FILE_CLASSID; 631 char type[256]; 632 633 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 634 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 635 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 636 } 637 if (dm->ops->view) { 638 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 639 } 640 PetscFunctionReturn(0); 641 } 642 643 #undef __FUNCT__ 644 #define __FUNCT__ "DMCreateGlobalVector" 645 /*@ 646 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 647 648 Collective on DM 649 650 Input Parameter: 651 . dm - the DM object 652 653 Output Parameter: 654 . 
vec - the global vector 655 656 Level: beginner 657 658 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 659 660 @*/ 661 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 662 { 663 PetscErrorCode ierr; 664 665 PetscFunctionBegin; 666 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 667 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 668 PetscFunctionReturn(0); 669 } 670 671 #undef __FUNCT__ 672 #define __FUNCT__ "DMCreateLocalVector" 673 /*@ 674 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 675 676 Not Collective 677 678 Input Parameter: 679 . dm - the DM object 680 681 Output Parameter: 682 . vec - the local vector 683 684 Level: beginner 685 686 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 687 688 @*/ 689 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 690 { 691 PetscErrorCode ierr; 692 693 PetscFunctionBegin; 694 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 695 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 696 PetscFunctionReturn(0); 697 } 698 699 #undef __FUNCT__ 700 #define __FUNCT__ "DMGetLocalToGlobalMapping" 701 /*@ 702 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 703 704 Collective on DM 705 706 Input Parameter: 707 . dm - the DM that provides the mapping 708 709 Output Parameter: 710 . ltog - the mapping 711 712 Level: intermediate 713 714 Notes: 715 This mapping can then be used by VecSetLocalToGlobalMapping() or 716 MatSetLocalToGlobalMapping(). 
717 718 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 719 @*/ 720 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 721 { 722 PetscErrorCode ierr; 723 724 PetscFunctionBegin; 725 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 726 PetscValidPointer(ltog,2); 727 if (!dm->ltogmap) { 728 PetscSection section, sectionGlobal; 729 730 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 731 if (section) { 732 PetscInt *ltog; 733 PetscInt pStart, pEnd, size, p, l; 734 735 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 736 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 737 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 738 ierr = PetscMalloc1(size, <og);CHKERRQ(ierr); /* We want the local+overlap size */ 739 for (p = pStart, l = 0; p < pEnd; ++p) { 740 PetscInt dof, off, c; 741 742 /* Should probably use constrained dofs */ 743 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 744 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 745 for (c = 0; c < dof; ++c, ++l) { 746 ltog[l] = off+c; 747 } 748 } 749 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 750 ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr); 751 } else { 752 if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 753 ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr); 754 } 755 } 756 *ltog = dm->ltogmap; 757 PetscFunctionReturn(0); 758 } 759 760 #undef __FUNCT__ 761 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 762 /*@ 763 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 764 765 Collective on DM 766 767 Input Parameter: 768 . da - the distributed array that provides the mapping 769 770 Output Parameter: 771 . 
ltog - the block mapping 772 773 Level: intermediate 774 775 Notes: 776 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 777 MatSetLocalToGlobalMappingBlock(). 778 779 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 780 @*/ 781 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 782 { 783 PetscErrorCode ierr; 784 785 PetscFunctionBegin; 786 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 787 PetscValidPointer(ltog,2); 788 if (!dm->ltogmapb) { 789 PetscInt bs; 790 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 791 if (bs > 1) { 792 if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 793 ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 794 } else { 795 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 796 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 797 } 798 } 799 *ltog = dm->ltogmapb; 800 PetscFunctionReturn(0); 801 } 802 803 #undef __FUNCT__ 804 #define __FUNCT__ "DMGetBlockSize" 805 /*@ 806 DMGetBlockSize - Gets the inherent block size associated with a DM 807 808 Not Collective 809 810 Input Parameter: 811 . dm - the DM with block structure 812 813 Output Parameter: 814 . 
bs - the block size, 1 implies no exploitable block structure 815 816 Level: intermediate 817 818 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 819 @*/ 820 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 821 { 822 PetscFunctionBegin; 823 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 824 PetscValidPointer(bs,2); 825 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 826 *bs = dm->bs; 827 PetscFunctionReturn(0); 828 } 829 830 #undef __FUNCT__ 831 #define __FUNCT__ "DMCreateInterpolation" 832 /*@ 833 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 834 835 Collective on DM 836 837 Input Parameter: 838 + dm1 - the DM object 839 - dm2 - the second, finer DM object 840 841 Output Parameter: 842 + mat - the interpolation 843 - vec - the scaling (optional) 844 845 Level: developer 846 847 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 848 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 849 850 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 851 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
852 853 854 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 855 856 @*/ 857 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 858 { 859 PetscErrorCode ierr; 860 861 PetscFunctionBegin; 862 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 863 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 864 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 865 PetscFunctionReturn(0); 866 } 867 868 #undef __FUNCT__ 869 #define __FUNCT__ "DMCreateInjection" 870 /*@ 871 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 872 873 Collective on DM 874 875 Input Parameter: 876 + dm1 - the DM object 877 - dm2 - the second, finer DM object 878 879 Output Parameter: 880 . ctx - the injection 881 882 Level: developer 883 884 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 885 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 886 887 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 888 889 @*/ 890 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 891 { 892 PetscErrorCode ierr; 893 894 PetscFunctionBegin; 895 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 896 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 897 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 898 PetscFunctionReturn(0); 899 } 900 901 #undef __FUNCT__ 902 #define __FUNCT__ "DMCreateColoring" 903 /*@ 904 DMCreateColoring - Gets coloring for a DM 905 906 Collective on DM 907 908 Input Parameter: 909 + dm - the DM object 910 - ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 911 912 Output Parameter: 913 . 
coloring - the coloring 914 915 Level: developer 916 917 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType() 918 919 @*/ 920 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring) 921 { 922 PetscErrorCode ierr; 923 924 PetscFunctionBegin; 925 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 926 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 927 ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr); 928 PetscFunctionReturn(0); 929 } 930 931 #undef __FUNCT__ 932 #define __FUNCT__ "DMCreateMatrix" 933 /*@ 934 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 935 936 Collective on DM 937 938 Input Parameter: 939 . dm - the DM object 940 941 Output Parameter: 942 . mat - the empty Jacobian 943 944 Level: beginner 945 946 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 947 do not need to do it yourself. 948 949 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 950 the nonzero pattern call DMDASetMatPreallocateOnly() 951 952 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 953 internally by PETSc. 954 955 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 956 the indices for the global numbering for DMDAs which is complicated. 
957 958 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType() 959 960 @*/ 961 PetscErrorCode DMCreateMatrix(DM dm,Mat *mat) 962 { 963 PetscErrorCode ierr; 964 965 PetscFunctionBegin; 966 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 967 ierr = MatInitializePackage();CHKERRQ(ierr); 968 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 969 PetscValidPointer(mat,3); 970 ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr); 971 PetscFunctionReturn(0); 972 } 973 974 #undef __FUNCT__ 975 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 976 /*@ 977 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 978 preallocated but the nonzero structure and zero values will not be set. 979 980 Logically Collective on DMDA 981 982 Input Parameter: 983 + dm - the DM 984 - only - PETSC_TRUE if only want preallocation 985 986 Level: developer 987 .seealso DMCreateMatrix() 988 @*/ 989 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 990 { 991 PetscFunctionBegin; 992 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 993 dm->prealloc_only = only; 994 PetscFunctionReturn(0); 995 } 996 997 #undef __FUNCT__ 998 #define __FUNCT__ "DMGetWorkArray" 999 /*@C 1000 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1001 1002 Not Collective 1003 1004 Input Parameters: 1005 + dm - the DM object 1006 . count - The minium size 1007 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1008 1009 Output Parameter: 1010 . 
array - the work array 1011 1012 Level: developer 1013 1014 .seealso DMDestroy(), DMCreate() 1015 @*/ 1016 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1017 { 1018 PetscErrorCode ierr; 1019 DMWorkLink link; 1020 size_t size; 1021 1022 PetscFunctionBegin; 1023 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1024 PetscValidPointer(mem,4); 1025 if (dm->workin) { 1026 link = dm->workin; 1027 dm->workin = dm->workin->next; 1028 } else { 1029 ierr = PetscNewLog(dm,&link);CHKERRQ(ierr); 1030 } 1031 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 1032 if (size*count > link->bytes) { 1033 ierr = PetscFree(link->mem);CHKERRQ(ierr); 1034 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 1035 link->bytes = size*count; 1036 } 1037 link->next = dm->workout; 1038 dm->workout = link; 1039 *(void**)mem = link->mem; 1040 PetscFunctionReturn(0); 1041 } 1042 1043 #undef __FUNCT__ 1044 #define __FUNCT__ "DMRestoreWorkArray" 1045 /*@C 1046 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1047 1048 Not Collective 1049 1050 Input Parameters: 1051 + dm - the DM object 1052 . count - The minium size 1053 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1054 1055 Output Parameter: 1056 . 
array - the work array 1057 1058 Level: developer 1059 1060 .seealso DMDestroy(), DMCreate() 1061 @*/ 1062 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1063 { 1064 DMWorkLink *p,link; 1065 1066 PetscFunctionBegin; 1067 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1068 PetscValidPointer(mem,4); 1069 for (p=&dm->workout; (link=*p); p=&link->next) { 1070 if (link->mem == *(void**)mem) { 1071 *p = link->next; 1072 link->next = dm->workin; 1073 dm->workin = link; 1074 *(void**)mem = NULL; 1075 PetscFunctionReturn(0); 1076 } 1077 } 1078 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1079 PetscFunctionReturn(0); 1080 } 1081 1082 #undef __FUNCT__ 1083 #define __FUNCT__ "DMSetNullSpaceConstructor" 1084 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1085 { 1086 PetscFunctionBegin; 1087 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1088 if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1089 dm->nullspaceConstructors[field] = nullsp; 1090 PetscFunctionReturn(0); 1091 } 1092 1093 #undef __FUNCT__ 1094 #define __FUNCT__ "DMCreateFieldIS" 1095 /*@C 1096 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1097 1098 Not collective 1099 1100 Input Parameter: 1101 . dm - the DM object 1102 1103 Output Parameters: 1104 + numFields - The number of fields (or NULL if not requested) 1105 . fieldNames - The name for each field (or NULL if not requested) 1106 - fields - The global indices for each field (or NULL if not requested) 1107 1108 Level: intermediate 1109 1110 Notes: 1111 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1112 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1113 PetscFree(). 1114 1115 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1116 @*/ 1117 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1118 { 1119 PetscSection section, sectionGlobal; 1120 PetscErrorCode ierr; 1121 1122 PetscFunctionBegin; 1123 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1124 if (numFields) { 1125 PetscValidPointer(numFields,2); 1126 *numFields = 0; 1127 } 1128 if (fieldNames) { 1129 PetscValidPointer(fieldNames,3); 1130 *fieldNames = NULL; 1131 } 1132 if (fields) { 1133 PetscValidPointer(fields,4); 1134 *fields = NULL; 1135 } 1136 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1137 if (section) { 1138 PetscInt *fieldSizes, **fieldIndices; 1139 PetscInt nF, f, pStart, pEnd, p; 1140 1141 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1142 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1143 ierr = PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);CHKERRQ(ierr); 1144 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1145 for (f = 0; f < nF; ++f) { 1146 fieldSizes[f] = 0; 1147 } 1148 for (p = pStart; p < pEnd; ++p) { 1149 PetscInt gdof; 1150 1151 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1152 if (gdof > 0) { 1153 for (f = 0; f < nF; ++f) { 1154 PetscInt fdof, fcdof; 1155 1156 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1157 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1158 fieldSizes[f] += fdof-fcdof; 1159 } 1160 } 1161 } 1162 for (f = 0; f < nF; ++f) { 1163 ierr = PetscMalloc1(fieldSizes[f], &fieldIndices[f]);CHKERRQ(ierr); 1164 fieldSizes[f] = 0; 1165 } 1166 for (p = pStart; p < pEnd; ++p) { 1167 PetscInt gdof, goff; 1168 1169 ierr = 
PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1170 if (gdof > 0) { 1171 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1172 for (f = 0; f < nF; ++f) { 1173 PetscInt fdof, fcdof, fc; 1174 1175 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1176 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1177 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1178 fieldIndices[f][fieldSizes[f]] = goff++; 1179 } 1180 } 1181 } 1182 } 1183 if (numFields) *numFields = nF; 1184 if (fieldNames) { 1185 ierr = PetscMalloc1(nF, fieldNames);CHKERRQ(ierr); 1186 for (f = 0; f < nF; ++f) { 1187 const char *fieldName; 1188 1189 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1190 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1191 } 1192 } 1193 if (fields) { 1194 ierr = PetscMalloc1(nF, fields);CHKERRQ(ierr); 1195 for (f = 0; f < nF; ++f) { 1196 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1197 } 1198 } 1199 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1200 } else if (dm->ops->createfieldis) { 1201 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1202 } 1203 PetscFunctionReturn(0); 1204 } 1205 1206 1207 #undef __FUNCT__ 1208 #define __FUNCT__ "DMCreateFieldDecomposition" 1209 /*@C 1210 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1211 corresponding to different fields: each IS contains the global indices of the dofs of the 1212 corresponding field. The optional list of DMs define the DM for each subproblem. 1213 Generalizes DMCreateFieldIS(). 1214 1215 Not collective 1216 1217 Input Parameter: 1218 . dm - the DM object 1219 1220 Output Parameters: 1221 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1222 . 
namelist - The name for each field (or NULL if not requested) 1223 . islist - The global indices for each field (or NULL if not requested) 1224 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1225 1226 Level: intermediate 1227 1228 Notes: 1229 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1230 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1231 and all of the arrays should be freed with PetscFree(). 1232 1233 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1234 @*/ 1235 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1236 { 1237 PetscErrorCode ierr; 1238 1239 PetscFunctionBegin; 1240 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1241 if (len) { 1242 PetscValidPointer(len,2); 1243 *len = 0; 1244 } 1245 if (namelist) { 1246 PetscValidPointer(namelist,3); 1247 *namelist = 0; 1248 } 1249 if (islist) { 1250 PetscValidPointer(islist,4); 1251 *islist = 0; 1252 } 1253 if (dmlist) { 1254 PetscValidPointer(dmlist,5); 1255 *dmlist = 0; 1256 } 1257 /* 1258 Is it a good idea to apply the following check across all impls? 1259 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1260 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1261 */ 1262 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1263 if (!dm->ops->createfielddecomposition) { 1264 PetscSection section; 1265 PetscInt numFields, f; 1266 1267 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1268 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1269 if (section && numFields && dm->ops->createsubdm) { 1270 *len = numFields; 1271 if (namelist) {ierr = PetscMalloc1(numFields,namelist);CHKERRQ(ierr);} 1272 if (islist) {ierr = PetscMalloc1(numFields,islist);CHKERRQ(ierr);} 1273 if (dmlist) {ierr = PetscMalloc1(numFields,dmlist);CHKERRQ(ierr);} 1274 for (f = 0; f < numFields; ++f) { 1275 const char *fieldName; 1276 1277 ierr = DMCreateSubDM(dm, 1, &f, islist ? &(*islist)[f] : NULL, dmlist ? &(*dmlist)[f] : NULL);CHKERRQ(ierr); 1278 if (namelist) { 1279 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1280 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1281 } 1282 } 1283 } else { 1284 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1285 /* By default there are no DMs associated with subproblems. */ 1286 if (dmlist) *dmlist = NULL; 1287 } 1288 } else { 1289 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1290 } 1291 PetscFunctionReturn(0); 1292 } 1293 1294 #undef __FUNCT__ 1295 #define __FUNCT__ "DMCreateSubDM" 1296 /*@C 1297 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1298 The fields are defined by DMCreateFieldIS(). 1299 1300 Not collective 1301 1302 Input Parameters: 1303 + dm - the DM object 1304 . numFields - number of fields in this subproblem 1305 - len - The number of subproblems in the decomposition (or NULL if not requested) 1306 1307 Output Parameters: 1308 . 
is - The global indices for the subproblem 1309 - dm - The DM for the subproblem 1310 1311 Level: intermediate 1312 1313 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1314 @*/ 1315 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1316 { 1317 PetscErrorCode ierr; 1318 1319 PetscFunctionBegin; 1320 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1321 PetscValidPointer(fields,3); 1322 if (is) PetscValidPointer(is,4); 1323 if (subdm) PetscValidPointer(subdm,5); 1324 if (dm->ops->createsubdm) { 1325 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr); 1326 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1327 PetscFunctionReturn(0); 1328 } 1329 1330 1331 #undef __FUNCT__ 1332 #define __FUNCT__ "DMCreateDomainDecomposition" 1333 /*@C 1334 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1335 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1336 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1337 define a nonoverlapping covering, while outer subdomains can overlap. 1338 The optional list of DMs define the DM for each subproblem. 1339 1340 Not collective 1341 1342 Input Parameter: 1343 . dm - the DM object 1344 1345 Output Parameters: 1346 + len - The number of subproblems in the domain decomposition (or NULL if not requested) 1347 . namelist - The name for each subdomain (or NULL if not requested) 1348 . innerislist - The global indices for each inner subdomain (or NULL, if not requested) 1349 . 
outerislist - The global indices for each outer subdomain (or NULL, if not requested) 1350 - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1351 1352 Level: intermediate 1353 1354 Notes: 1355 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1356 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1357 and all of the arrays should be freed with PetscFree(). 1358 1359 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1360 @*/ 1361 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1362 { 1363 PetscErrorCode ierr; 1364 DMSubDomainHookLink link; 1365 PetscInt i,l; 1366 1367 PetscFunctionBegin; 1368 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1369 if (len) {PetscValidPointer(len,2); *len = 0;} 1370 if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;} 1371 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;} 1372 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;} 1373 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;} 1374 /* 1375 Is it a good idea to apply the following check across all impls? 1376 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1377 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1378 */ 1379 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1380 if (dm->ops->createdomaindecomposition) { 1381 ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr); 1382 /* copy subdomain hooks and context over to the subdomain DMs */ 1383 if (dmlist) { 1384 for (i = 0; i < l; i++) { 1385 for (link=dm->subdomainhook; link; link=link->next) { 1386 if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);} 1387 } 1388 (*dmlist)[i]->ctx = dm->ctx; 1389 } 1390 } 1391 if (len) *len = l; 1392 } 1393 PetscFunctionReturn(0); 1394 } 1395 1396 1397 #undef __FUNCT__ 1398 #define __FUNCT__ "DMCreateDomainDecompositionScatters" 1399 /*@C 1400 DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector 1401 1402 Not collective 1403 1404 Input Parameters: 1405 + dm - the DM object 1406 . n - the number of subdomain scatters 1407 - subdms - the local subdomains 1408 1409 Output Parameters: 1410 + n - the number of scatters returned 1411 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain 1412 . oscat - scatter from global vector to overlapping global vector entries on subdomain 1413 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts) 1414 1415 Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution 1416 of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets 1417 of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of 1418 solution and residual data. 
1419 1420 Level: developer 1421 1422 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1423 @*/ 1424 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1425 { 1426 PetscErrorCode ierr; 1427 1428 PetscFunctionBegin; 1429 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1430 PetscValidPointer(subdms,3); 1431 if (dm->ops->createddscatters) { 1432 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1433 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1434 PetscFunctionReturn(0); 1435 } 1436 1437 #undef __FUNCT__ 1438 #define __FUNCT__ "DMRefine" 1439 /*@ 1440 DMRefine - Refines a DM object 1441 1442 Collective on DM 1443 1444 Input Parameter: 1445 + dm - the DM object 1446 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1447 1448 Output Parameter: 1449 . 
dmf - the refined DM, or NULL 1450 1451 Note: If no refinement was done, the return value is NULL 1452 1453 Level: developer 1454 1455 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1456 @*/ 1457 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1458 { 1459 PetscErrorCode ierr; 1460 DMRefineHookLink link; 1461 1462 PetscFunctionBegin; 1463 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1464 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1465 if (*dmf) { 1466 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1467 1468 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1469 1470 (*dmf)->ctx = dm->ctx; 1471 (*dmf)->leveldown = dm->leveldown; 1472 (*dmf)->levelup = dm->levelup + 1; 1473 1474 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1475 for (link=dm->refinehook; link; link=link->next) { 1476 if (link->refinehook) { 1477 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1478 } 1479 } 1480 } 1481 PetscFunctionReturn(0); 1482 } 1483 1484 #undef __FUNCT__ 1485 #define __FUNCT__ "DMRefineHookAdd" 1486 /*@C 1487 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1488 1489 Logically Collective 1490 1491 Input Arguments: 1492 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1493 . refinehook - function to run when setting up a coarser level 1494 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1495 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1496 1497 Calling sequence of refinehook: 1498 $ refinehook(DM coarse,DM fine,void *ctx); 1499 1500 + coarse - coarse level DM 1501 . 
fine - fine level DM to interpolate problem to 1502 - ctx - optional user-defined function context 1503 1504 Calling sequence for interphook: 1505 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1506 1507 + coarse - coarse level DM 1508 . interp - matrix interpolating a coarse-level solution to the finer grid 1509 . fine - fine level DM to update 1510 - ctx - optional user-defined function context 1511 1512 Level: advanced 1513 1514 Notes: 1515 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1516 1517 If this function is called multiple times, the hooks will be run in the order they are added. 1518 1519 This function is currently not available from Fortran. 1520 1521 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1522 @*/ 1523 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1524 { 1525 PetscErrorCode ierr; 1526 DMRefineHookLink link,*p; 1527 1528 PetscFunctionBegin; 1529 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1530 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1531 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1532 link->refinehook = refinehook; 1533 link->interphook = interphook; 1534 link->ctx = ctx; 1535 link->next = NULL; 1536 *p = link; 1537 PetscFunctionReturn(0); 1538 } 1539 1540 #undef __FUNCT__ 1541 #define __FUNCT__ "DMInterpolate" 1542 /*@ 1543 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1544 1545 Collective if any hooks are 1546 1547 Input Arguments: 1548 + coarse - coarser DM to use as a base 1549 . 
restrct - interpolation matrix, apply using MatInterpolate() 1550 - fine - finer DM to update 1551 1552 Level: developer 1553 1554 .seealso: DMRefineHookAdd(), MatInterpolate() 1555 @*/ 1556 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1557 { 1558 PetscErrorCode ierr; 1559 DMRefineHookLink link; 1560 1561 PetscFunctionBegin; 1562 for (link=fine->refinehook; link; link=link->next) { 1563 if (link->interphook) { 1564 ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr); 1565 } 1566 } 1567 PetscFunctionReturn(0); 1568 } 1569 1570 #undef __FUNCT__ 1571 #define __FUNCT__ "DMGetRefineLevel" 1572 /*@ 1573 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1574 1575 Not Collective 1576 1577 Input Parameter: 1578 . dm - the DM object 1579 1580 Output Parameter: 1581 . level - number of refinements 1582 1583 Level: developer 1584 1585 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1586 1587 @*/ 1588 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1589 { 1590 PetscFunctionBegin; 1591 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1592 *level = dm->levelup; 1593 PetscFunctionReturn(0); 1594 } 1595 1596 #undef __FUNCT__ 1597 #define __FUNCT__ "DMGlobalToLocalHookAdd" 1598 /*@C 1599 DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called 1600 1601 Logically Collective 1602 1603 Input Arguments: 1604 + dm - the DM 1605 . beginhook - function to run at the beginning of DMGlobalToLocalBegin() 1606 . endhook - function to run after DMGlobalToLocalEnd() has completed 1607 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1608 1609 Calling sequence for beginhook: 1610 $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1611 1612 + dm - global DM 1613 . g - global vector 1614 . mode - mode 1615 . 
l - local vector 1616 - ctx - optional user-defined function context 1617 1618 1619 Calling sequence for endhook: 1620 $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1621 1622 + global - global DM 1623 - ctx - optional user-defined function context 1624 1625 Level: advanced 1626 1627 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1628 @*/ 1629 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx) 1630 { 1631 PetscErrorCode ierr; 1632 DMGlobalToLocalHookLink link,*p; 1633 1634 PetscFunctionBegin; 1635 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1636 for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1637 ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr); 1638 link->beginhook = beginhook; 1639 link->endhook = endhook; 1640 link->ctx = ctx; 1641 link->next = NULL; 1642 *p = link; 1643 PetscFunctionReturn(0); 1644 } 1645 1646 #undef __FUNCT__ 1647 #define __FUNCT__ "DMGlobalToLocalBegin" 1648 /*@ 1649 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1650 1651 Neighbor-wise Collective on DM 1652 1653 Input Parameters: 1654 + dm - the DM object 1655 . g - the global vector 1656 . 
mode - INSERT_VALUES or ADD_VALUES 1657 - l - the local vector 1658 1659 1660 Level: beginner 1661 1662 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1663 1664 @*/ 1665 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1666 { 1667 PetscSF sf; 1668 PetscErrorCode ierr; 1669 DMGlobalToLocalHookLink link; 1670 1671 PetscFunctionBegin; 1672 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1673 for (link=dm->gtolhook; link; link=link->next) { 1674 if (link->beginhook) { 1675 ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr); 1676 } 1677 } 1678 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1679 if (sf) { 1680 PetscScalar *lArray, *gArray; 1681 1682 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1683 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1684 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1685 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1686 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1687 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1688 } else { 1689 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1690 } 1691 PetscFunctionReturn(0); 1692 } 1693 1694 #undef __FUNCT__ 1695 #define __FUNCT__ "DMGlobalToLocalEnd" 1696 /*@ 1697 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1698 1699 Neighbor-wise Collective on DM 1700 1701 Input Parameters: 1702 + dm - the DM object 1703 . g - the global vector 1704 . 
mode - INSERT_VALUES or ADD_VALUES 1705 - l - the local vector 1706 1707 1708 Level: beginner 1709 1710 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1711 1712 @*/ 1713 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1714 { 1715 PetscSF sf; 1716 PetscErrorCode ierr; 1717 PetscScalar *lArray, *gArray; 1718 DMGlobalToLocalHookLink link; 1719 1720 PetscFunctionBegin; 1721 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1722 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1723 if (sf) { 1724 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1725 1726 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1727 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1728 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1729 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1730 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1731 } else { 1732 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1733 } 1734 for (link=dm->gtolhook; link; link=link->next) { 1735 if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);} 1736 } 1737 PetscFunctionReturn(0); 1738 } 1739 1740 #undef __FUNCT__ 1741 #define __FUNCT__ "DMLocalToGlobalBegin" 1742 /*@ 1743 DMLocalToGlobalBegin - updates global vectors from local vectors 1744 1745 Neighbor-wise Collective on DM 1746 1747 Input Parameters: 1748 + dm - the DM object 1749 . l - the local vector 1750 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1751 base point. 1752 - - the global vector 1753 1754 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1755 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1756 global array to the final global array with VecAXPY(). 1757 1758 Level: beginner 1759 1760 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1761 1762 @*/ 1763 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1764 { 1765 PetscSF sf; 1766 PetscErrorCode ierr; 1767 1768 PetscFunctionBegin; 1769 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1770 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1771 if (sf) { 1772 MPI_Op op; 1773 PetscScalar *lArray, *gArray; 1774 1775 switch (mode) { 1776 case INSERT_VALUES: 1777 case INSERT_ALL_VALUES: 1778 op = MPIU_REPLACE; break; 1779 case ADD_VALUES: 1780 case ADD_ALL_VALUES: 1781 op = MPI_SUM; break; 1782 default: 1783 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1784 } 1785 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1786 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1787 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1788 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1789 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1790 } else { 1791 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1792 } 1793 PetscFunctionReturn(0); 1794 } 1795 1796 #undef __FUNCT__ 1797 #define __FUNCT__ "DMLocalToGlobalEnd" 1798 /*@ 1799 DMLocalToGlobalEnd - updates global vectors from local vectors 1800 1801 Neighbor-wise Collective on DM 1802 1803 Input Parameters: 1804 + dm - the DM object 1805 . l - the local vector 1806 . 
mode - INSERT_VALUES or ADD_VALUES 1807 - g - the global vector 1808 1809 1810 Level: beginner 1811 1812 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1813 1814 @*/ 1815 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1816 { 1817 PetscSF sf; 1818 PetscErrorCode ierr; 1819 1820 PetscFunctionBegin; 1821 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1822 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1823 if (sf) { 1824 MPI_Op op; 1825 PetscScalar *lArray, *gArray; 1826 1827 switch (mode) { 1828 case INSERT_VALUES: 1829 case INSERT_ALL_VALUES: 1830 op = MPIU_REPLACE; break; 1831 case ADD_VALUES: 1832 case ADD_ALL_VALUES: 1833 op = MPI_SUM; break; 1834 default: 1835 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1836 } 1837 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1838 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1839 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1840 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1841 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1842 } else { 1843 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1844 } 1845 PetscFunctionReturn(0); 1846 } 1847 1848 #undef __FUNCT__ 1849 #define __FUNCT__ "DMLocalToLocalBegin" 1850 /*@ 1851 DMLocalToLocalBegin - Maps from a local vector (including ghost points 1852 that contain irrelevant values) to another local vector where the ghost 1853 points in the second are set correctly. Must be followed by DMLocalToLocalEnd(). 1854 1855 Neighbor-wise Collective on DM and Vec 1856 1857 Input Parameters: 1858 + dm - the DM object 1859 . g - the original local vector 1860 - mode - one of INSERT_VALUES or ADD_VALUES 1861 1862 Output Parameter: 1863 . 
l - the local vector with correct ghost values 1864 1865 Level: intermediate 1866 1867 Notes: 1868 The local vectors used here need not be the same as those 1869 obtained from DMCreateLocalVector(), BUT they 1870 must have the same parallel data layout; they could, for example, be 1871 obtained with VecDuplicate() from the DM originating vectors. 1872 1873 .keywords: DM, local-to-local, begin 1874 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1875 1876 @*/ 1877 PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1878 { 1879 PetscErrorCode ierr; 1880 1881 PetscFunctionBegin; 1882 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1883 ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1884 PetscFunctionReturn(0); 1885 } 1886 1887 #undef __FUNCT__ 1888 #define __FUNCT__ "DMLocalToLocalEnd" 1889 /*@ 1890 DMLocalToLocalEnd - Maps from a local vector (including ghost points 1891 that contain irrelevant values) to another local vector where the ghost 1892 points in the second are set correctly. Must be preceded by DMLocalToLocalBegin(). 1893 1894 Neighbor-wise Collective on DM and Vec 1895 1896 Input Parameters: 1897 + da - the DM object 1898 . g - the original local vector 1899 - mode - one of INSERT_VALUES or ADD_VALUES 1900 1901 Output Parameter: 1902 . l - the local vector with correct ghost values 1903 1904 Level: intermediate 1905 1906 Notes: 1907 The local vectors used here need not be the same as those 1908 obtained from DMCreateLocalVector(), BUT they 1909 must have the same parallel data layout; they could, for example, be 1910 obtained with VecDuplicate() from the DM originating vectors. 
1911 1912 .keywords: DM, local-to-local, end 1913 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1914 1915 @*/ 1916 PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1917 { 1918 PetscErrorCode ierr; 1919 1920 PetscFunctionBegin; 1921 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1922 ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1923 PetscFunctionReturn(0); 1924 } 1925 1926 1927 #undef __FUNCT__ 1928 #define __FUNCT__ "DMCoarsen" 1929 /*@ 1930 DMCoarsen - Coarsens a DM object 1931 1932 Collective on DM 1933 1934 Input Parameter: 1935 + dm - the DM object 1936 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1937 1938 Output Parameter: 1939 . dmc - the coarsened DM 1940 1941 Level: developer 1942 1943 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1944 1945 @*/ 1946 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1947 { 1948 PetscErrorCode ierr; 1949 DMCoarsenHookLink link; 1950 1951 PetscFunctionBegin; 1952 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1953 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1954 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1955 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1956 (*dmc)->ctx = dm->ctx; 1957 (*dmc)->levelup = dm->levelup; 1958 (*dmc)->leveldown = dm->leveldown + 1; 1959 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1960 for (link=dm->coarsenhook; link; link=link->next) { 1961 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1962 } 1963 PetscFunctionReturn(0); 1964 } 1965 1966 #undef __FUNCT__ 1967 #define __FUNCT__ "DMCoarsenHookAdd" 1968 /*@C 1969 DMCoarsenHookAdd - adds a callback to be 
run when restricting a nonlinear problem to the coarse grid

   Logically Collective

   Input Arguments:
+  fine - DM on which to run a hook when restricting to a coarser level
.  coarsenhook - function to run when setting up a coarser level
.  restricthook - function to run to update data on coarser levels (once per SNESSolve())
-  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

   Calling sequence of coarsenhook:
$    coarsenhook(DM fine,DM coarse,void *ctx);

+  fine - fine level DM
.  coarse - coarse level DM to restrict problem to
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

+  fine - fine level DM
.  mrestrict - matrix restricting a fine-level solution to the coarse grid
.  rscale - scaling vector for restriction
.  inject - matrix restricting by injection
.  coarse - coarse level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMCoarsenHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fine,DM_CLASSID,1);
  for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in the order they were registered */
  ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
  link->coarsenhook = coarsenhook;
  link->restricthook = restricthook;
  link->ctx = ctx;
  link->next = NULL;
  *p = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestrict"
/*@
   DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

   Collective if any hooks are

   Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.
inject - injection matrix, also use MatRestrict() 2038 - coarse - coarer DM to update 2039 2040 Level: developer 2041 2042 .seealso: DMCoarsenHookAdd(), MatRestrict() 2043 @*/ 2044 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 2045 { 2046 PetscErrorCode ierr; 2047 DMCoarsenHookLink link; 2048 2049 PetscFunctionBegin; 2050 for (link=fine->coarsenhook; link; link=link->next) { 2051 if (link->restricthook) { 2052 ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr); 2053 } 2054 } 2055 PetscFunctionReturn(0); 2056 } 2057 2058 #undef __FUNCT__ 2059 #define __FUNCT__ "DMSubDomainHookAdd" 2060 /*@C 2061 DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid 2062 2063 Logically Collective 2064 2065 Input Arguments: 2066 + global - global DM 2067 . ddhook - function to run to pass data to the decomposition DM upon its creation 2068 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 2069 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 2070 2071 2072 Calling sequence for ddhook: 2073 $ ddhook(DM global,DM block,void *ctx) 2074 2075 + global - global DM 2076 . block - block DM 2077 - ctx - optional user-defined function context 2078 2079 Calling sequence for restricthook: 2080 $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx) 2081 2082 + global - global DM 2083 . out - scatter to the outer (with ghost and overlap points) block vector 2084 . in - scatter to block vector values only owned locally 2085 . block - block DM 2086 - ctx - optional user-defined function context 2087 2088 Level: advanced 2089 2090 Notes: 2091 This function is only needed if auxiliary data needs to be set up on subdomain DMs. 2092 2093 If this function is called multiple times, the hooks will be run in the order they are added. 

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the global information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in the order they were registered */
  ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook = ddhook;
  link->ctx = ctx;
  link->next = NULL;
  *p = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainRestrict"
/*@
   DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

   Collective if any hooks are

   Input Arguments:
+  global - global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 2130 - coarse - coarer DM to update 2131 2132 Level: developer 2133 2134 .seealso: DMCoarsenHookAdd(), MatRestrict() 2135 @*/ 2136 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 2137 { 2138 PetscErrorCode ierr; 2139 DMSubDomainHookLink link; 2140 2141 PetscFunctionBegin; 2142 for (link=global->subdomainhook; link; link=link->next) { 2143 if (link->restricthook) { 2144 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2145 } 2146 } 2147 PetscFunctionReturn(0); 2148 } 2149 2150 #undef __FUNCT__ 2151 #define __FUNCT__ "DMGetCoarsenLevel" 2152 /*@ 2153 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2154 2155 Not Collective 2156 2157 Input Parameter: 2158 . dm - the DM object 2159 2160 Output Parameter: 2161 . level - number of coarsenings 2162 2163 Level: developer 2164 2165 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2166 2167 @*/ 2168 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2169 { 2170 PetscFunctionBegin; 2171 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2172 *level = dm->leveldown; 2173 PetscFunctionReturn(0); 2174 } 2175 2176 2177 2178 #undef __FUNCT__ 2179 #define __FUNCT__ "DMRefineHierarchy" 2180 /*@C 2181 DMRefineHierarchy - Refines a DM object, all levels at once 2182 2183 Collective on DM 2184 2185 Input Parameter: 2186 + dm - the DM object 2187 - nlevels - the number of levels of refinement 2188 2189 Output Parameter: 2190 . 
dmf - the refined DM hierarchy 2191 2192 Level: developer 2193 2194 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2195 2196 @*/ 2197 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2198 { 2199 PetscErrorCode ierr; 2200 2201 PetscFunctionBegin; 2202 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2203 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2204 if (nlevels == 0) PetscFunctionReturn(0); 2205 if (dm->ops->refinehierarchy) { 2206 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2207 } else if (dm->ops->refine) { 2208 PetscInt i; 2209 2210 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2211 for (i=1; i<nlevels; i++) { 2212 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2213 } 2214 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2215 PetscFunctionReturn(0); 2216 } 2217 2218 #undef __FUNCT__ 2219 #define __FUNCT__ "DMCoarsenHierarchy" 2220 /*@C 2221 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2222 2223 Collective on DM 2224 2225 Input Parameter: 2226 + dm - the DM object 2227 - nlevels - the number of levels of coarsening 2228 2229 Output Parameter: 2230 . 
dmc - the coarsened DM hierarchy

    Level: developer

.seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dmc,3);
  if (dm->ops->coarsenhierarchy) {
    ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
  } else if (dm->ops->coarsen) {
    /* Fall back on repeated single-level coarsening */
    PetscInt i;

    ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
    }
  } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateAggregates"
/*@
   DMCreateAggregates - Gets the aggregates that map between
   grids associated with two DMs.

   Collective on DM

   Input Parameters:
+  dmc - the coarse grid DM
-  dmf - the fine grid DM

   Output Parameters:
.
rest - the restriction matrix (transpose of the projection matrix) 2273 2274 Level: intermediate 2275 2276 .keywords: interpolation, restriction, multigrid 2277 2278 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2279 @*/ 2280 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2281 { 2282 PetscErrorCode ierr; 2283 2284 PetscFunctionBegin; 2285 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2286 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2287 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2288 PetscFunctionReturn(0); 2289 } 2290 2291 #undef __FUNCT__ 2292 #define __FUNCT__ "DMSetApplicationContextDestroy" 2293 /*@C 2294 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2295 2296 Not Collective 2297 2298 Input Parameters: 2299 + dm - the DM object 2300 - destroy - the destroy function 2301 2302 Level: intermediate 2303 2304 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2305 2306 @*/ 2307 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2308 { 2309 PetscFunctionBegin; 2310 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2311 dm->ctxdestroy = destroy; 2312 PetscFunctionReturn(0); 2313 } 2314 2315 #undef __FUNCT__ 2316 #define __FUNCT__ "DMSetApplicationContext" 2317 /*@ 2318 DMSetApplicationContext - Set a user context into a DM object 2319 2320 Not Collective 2321 2322 Input Parameters: 2323 + dm - the DM object 2324 - ctx - the user context 2325 2326 Level: intermediate 2327 2328 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2329 2330 @*/ 2331 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2332 { 2333 PetscFunctionBegin; 2334 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2335 dm->ctx = ctx; 2336 PetscFunctionReturn(0); 2337 } 2338 
2339 #undef __FUNCT__ 2340 #define __FUNCT__ "DMGetApplicationContext" 2341 /*@ 2342 DMGetApplicationContext - Gets a user context from a DM object 2343 2344 Not Collective 2345 2346 Input Parameter: 2347 . dm - the DM object 2348 2349 Output Parameter: 2350 . ctx - the user context 2351 2352 Level: intermediate 2353 2354 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2355 2356 @*/ 2357 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2358 { 2359 PetscFunctionBegin; 2360 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2361 *(void**)ctx = dm->ctx; 2362 PetscFunctionReturn(0); 2363 } 2364 2365 #undef __FUNCT__ 2366 #define __FUNCT__ "DMSetVariableBounds" 2367 /*@C 2368 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2369 2370 Logically Collective on DM 2371 2372 Input Parameter: 2373 + dm - the DM object 2374 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2375 2376 Level: intermediate 2377 2378 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2379 DMSetJacobian() 2380 2381 @*/ 2382 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2383 { 2384 PetscFunctionBegin; 2385 dm->ops->computevariablebounds = f; 2386 PetscFunctionReturn(0); 2387 } 2388 2389 #undef __FUNCT__ 2390 #define __FUNCT__ "DMHasVariableBounds" 2391 /*@ 2392 DMHasVariableBounds - does the DM object have a variable bounds function? 2393 2394 Not Collective 2395 2396 Input Parameter: 2397 . dm - the DM object to destroy 2398 2399 Output Parameter: 2400 . 
flg - PETSC_TRUE if the variable bounds function exists 2401 2402 Level: developer 2403 2404 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2405 2406 @*/ 2407 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2408 { 2409 PetscFunctionBegin; 2410 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2411 PetscFunctionReturn(0); 2412 } 2413 2414 #undef __FUNCT__ 2415 #define __FUNCT__ "DMComputeVariableBounds" 2416 /*@C 2417 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2418 2419 Logically Collective on DM 2420 2421 Input Parameters: 2422 + dm - the DM object to destroy 2423 - x - current solution at which the bounds are computed 2424 2425 Output parameters: 2426 + xl - lower bound 2427 - xu - upper bound 2428 2429 Level: intermediate 2430 2431 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2432 2433 @*/ 2434 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2435 { 2436 PetscErrorCode ierr; 2437 2438 PetscFunctionBegin; 2439 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2440 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2441 if (dm->ops->computevariablebounds) { 2442 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2443 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2444 PetscFunctionReturn(0); 2445 } 2446 2447 #undef __FUNCT__ 2448 #define __FUNCT__ "DMHasColoring" 2449 /*@ 2450 DMHasColoring - does the DM object have a method of providing a coloring? 2451 2452 Not Collective 2453 2454 Input Parameter: 2455 . dm - the DM object 2456 2457 Output Parameter: 2458 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2459 2460 Level: developer 2461 2462 .seealso DMHasFunction(), DMCreateColoring() 2463 2464 @*/ 2465 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2466 { 2467 PetscFunctionBegin; 2468 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2469 PetscFunctionReturn(0); 2470 } 2471 2472 #undef __FUNCT__ 2473 #define __FUNCT__ "DMSetVec" 2474 /*@C 2475 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2476 2477 Collective on DM 2478 2479 Input Parameter: 2480 + dm - the DM object 2481 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2482 2483 Level: developer 2484 2485 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2486 2487 @*/ 2488 PetscErrorCode DMSetVec(DM dm,Vec x) 2489 { 2490 PetscErrorCode ierr; 2491 2492 PetscFunctionBegin; 2493 if (x) { 2494 if (!dm->x) { 2495 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2496 } 2497 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2498 } else if (dm->x) { 2499 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2500 } 2501 PetscFunctionReturn(0); 2502 } 2503 2504 PetscFunctionList DMList = NULL; 2505 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2506 2507 #undef __FUNCT__ 2508 #define __FUNCT__ "DMSetType" 2509 /*@C 2510 DMSetType - Builds a DM, for a particular DM implementation. 2511 2512 Collective on DM 2513 2514 Input Parameters: 2515 + dm - The DM object 2516 - method - The name of the DM type 2517 2518 Options Database Key: 2519 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2520 2521 Notes: 2522 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op when the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Destroy the implementation-specific data of the old type before rebuilding as the new type */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll();CHKERRQ(ierr);
  }
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    /* Converter lookup is by composed-function name "DMConvert_<fromtype>_<totype>_C" */
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2) See if a specialized converter is known to the desired DM class. */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      /* B was only needed for the lookup; the converter builds the real output DM */
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*--------------------------------------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - Adds a new DM component implementation

  Not Collective

  Input Parameters:
+ sname    - The name of a new user-defined creation routine
- function - The creation routine itself

  Notes:
  DMRegister() may be called multiple times to add several user-defined DMs


  Sample usage:
.vb
    DMRegister("my_da", MyDMCreate);
.ve

  Then, your DM type can be chosen with the procedural interface via
.vb
    DMCreate(MPI_Comm, DM *);
    DMSetType(DM,"my_da");
.ve
   or at runtime via the option
.vb
    -dm_type my_da
.ve

  Level: advanced

.keywords: DM, register
.seealso: DMRegisterAll(), DMRegisterDestroy()

@*/
PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

  Level: intermediate

  Notes:
  The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* NOTE(review): only the binary viewer is accepted below, although the man page also mentions HDF5 — confirm intended */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The stream starts with a classid sentinel followed by the DM type name */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Type-specific state is loaded by the implementation, when it provides a load method */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/******************************** FEM Support
**********************************/ 2781 2782 #undef __FUNCT__ 2783 #define __FUNCT__ "DMPrintCellVector" 2784 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) 2785 { 2786 PetscInt f; 2787 PetscErrorCode ierr; 2788 2789 PetscFunctionBegin; 2790 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2791 for (f = 0; f < len; ++f) { 2792 ierr = PetscPrintf(PETSC_COMM_SELF, " | %g |\n", (double)PetscRealPart(x[f]));CHKERRQ(ierr); 2793 } 2794 PetscFunctionReturn(0); 2795 } 2796 2797 #undef __FUNCT__ 2798 #define __FUNCT__ "DMPrintCellMatrix" 2799 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) 2800 { 2801 PetscInt f, g; 2802 PetscErrorCode ierr; 2803 2804 PetscFunctionBegin; 2805 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2806 for (f = 0; f < rows; ++f) { 2807 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2808 for (g = 0; g < cols; ++g) { 2809 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5g", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2810 } 2811 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2812 } 2813 PetscFunctionReturn(0); 2814 } 2815 2816 #undef __FUNCT__ 2817 #define __FUNCT__ "DMPrintLocalVec" 2818 PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X) 2819 { 2820 PetscMPIInt rank, numProcs; 2821 PetscInt p; 2822 PetscErrorCode ierr; 2823 2824 PetscFunctionBegin; 2825 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 2826 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr); 2827 ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr); 2828 for (p = 0; p < numProcs; ++p) { 2829 if (p == rank) { 2830 Vec x; 2831 2832 ierr = VecDuplicate(X, &x);CHKERRQ(ierr); 2833 ierr = VecCopy(X, x);CHKERRQ(ierr); 2834 ierr = VecChop(x, tol);CHKERRQ(ierr); 2835 ierr 
= VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr); 2836 ierr = VecDestroy(&x);CHKERRQ(ierr); 2837 ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr); 2838 } 2839 ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr); 2840 } 2841 PetscFunctionReturn(0); 2842 } 2843 2844 #undef __FUNCT__ 2845 #define __FUNCT__ "DMGetDefaultSection" 2846 /*@ 2847 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 2848 2849 Input Parameter: 2850 . dm - The DM 2851 2852 Output Parameter: 2853 . section - The PetscSection 2854 2855 Level: intermediate 2856 2857 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 2858 2859 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2860 @*/ 2861 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) 2862 { 2863 PetscErrorCode ierr; 2864 2865 PetscFunctionBegin; 2866 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2867 PetscValidPointer(section, 2); 2868 if (!dm->defaultSection && dm->ops->createdefaultsection) {ierr = (*dm->ops->createdefaultsection)(dm);CHKERRQ(ierr);} 2869 *section = dm->defaultSection; 2870 PetscFunctionReturn(0); 2871 } 2872 2873 #undef __FUNCT__ 2874 #define __FUNCT__ "DMSetDefaultSection" 2875 /*@ 2876 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so that re-setting the same section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Propagate the section's field count and field names into the DM's field objects */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName((PetscObject) dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Build the global section lazily from the local section and the point SF */
  if (!dm->defaultGlobalSection) {
    PetscSection s;

    ierr = DMGetDefaultSection(dm, &s);CHKERRQ(ierr);
    if (!s) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection in order to create a global PetscSection");
    if (!dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(s, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Rebuild the value layout to match the new global section */
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm), dm->defaultGlobalSection, &dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2959 2960 Input Parameters: 2961 + dm - The DM 2962 - section - The PetscSection, or NULL 2963 2964 Level: intermediate 2965 2966 Note: Any existing Section will be destroyed 2967 2968 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2969 @*/ 2970 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2971 { 2972 PetscErrorCode ierr; 2973 2974 PetscFunctionBegin; 2975 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2976 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2977 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2978 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2979 dm->defaultGlobalSection = section; 2980 PetscFunctionReturn(0); 2981 } 2982 2983 #undef __FUNCT__ 2984 #define __FUNCT__ "DMGetDefaultSF" 2985 /*@ 2986 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2987 it is created from the default PetscSection layouts in the DM. 2988 2989 Input Parameter: 2990 . dm - The DM 2991 2992 Output Parameter: 2993 . sf - The PetscSF 2994 2995 Level: intermediate 2996 2997 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
2998 2999 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3000 @*/ 3001 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) 3002 { 3003 PetscInt nroots; 3004 PetscErrorCode ierr; 3005 3006 PetscFunctionBegin; 3007 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3008 PetscValidPointer(sf, 2); 3009 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 3010 if (nroots < 0) { 3011 PetscSection section, gSection; 3012 3013 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3014 if (section) { 3015 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3016 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3017 } else { 3018 *sf = NULL; 3019 PetscFunctionReturn(0); 3020 } 3021 } 3022 *sf = dm->defaultSF; 3023 PetscFunctionReturn(0); 3024 } 3025 3026 #undef __FUNCT__ 3027 #define __FUNCT__ "DMSetDefaultSF" 3028 /*@ 3029 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3030 3031 Input Parameters: 3032 + dm - The DM 3033 - sf - The PetscSF 3034 3035 Level: intermediate 3036 3037 Note: Any previous SF is destroyed 3038 3039 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3040 @*/ 3041 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) 3042 { 3043 PetscErrorCode ierr; 3044 3045 PetscFunctionBegin; 3046 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3047 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3048 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3049 dm->defaultSF = sf; 3050 PetscFunctionReturn(0); 3051 } 3052 3053 #undef __FUNCT__ 3054 #define __FUNCT__ "DMCreateDefaultSF" 3055 /*@C 3056 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3057 describing the data layout. 3058 3059 Input Parameters: 3060 + dm - The DM 3061 . 
localSection - PetscSection describing the local data layout 3062 - globalSection - PetscSection describing the global data layout 3063 3064 Level: intermediate 3065 3066 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3067 @*/ 3068 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3069 { 3070 MPI_Comm comm; 3071 PetscLayout layout; 3072 const PetscInt *ranges; 3073 PetscInt *local; 3074 PetscSFNode *remote; 3075 PetscInt pStart, pEnd, p, nroots, nleaves = 0, l; 3076 PetscMPIInt size, rank; 3077 PetscErrorCode ierr; 3078 3079 PetscFunctionBegin; 3080 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 3081 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3082 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3083 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3084 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3085 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3086 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3087 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3088 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3089 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3090 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3091 for (p = pStart; p < pEnd; ++p) { 3092 PetscInt gdof, gcdof; 3093 3094 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3095 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3096 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3097 } 3098 ierr = PetscMalloc1(nleaves, &local);CHKERRQ(ierr); 3099 ierr = PetscMalloc1(nleaves, &remote);CHKERRQ(ierr); 3100 for (p = pStart, l = 0; p < pEnd; ++p) { 3101 const PetscInt *cind; 3102 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3103 3104 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3105 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3106 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3107 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3108 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3109 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3110 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3111 if (!gdof) continue; /* Censored point */ 3112 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3113 if (gsize != dof-cdof) { 3114 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 3115 cdof = 0; /* Ignore constraints */ 3116 } 3117 for (d = 0, c = 0; d < dof; ++d) { 3118 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3119 local[l+d-c] = off+d; 3120 } 3121 if (gdof < 0) { 3122 for (d = 0; d < gsize; ++d, ++l) { 3123 PetscInt offset = -(goff+1) + d, r; 3124 3125 ierr = PetscFindInt(offset,size+1,ranges,&r);CHKERRQ(ierr); 3126 if (r < 0) r = -(r+2); 3127 if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff); 3128 remote[l].rank = r; 3129 remote[l].index = offset - ranges[r]; 3130 } 3131 } else { 3132 for (d = 0; d < gsize; ++d, ++l) { 3133 remote[l].rank = rank; 3134 remote[l].index = goff+d - ranges[rank]; 3135 } 3136 } 3137 } 3138 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3139 
ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3140 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3141 PetscFunctionReturn(0); 3142 } 3143 3144 #undef __FUNCT__ 3145 #define __FUNCT__ "DMGetPointSF" 3146 /*@ 3147 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3148 3149 Input Parameter: 3150 . dm - The DM 3151 3152 Output Parameter: 3153 . sf - The PetscSF 3154 3155 Level: intermediate 3156 3157 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3158 3159 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3160 @*/ 3161 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 3162 { 3163 PetscFunctionBegin; 3164 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3165 PetscValidPointer(sf, 2); 3166 *sf = dm->sf; 3167 PetscFunctionReturn(0); 3168 } 3169 3170 #undef __FUNCT__ 3171 #define __FUNCT__ "DMSetPointSF" 3172 /*@ 3173 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 
3174 3175 Input Parameters: 3176 + dm - The DM 3177 - sf - The PetscSF 3178 3179 Level: intermediate 3180 3181 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3182 @*/ 3183 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3184 { 3185 PetscErrorCode ierr; 3186 3187 PetscFunctionBegin; 3188 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3189 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3190 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3191 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3192 dm->sf = sf; 3193 PetscFunctionReturn(0); 3194 } 3195 3196 #undef __FUNCT__ 3197 #define __FUNCT__ "DMGetNumFields" 3198 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3199 { 3200 PetscFunctionBegin; 3201 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3202 PetscValidPointer(numFields, 2); 3203 *numFields = dm->numFields; 3204 PetscFunctionReturn(0); 3205 } 3206 3207 #undef __FUNCT__ 3208 #define __FUNCT__ "DMSetNumFields" 3209 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields) 3210 { 3211 PetscInt f; 3212 PetscErrorCode ierr; 3213 3214 PetscFunctionBegin; 3215 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3216 if (dm->numFields == numFields) PetscFunctionReturn(0); 3217 for (f = 0; f < dm->numFields; ++f) { 3218 ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr); 3219 } 3220 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3221 dm->numFields = numFields; 3222 ierr = PetscMalloc1(dm->numFields, &dm->fields);CHKERRQ(ierr); 3223 for (f = 0; f < dm->numFields; ++f) { 3224 ierr = PetscContainerCreate(PetscObjectComm((PetscObject) dm), (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3225 } 3226 PetscFunctionReturn(0); 3227 } 3228 3229 #undef __FUNCT__ 3230 #define __FUNCT__ "DMGetField" 3231 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3232 { 3233 PetscFunctionBegin; 3234 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3235 PetscValidPointer(field, 3); 3236 if (!dm->fields) 
SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3237 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3238 *field = (PetscObject) dm->fields[f]; 3239 PetscFunctionReturn(0); 3240 } 3241 3242 #undef __FUNCT__ 3243 #define __FUNCT__ "DMSetField" 3244 PetscErrorCode DMSetField(DM dm, PetscInt f, PetscObject field) 3245 { 3246 PetscErrorCode ierr; 3247 3248 PetscFunctionBegin; 3249 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3250 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3251 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3252 if (((PetscObject) dm->fields[f]) == field) PetscFunctionReturn(0); 3253 ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr); 3254 dm->fields[f] = (PetscFE) field; 3255 ierr = PetscObjectReference((PetscObject) dm->fields[f]);CHKERRQ(ierr); 3256 PetscFunctionReturn(0); 3257 } 3258 3259 #undef __FUNCT__ 3260 #define __FUNCT__ "DMRestrictHook_Coordinates" 3261 PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx) 3262 { 3263 DM dm_coord,dmc_coord; 3264 PetscErrorCode ierr; 3265 Vec coords,ccoords; 3266 VecScatter scat; 3267 PetscFunctionBegin; 3268 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3269 ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr); 3270 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3271 ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr); 3272 if (coords && !ccoords) { 3273 ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr); 3274 ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr); 3275 ierr = 
VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3276 ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3277 ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr); 3278 ierr = VecScatterDestroy(&scat);CHKERRQ(ierr); 3279 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3280 } 3281 PetscFunctionReturn(0); 3282 } 3283 3284 #undef __FUNCT__ 3285 #define __FUNCT__ "DMSubDomainHook_Coordinates" 3286 static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx) 3287 { 3288 DM dm_coord,subdm_coord; 3289 PetscErrorCode ierr; 3290 Vec coords,ccoords,clcoords; 3291 VecScatter *scat_i,*scat_g; 3292 PetscFunctionBegin; 3293 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3294 ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr); 3295 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3296 ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr); 3297 if (coords && !ccoords) { 3298 ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr); 3299 ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr); 3300 ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr); 3301 ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3302 ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3303 ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3304 ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3305 ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr); 3306 ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr); 3307 ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr); 3308 ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr); 3309 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3310 ierr = VecDestroy(&clcoords);CHKERRQ(ierr); 3311 ierr = PetscFree(scat_i);CHKERRQ(ierr); 3312 ierr = 
PetscFree(scat_g);CHKERRQ(ierr); 3313 } 3314 PetscFunctionReturn(0); 3315 } 3316 3317 #undef __FUNCT__ 3318 #define __FUNCT__ "DMSetCoordinates" 3319 /*@ 3320 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3321 3322 Collective on DM 3323 3324 Input Parameters: 3325 + dm - the DM 3326 - c - coordinate vector 3327 3328 Note: 3329 The coordinates do include those for ghost points, which are in the local vector 3330 3331 Level: intermediate 3332 3333 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3334 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3335 @*/ 3336 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3337 { 3338 PetscErrorCode ierr; 3339 3340 PetscFunctionBegin; 3341 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3342 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3343 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3344 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3345 dm->coordinates = c; 3346 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3347 ierr = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3348 ierr = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3349 PetscFunctionReturn(0); 3350 } 3351 3352 #undef __FUNCT__ 3353 #define __FUNCT__ "DMSetCoordinatesLocal" 3354 /*@ 3355 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3356 3357 Collective on DM 3358 3359 Input Parameters: 3360 + dm - the DM 3361 - c - coordinate vector 3362 3363 Note: 3364 The coordinates of ghost points can be set using DMSetCoordinates() 3365 followed by DMGetCoordinatesLocal(). This is intended to enable the 3366 setting of ghost coordinates outside of the domain. 
3367 3368 Level: intermediate 3369 3370 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3371 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3372 @*/ 3373 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3374 { 3375 PetscErrorCode ierr; 3376 3377 PetscFunctionBegin; 3378 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3379 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3380 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3381 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3382 3383 dm->coordinatesLocal = c; 3384 3385 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3386 PetscFunctionReturn(0); 3387 } 3388 3389 #undef __FUNCT__ 3390 #define __FUNCT__ "DMGetCoordinates" 3391 /*@ 3392 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3393 3394 Not Collective 3395 3396 Input Parameter: 3397 . dm - the DM 3398 3399 Output Parameter: 3400 . c - global coordinate vector 3401 3402 Note: 3403 This is a borrowed reference, so the user should NOT destroy this vector 3404 3405 Each process has only the local coordinates (does NOT have the ghost coordinates). 3406 3407 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3408 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3409 3410 Level: intermediate 3411 3412 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3413 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3414 @*/ 3415 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3416 { 3417 PetscErrorCode ierr; 3418 3419 PetscFunctionBegin; 3420 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3421 PetscValidPointer(c,2); 3422 if (!dm->coordinates && dm->coordinatesLocal) { 3423 DM cdm = NULL; 3424 3425 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3426 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3427 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3428 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3429 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3430 } 3431 *c = dm->coordinates; 3432 PetscFunctionReturn(0); 3433 } 3434 3435 #undef __FUNCT__ 3436 #define __FUNCT__ "DMGetCoordinatesLocal" 3437 /*@ 3438 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3439 3440 Collective on DM 3441 3442 Input Parameter: 3443 . dm - the DM 3444 3445 Output Parameter: 3446 . c - coordinate vector 3447 3448 Note: 3449 This is a borrowed reference, so the user should NOT destroy this vector 3450 3451 Each process has the local and ghost coordinates 3452 3453 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3454 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3455 3456 Level: intermediate 3457 3458 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3459 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3460 @*/ 3461 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3462 { 3463 PetscErrorCode ierr; 3464 3465 PetscFunctionBegin; 3466 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3467 PetscValidPointer(c,2); 3468 if (!dm->coordinatesLocal && dm->coordinates) { 3469 DM cdm = NULL; 3470 3471 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3472 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3473 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3474 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3475 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3476 } 3477 *c = dm->coordinatesLocal; 3478 PetscFunctionReturn(0); 3479 } 3480 3481 #undef __FUNCT__ 3482 #define __FUNCT__ "DMGetCoordinateDM" 3483 /*@ 3484 DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates 3485 3486 Collective on DM 3487 3488 Input Parameter: 3489 . dm - the DM 3490 3491 Output Parameter: 3492 . 
cdm - coordinate DM 3493 3494 Level: intermediate 3495 3496 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3497 .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3498 @*/ 3499 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3500 { 3501 PetscErrorCode ierr; 3502 3503 PetscFunctionBegin; 3504 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3505 PetscValidPointer(cdm,2); 3506 if (!dm->coordinateDM) { 3507 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3508 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3509 } 3510 *cdm = dm->coordinateDM; 3511 PetscFunctionReturn(0); 3512 } 3513 3514 #undef __FUNCT__ 3515 #define __FUNCT__ "DMSetCoordinateDM" 3516 /*@ 3517 DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates 3518 3519 Logically Collective on DM 3520 3521 Input Parameters: 3522 + dm - the DM 3523 - cdm - coordinate DM 3524 3525 Level: intermediate 3526 3527 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3528 .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3529 @*/ 3530 PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm) 3531 { 3532 PetscErrorCode ierr; 3533 3534 PetscFunctionBegin; 3535 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3536 PetscValidHeaderSpecific(cdm,DM_CLASSID,2); 3537 ierr = DMDestroy(&dm->coordinateDM);CHKERRQ(ierr); 3538 dm->coordinateDM = cdm; 3539 ierr = PetscObjectReference((PetscObject) dm->coordinateDM);CHKERRQ(ierr); 3540 PetscFunctionReturn(0); 3541 } 3542 3543 #undef __FUNCT__ 3544 #define __FUNCT__ "DMGetCoordinateSection" 3545 /*@ 3546 DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh. 3547 3548 Not Collective 3549 3550 Input Parameter: 3551 . 
dm - The DM object 3552 3553 Output Parameter: 3554 . section - The PetscSection object 3555 3556 Level: intermediate 3557 3558 .keywords: mesh, coordinates 3559 .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection() 3560 @*/ 3561 PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section) 3562 { 3563 DM cdm; 3564 PetscErrorCode ierr; 3565 3566 PetscFunctionBegin; 3567 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3568 PetscValidPointer(section, 2); 3569 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3570 ierr = DMGetDefaultSection(cdm, section);CHKERRQ(ierr); 3571 PetscFunctionReturn(0); 3572 } 3573 3574 #undef __FUNCT__ 3575 #define __FUNCT__ "DMSetCoordinateSection" 3576 /*@ 3577 DMSetCoordinateSection - Set the layout of coordinate values over the mesh. 3578 3579 Not Collective 3580 3581 Input Parameters: 3582 + dm - The DM object 3583 - section - The PetscSection object 3584 3585 Level: intermediate 3586 3587 .keywords: mesh, coordinates 3588 .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection() 3589 @*/ 3590 PetscErrorCode DMSetCoordinateSection(DM dm, PetscSection section) 3591 { 3592 DM cdm; 3593 PetscErrorCode ierr; 3594 3595 PetscFunctionBegin; 3596 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3597 PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 3598 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3599 ierr = DMSetDefaultSection(cdm, section);CHKERRQ(ierr); 3600 PetscFunctionReturn(0); 3601 } 3602 3603 #undef __FUNCT__ 3604 #define __FUNCT__ "DMLocatePoints" 3605 /*@ 3606 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3607 3608 Not collective 3609 3610 Input Parameters: 3611 + dm - The DM 3612 - v - The Vec of points 3613 3614 Output Parameter: 3615 . 
cells - The local cell numbers for cells which contain the points

  Level: developer

.keywords: point location, mesh
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(v,VEC_CLASSID,2);
  PetscValidPointer(cells,3);
  /* Dispatch to the type-specific implementation if one exists */
  if (!dm->ops->locatepoints) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
  ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}