#include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/
#include <petscsf.h>

PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;

/* Printable names for DMBoundaryType; the "DM_BOUNDARY_" entry is the enum prefix and 0 terminates the list */
const char *const DMBoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","TWIST","DM_BOUNDARY_",0};

#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType()  it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = NULL;
  /* Make sure every package this object depends on is registered before creating the header */
  ierr = PetscSysInitializePackage();CHKERRQ(ierr);
  ierr = VecInitializePackage();CHKERRQ(ierr);
  ierr = MatInitializePackage();CHKERRQ(ierr);
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = NULL;
  v->ltogmapb     = NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = NULL;
  v->defaultGlobalSection = NULL;
  {
    PetscInt i;
    /* NOTE(review): 10 appears to be the fixed size of nullspaceConstructors in _p_DM -- confirm against dmimpl.h */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = NULL;
    }
  }
  v->numFields         = 0;
  v->fields            = NULL;
  v->dmBC              = NULL;
  v->outputSequenceNum = -1;
  /* Default vector/matrix types; callers may override with DMSetVecType()/DMSetMatType() */
  ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr);
  ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr);
  *dm = v;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMClone"
/*@
  DMClone - Creates a DM object with the same topology as the original.

  Collective on MPI_Comm

  Input Parameter:
. dm - The original DM object

  Output Parameter:
. newdm - The new DM object

  Level: beginner

.keywords: DM, topology, create
@*/
PetscErrorCode DMClone(DM dm, DM *newdm)
{
  PetscSF        sf;
  Vec            coords;
  void          *ctx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(newdm,2);
  ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr);
  if (dm->ops->clone) {
    ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr);
  }
  /* The clone shares the original's topology, so it is considered already set up */
  (*newdm)->setupcalled = PETSC_TRUE;
  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
  ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr);
  ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr);
  /* Prefer local coordinates; fall back to global coordinates if only those are set */
  ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr);
  if (coords) {
    ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr);
  } else {
    ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr);
    if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameter:
+  da - initial distributed array
.  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.
-dm_vec_type ctype 129 130 Level: intermediate 131 132 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType() 133 @*/ 134 PetscErrorCode DMSetVecType(DM da,VecType ctype) 135 { 136 PetscErrorCode ierr; 137 138 PetscFunctionBegin; 139 PetscValidHeaderSpecific(da,DM_CLASSID,1); 140 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 141 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 142 PetscFunctionReturn(0); 143 } 144 145 #undef __FUNCT__ 146 #define __FUNCT__ "DMGetVecType" 147 /*@C 148 DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 149 150 Logically Collective on DMDA 151 152 Input Parameter: 153 . da - initial distributed array 154 155 Output Parameter: 156 . ctype - the vector type 157 158 Level: intermediate 159 160 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 161 @*/ 162 PetscErrorCode DMGetVecType(DM da,VecType *ctype) 163 { 164 PetscFunctionBegin; 165 PetscValidHeaderSpecific(da,DM_CLASSID,1); 166 *ctype = da->vectype; 167 PetscFunctionReturn(0); 168 } 169 170 #undef __FUNCT__ 171 #define __FUNCT__ "VecGetDM" 172 /*@ 173 VecGetDM - Gets the DM defining the data layout of the vector 174 175 Not collective 176 177 Input Parameter: 178 . v - The Vec 179 180 Output Parameter: 181 . 
dm - The DM 182 183 Level: intermediate 184 185 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 186 @*/ 187 PetscErrorCode VecGetDM(Vec v, DM *dm) 188 { 189 PetscErrorCode ierr; 190 191 PetscFunctionBegin; 192 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 193 PetscValidPointer(dm,2); 194 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 195 PetscFunctionReturn(0); 196 } 197 198 #undef __FUNCT__ 199 #define __FUNCT__ "VecSetDM" 200 /*@ 201 VecSetDM - Sets the DM defining the data layout of the vector 202 203 Not collective 204 205 Input Parameters: 206 + v - The Vec 207 - dm - The DM 208 209 Level: intermediate 210 211 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 212 @*/ 213 PetscErrorCode VecSetDM(Vec v, DM dm) 214 { 215 PetscErrorCode ierr; 216 217 PetscFunctionBegin; 218 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 219 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 220 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 221 PetscFunctionReturn(0); 222 } 223 224 #undef __FUNCT__ 225 #define __FUNCT__ "DMSetMatType" 226 /*@C 227 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 228 229 Logically Collective on DM 230 231 Input Parameter: 232 + dm - the DM context 233 . ctype - the matrix type 234 235 Options Database: 236 . 
-dm_mat_type ctype 237 238 Level: intermediate 239 240 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType() 241 @*/ 242 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 243 { 244 PetscErrorCode ierr; 245 246 PetscFunctionBegin; 247 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 248 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 249 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 250 PetscFunctionReturn(0); 251 } 252 253 #undef __FUNCT__ 254 #define __FUNCT__ "DMGetMatType" 255 /*@C 256 DMGetMatType - Gets the type of matrix created with DMCreateMatrix() 257 258 Logically Collective on DM 259 260 Input Parameter: 261 . dm - the DM context 262 263 Output Parameter: 264 . ctype - the matrix type 265 266 Options Database: 267 . -dm_mat_type ctype 268 269 Level: intermediate 270 271 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType() 272 @*/ 273 PetscErrorCode DMGetMatType(DM dm,MatType *ctype) 274 { 275 PetscFunctionBegin; 276 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 277 *ctype = dm->mattype; 278 PetscFunctionReturn(0); 279 } 280 281 #undef __FUNCT__ 282 #define __FUNCT__ "MatGetDM" 283 /*@ 284 MatGetDM - Gets the DM defining the data layout of the matrix 285 286 Not collective 287 288 Input Parameter: 289 . A - The Mat 290 291 Output Parameter: 292 . 
dm - The DM

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM lives in the matrix's composed-object table; NULL is returned when none is attached */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
   MatSetDM - Sets the DM defining the data layout of the matrix

   Not collective

   Input Parameters:
+  A - The Mat
-  dm - The DM

   Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Composing with NULL detaches any previously set DM */
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DMDA options in the database.

   Logically Collective on DMDA

   Input Parameter:
+  da - the DMDA context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DMDA, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr);
  }
  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i]) cnt++;
    if ((*dm)->globalin[i]) cnt++;
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    /* The solution vector may itself hold a reference back to this DM */
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* If anyone besides the counted circular references still holds the DM, just drop one reference and return */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  nnext=(*dm)->namedglobal;
  (*dm)->namedglobal = NULL;
  for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  nnext=(*dm)->namedlocal;
  (*dm)->namedlocal = NULL;
  for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named local vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = NULL;
  }

  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy((PetscObject *) &(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  ierr = DMDestroy(&(*dm)->dmBC);CHKERRQ(ierr);
  /* if memory was published with SAWs then destroy it */
  ierr = PetscObjectSAWsViewOff((PetscObject)*dm);CHKERRQ(ierr);

  /* Type-specific teardown happens last, after all generic resources are released */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetUp"
/*@
    DMSetUp - sets up the data structures inside a DM object

    Collective on DM

    Input Parameter:
.   dm - the DM object to setup

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetUp(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Setting up twice is a cheap no-op */
  if (dm->setupcalled) PetscFunctionReturn(0);
  if (dm->ops->setup) {
    ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
  }
  dm->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.
-dm_mat_type <type> type of matrix to create inside DM 569 - -dm_coloring_type <global or ghosted> 570 571 Level: developer 572 573 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 574 575 @*/ 576 PetscErrorCode DMSetFromOptions(DM dm) 577 { 578 char typeName[256]; 579 PetscBool flg; 580 PetscErrorCode ierr; 581 582 PetscFunctionBegin; 583 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 584 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 585 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 586 ierr = PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 587 if (flg) { 588 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 589 } 590 ierr = PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 591 if (flg) { 592 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 593 } 594 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 595 if (dm->ops->setfromoptions) { 596 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 597 } 598 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 599 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 600 ierr = PetscOptionsEnd();CHKERRQ(ierr); 601 PetscFunctionReturn(0); 602 } 603 604 #undef __FUNCT__ 605 #define __FUNCT__ "DMView" 606 /*@C 607 DMView - Views a vector packer or DMDA. 
    Collective on DM

    Input Parameter:
+   dm - the DM object to view
-   v - the viewer

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Default to stdout on the DM's communicator when no viewer is supplied */
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectPrintClassNamePrefixType((PetscObject)dm,v);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary layout: classid marker followed by a fixed 256-byte type name */
    PetscInt classid = DM_FILE_CLASSID;
    char     type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector"
/*@
    DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

    Collective on DM

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.
vec - the global vector 658 659 Level: beginner 660 661 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 662 663 @*/ 664 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 665 { 666 PetscErrorCode ierr; 667 668 PetscFunctionBegin; 669 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 670 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 671 PetscFunctionReturn(0); 672 } 673 674 #undef __FUNCT__ 675 #define __FUNCT__ "DMCreateLocalVector" 676 /*@ 677 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 678 679 Not Collective 680 681 Input Parameter: 682 . dm - the DM object 683 684 Output Parameter: 685 . vec - the local vector 686 687 Level: beginner 688 689 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 690 691 @*/ 692 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 693 { 694 PetscErrorCode ierr; 695 696 PetscFunctionBegin; 697 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 698 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 699 PetscFunctionReturn(0); 700 } 701 702 #undef __FUNCT__ 703 #define __FUNCT__ "DMGetLocalToGlobalMapping" 704 /*@ 705 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 706 707 Collective on DM 708 709 Input Parameter: 710 . dm - the DM that provides the mapping 711 712 Output Parameter: 713 . ltog - the mapping 714 715 Level: intermediate 716 717 Notes: 718 This mapping can then be used by VecSetLocalToGlobalMapping() or 719 MatSetLocalToGlobalMapping(). 
.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* Build the map lazily on first request and cache it on the DM */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    /* When a default PetscSection is attached, derive the map from global section offsets */
    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      PetscInt *ltog;
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc1(size, &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
    } else {
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  da - the distributed array that provides the mapping

   Output Parameter:
.
ltog - the block mapping 775 776 Level: intermediate 777 778 Notes: 779 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 780 MatSetLocalToGlobalMappingBlock(). 781 782 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 783 @*/ 784 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 785 { 786 PetscErrorCode ierr; 787 788 PetscFunctionBegin; 789 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 790 PetscValidPointer(ltog,2); 791 if (!dm->ltogmapb) { 792 PetscInt bs; 793 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 794 if (bs > 1) { 795 if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 796 ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 797 } else { 798 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 799 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 800 } 801 } 802 *ltog = dm->ltogmapb; 803 PetscFunctionReturn(0); 804 } 805 806 #undef __FUNCT__ 807 #define __FUNCT__ "DMGetBlockSize" 808 /*@ 809 DMGetBlockSize - Gets the inherent block size associated with a DM 810 811 Not Collective 812 813 Input Parameter: 814 . dm - the DM with block structure 815 816 Output Parameter: 817 . 
bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs starts at 1 in DMCreate(); a value below 1 means the implementation never set it */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameter:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
855 856 857 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 858 859 @*/ 860 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 861 { 862 PetscErrorCode ierr; 863 864 PetscFunctionBegin; 865 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 866 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 867 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 868 PetscFunctionReturn(0); 869 } 870 871 #undef __FUNCT__ 872 #define __FUNCT__ "DMCreateInjection" 873 /*@ 874 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 875 876 Collective on DM 877 878 Input Parameter: 879 + dm1 - the DM object 880 - dm2 - the second, finer DM object 881 882 Output Parameter: 883 . ctx - the injection 884 885 Level: developer 886 887 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 888 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 889 890 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 891 892 @*/ 893 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 894 { 895 PetscErrorCode ierr; 896 897 PetscFunctionBegin; 898 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 899 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 900 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 901 PetscFunctionReturn(0); 902 } 903 904 #undef __FUNCT__ 905 #define __FUNCT__ "DMCreateColoring" 906 /*@ 907 DMCreateColoring - Gets coloring for a DM 908 909 Collective on DM 910 911 Input Parameter: 912 + dm - the DM object 913 - ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 914 915 Output Parameter: 916 . 
coloring - the coloring 917 918 Level: developer 919 920 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType() 921 922 @*/ 923 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring) 924 { 925 PetscErrorCode ierr; 926 927 PetscFunctionBegin; 928 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 929 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 930 ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr); 931 PetscFunctionReturn(0); 932 } 933 934 #undef __FUNCT__ 935 #define __FUNCT__ "DMCreateMatrix" 936 /*@ 937 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 938 939 Collective on DM 940 941 Input Parameter: 942 . dm - the DM object 943 944 Output Parameter: 945 . mat - the empty Jacobian 946 947 Level: beginner 948 949 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 950 do not need to do it yourself. 951 952 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 953 the nonzero pattern call DMDASetMatPreallocateOnly() 954 955 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 956 internally by PETSc. 957 958 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 959 the indices for the global numbering for DMDAs which is complicated. 
960 961 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType() 962 963 @*/ 964 PetscErrorCode DMCreateMatrix(DM dm,Mat *mat) 965 { 966 PetscErrorCode ierr; 967 968 PetscFunctionBegin; 969 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 970 ierr = MatInitializePackage();CHKERRQ(ierr); 971 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 972 PetscValidPointer(mat,3); 973 ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr); 974 PetscFunctionReturn(0); 975 } 976 977 #undef __FUNCT__ 978 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 979 /*@ 980 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 981 preallocated but the nonzero structure and zero values will not be set. 982 983 Logically Collective on DMDA 984 985 Input Parameter: 986 + dm - the DM 987 - only - PETSC_TRUE if only want preallocation 988 989 Level: developer 990 .seealso DMCreateMatrix() 991 @*/ 992 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 993 { 994 PetscFunctionBegin; 995 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 996 dm->prealloc_only = only; 997 PetscFunctionReturn(0); 998 } 999 1000 #undef __FUNCT__ 1001 #define __FUNCT__ "DMGetWorkArray" 1002 /*@C 1003 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1004 1005 Not Collective 1006 1007 Input Parameters: 1008 + dm - the DM object 1009 . count - The minium size 1010 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1011 1012 Output Parameter: 1013 . 
array - the work array 1014 1015 Level: developer 1016 1017 .seealso DMDestroy(), DMCreate() 1018 @*/ 1019 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1020 { 1021 PetscErrorCode ierr; 1022 DMWorkLink link; 1023 size_t size; 1024 1025 PetscFunctionBegin; 1026 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1027 PetscValidPointer(mem,4); 1028 if (dm->workin) { 1029 link = dm->workin; 1030 dm->workin = dm->workin->next; 1031 } else { 1032 ierr = PetscNewLog(dm,&link);CHKERRQ(ierr); 1033 } 1034 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 1035 if (size*count > link->bytes) { 1036 ierr = PetscFree(link->mem);CHKERRQ(ierr); 1037 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 1038 link->bytes = size*count; 1039 } 1040 link->next = dm->workout; 1041 dm->workout = link; 1042 *(void**)mem = link->mem; 1043 PetscFunctionReturn(0); 1044 } 1045 1046 #undef __FUNCT__ 1047 #define __FUNCT__ "DMRestoreWorkArray" 1048 /*@C 1049 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1050 1051 Not Collective 1052 1053 Input Parameters: 1054 + dm - the DM object 1055 . count - The minium size 1056 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1057 1058 Output Parameter: 1059 . 
array - the work array 1060 1061 Level: developer 1062 1063 .seealso DMDestroy(), DMCreate() 1064 @*/ 1065 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1066 { 1067 DMWorkLink *p,link; 1068 1069 PetscFunctionBegin; 1070 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1071 PetscValidPointer(mem,4); 1072 for (p=&dm->workout; (link=*p); p=&link->next) { 1073 if (link->mem == *(void**)mem) { 1074 *p = link->next; 1075 link->next = dm->workin; 1076 dm->workin = link; 1077 *(void**)mem = NULL; 1078 PetscFunctionReturn(0); 1079 } 1080 } 1081 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1082 PetscFunctionReturn(0); 1083 } 1084 1085 #undef __FUNCT__ 1086 #define __FUNCT__ "DMSetNullSpaceConstructor" 1087 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1088 { 1089 PetscFunctionBegin; 1090 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1091 if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1092 dm->nullspaceConstructors[field] = nullsp; 1093 PetscFunctionReturn(0); 1094 } 1095 1096 #undef __FUNCT__ 1097 #define __FUNCT__ "DMCreateFieldIS" 1098 /*@C 1099 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1100 1101 Not collective 1102 1103 Input Parameter: 1104 . dm - the DM object 1105 1106 Output Parameters: 1107 + numFields - The number of fields (or NULL if not requested) 1108 . fieldNames - The name for each field (or NULL if not requested) 1109 - fields - The global indices for each field (or NULL if not requested) 1110 1111 Level: intermediate 1112 1113 Notes: 1114 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1115 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1116 PetscFree(). 1117 1118 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1119 @*/ 1120 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1121 { 1122 PetscSection section, sectionGlobal; 1123 PetscErrorCode ierr; 1124 1125 PetscFunctionBegin; 1126 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1127 if (numFields) { 1128 PetscValidPointer(numFields,2); 1129 *numFields = 0; 1130 } 1131 if (fieldNames) { 1132 PetscValidPointer(fieldNames,3); 1133 *fieldNames = NULL; 1134 } 1135 if (fields) { 1136 PetscValidPointer(fields,4); 1137 *fields = NULL; 1138 } 1139 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1140 if (section) { 1141 PetscInt *fieldSizes, **fieldIndices; 1142 PetscInt nF, f, pStart, pEnd, p; 1143 1144 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1145 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1146 ierr = PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);CHKERRQ(ierr); 1147 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1148 for (f = 0; f < nF; ++f) { 1149 fieldSizes[f] = 0; 1150 } 1151 for (p = pStart; p < pEnd; ++p) { 1152 PetscInt gdof; 1153 1154 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1155 if (gdof > 0) { 1156 for (f = 0; f < nF; ++f) { 1157 PetscInt fdof, fcdof; 1158 1159 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1160 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1161 fieldSizes[f] += fdof-fcdof; 1162 } 1163 } 1164 } 1165 for (f = 0; f < nF; ++f) { 1166 ierr = PetscMalloc1(fieldSizes[f], &fieldIndices[f]);CHKERRQ(ierr); 1167 fieldSizes[f] = 0; 1168 } 1169 for (p = pStart; p < pEnd; ++p) { 1170 PetscInt gdof, goff; 1171 1172 ierr = 
PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1173 if (gdof > 0) { 1174 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1175 for (f = 0; f < nF; ++f) { 1176 PetscInt fdof, fcdof, fc; 1177 1178 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1179 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1180 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1181 fieldIndices[f][fieldSizes[f]] = goff++; 1182 } 1183 } 1184 } 1185 } 1186 if (numFields) *numFields = nF; 1187 if (fieldNames) { 1188 ierr = PetscMalloc1(nF, fieldNames);CHKERRQ(ierr); 1189 for (f = 0; f < nF; ++f) { 1190 const char *fieldName; 1191 1192 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1193 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1194 } 1195 } 1196 if (fields) { 1197 ierr = PetscMalloc1(nF, fields);CHKERRQ(ierr); 1198 for (f = 0; f < nF; ++f) { 1199 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1200 } 1201 } 1202 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1203 } else if (dm->ops->createfieldis) { 1204 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1205 } 1206 PetscFunctionReturn(0); 1207 } 1208 1209 1210 #undef __FUNCT__ 1211 #define __FUNCT__ "DMCreateFieldDecomposition" 1212 /*@C 1213 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1214 corresponding to different fields: each IS contains the global indices of the dofs of the 1215 corresponding field. The optional list of DMs define the DM for each subproblem. 1216 Generalizes DMCreateFieldIS(). 1217 1218 Not collective 1219 1220 Input Parameter: 1221 . dm - the DM object 1222 1223 Output Parameters: 1224 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1225 . 
namelist - The name for each field (or NULL if not requested) 1226 . islist - The global indices for each field (or NULL if not requested) 1227 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1228 1229 Level: intermediate 1230 1231 Notes: 1232 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1233 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1234 and all of the arrays should be freed with PetscFree(). 1235 1236 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1237 @*/ 1238 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1239 { 1240 PetscErrorCode ierr; 1241 1242 PetscFunctionBegin; 1243 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1244 if (len) { 1245 PetscValidPointer(len,2); 1246 *len = 0; 1247 } 1248 if (namelist) { 1249 PetscValidPointer(namelist,3); 1250 *namelist = 0; 1251 } 1252 if (islist) { 1253 PetscValidPointer(islist,4); 1254 *islist = 0; 1255 } 1256 if (dmlist) { 1257 PetscValidPointer(dmlist,5); 1258 *dmlist = 0; 1259 } 1260 /* 1261 Is it a good idea to apply the following check across all impls? 1262 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1263 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1264 */ 1265 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1266 if (!dm->ops->createfielddecomposition) { 1267 PetscSection section; 1268 PetscInt numFields, f; 1269 1270 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1271 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1272 if (section && numFields && dm->ops->createsubdm) { 1273 *len = numFields; 1274 if (namelist) {ierr = PetscMalloc1(numFields,namelist);CHKERRQ(ierr);} 1275 if (islist) {ierr = PetscMalloc1(numFields,islist);CHKERRQ(ierr);} 1276 if (dmlist) {ierr = PetscMalloc1(numFields,dmlist);CHKERRQ(ierr);} 1277 for (f = 0; f < numFields; ++f) { 1278 const char *fieldName; 1279 1280 ierr = DMCreateSubDM(dm, 1, &f, islist ? &(*islist)[f] : NULL, dmlist ? &(*dmlist)[f] : NULL);CHKERRQ(ierr); 1281 if (namelist) { 1282 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1283 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1284 } 1285 } 1286 } else { 1287 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1288 /* By default there are no DMs associated with subproblems. */ 1289 if (dmlist) *dmlist = NULL; 1290 } 1291 } else { 1292 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1293 } 1294 PetscFunctionReturn(0); 1295 } 1296 1297 #undef __FUNCT__ 1298 #define __FUNCT__ "DMCreateSubDM" 1299 /*@C 1300 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1301 The fields are defined by DMCreateFieldIS(). 1302 1303 Not collective 1304 1305 Input Parameters: 1306 + dm - the DM object 1307 . numFields - number of fields in this subproblem 1308 - len - The number of subproblems in the decomposition (or NULL if not requested) 1309 1310 Output Parameters: 1311 . 
is - The global indices for the subproblem 1312 - dm - The DM for the subproblem 1313 1314 Level: intermediate 1315 1316 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1317 @*/ 1318 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1319 { 1320 PetscErrorCode ierr; 1321 1322 PetscFunctionBegin; 1323 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1324 PetscValidPointer(fields,3); 1325 if (is) PetscValidPointer(is,4); 1326 if (subdm) PetscValidPointer(subdm,5); 1327 if (dm->ops->createsubdm) { 1328 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr); 1329 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1330 PetscFunctionReturn(0); 1331 } 1332 1333 1334 #undef __FUNCT__ 1335 #define __FUNCT__ "DMCreateDomainDecomposition" 1336 /*@C 1337 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1338 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1339 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1340 define a nonoverlapping covering, while outer subdomains can overlap. 1341 The optional list of DMs define the DM for each subproblem. 1342 1343 Not collective 1344 1345 Input Parameter: 1346 . dm - the DM object 1347 1348 Output Parameters: 1349 + len - The number of subproblems in the domain decomposition (or NULL if not requested) 1350 . namelist - The name for each subdomain (or NULL if not requested) 1351 . innerislist - The global indices for each inner subdomain (or NULL, if not requested) 1352 . 
outerislist - The global indices for each outer subdomain (or NULL, if not requested) 1353 - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1354 1355 Level: intermediate 1356 1357 Notes: 1358 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1359 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1360 and all of the arrays should be freed with PetscFree(). 1361 1362 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1363 @*/ 1364 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1365 { 1366 PetscErrorCode ierr; 1367 DMSubDomainHookLink link; 1368 PetscInt i,l; 1369 1370 PetscFunctionBegin; 1371 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1372 if (len) {PetscValidPointer(len,2); *len = 0;} 1373 if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;} 1374 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;} 1375 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;} 1376 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;} 1377 /* 1378 Is it a good idea to apply the following check across all impls? 1379 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1380 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1381 */ 1382 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1383 if (dm->ops->createdomaindecomposition) { 1384 ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr); 1385 /* copy subdomain hooks and context over to the subdomain DMs */ 1386 if (dmlist) { 1387 for (i = 0; i < l; i++) { 1388 for (link=dm->subdomainhook; link; link=link->next) { 1389 if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);} 1390 } 1391 (*dmlist)[i]->ctx = dm->ctx; 1392 } 1393 } 1394 if (len) *len = l; 1395 } 1396 PetscFunctionReturn(0); 1397 } 1398 1399 1400 #undef __FUNCT__ 1401 #define __FUNCT__ "DMCreateDomainDecompositionScatters" 1402 /*@C 1403 DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector 1404 1405 Not collective 1406 1407 Input Parameters: 1408 + dm - the DM object 1409 . n - the number of subdomain scatters 1410 - subdms - the local subdomains 1411 1412 Output Parameters: 1413 + n - the number of scatters returned 1414 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain 1415 . oscat - scatter from global vector to overlapping global vector entries on subdomain 1416 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts) 1417 1418 Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution 1419 of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets 1420 of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of 1421 solution and residual data. 
1422 1423 Level: developer 1424 1425 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1426 @*/ 1427 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1428 { 1429 PetscErrorCode ierr; 1430 1431 PetscFunctionBegin; 1432 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1433 PetscValidPointer(subdms,3); 1434 if (dm->ops->createddscatters) { 1435 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1436 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1437 PetscFunctionReturn(0); 1438 } 1439 1440 #undef __FUNCT__ 1441 #define __FUNCT__ "DMRefine" 1442 /*@ 1443 DMRefine - Refines a DM object 1444 1445 Collective on DM 1446 1447 Input Parameter: 1448 + dm - the DM object 1449 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1450 1451 Output Parameter: 1452 . 
dmf - the refined DM, or NULL 1453 1454 Note: If no refinement was done, the return value is NULL 1455 1456 Level: developer 1457 1458 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1459 @*/ 1460 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1461 { 1462 PetscErrorCode ierr; 1463 DMRefineHookLink link; 1464 1465 PetscFunctionBegin; 1466 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1467 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1468 if (*dmf) { 1469 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1470 1471 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1472 1473 (*dmf)->ctx = dm->ctx; 1474 (*dmf)->leveldown = dm->leveldown; 1475 (*dmf)->levelup = dm->levelup + 1; 1476 1477 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1478 for (link=dm->refinehook; link; link=link->next) { 1479 if (link->refinehook) { 1480 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1481 } 1482 } 1483 } 1484 PetscFunctionReturn(0); 1485 } 1486 1487 #undef __FUNCT__ 1488 #define __FUNCT__ "DMRefineHookAdd" 1489 /*@C 1490 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1491 1492 Logically Collective 1493 1494 Input Arguments: 1495 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1496 . refinehook - function to run when setting up a coarser level 1497 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1498 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1499 1500 Calling sequence of refinehook: 1501 $ refinehook(DM coarse,DM fine,void *ctx); 1502 1503 + coarse - coarse level DM 1504 . 
fine - fine level DM to interpolate problem to 1505 - ctx - optional user-defined function context 1506 1507 Calling sequence for interphook: 1508 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1509 1510 + coarse - coarse level DM 1511 . interp - matrix interpolating a coarse-level solution to the finer grid 1512 . fine - fine level DM to update 1513 - ctx - optional user-defined function context 1514 1515 Level: advanced 1516 1517 Notes: 1518 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1519 1520 If this function is called multiple times, the hooks will be run in the order they are added. 1521 1522 This function is currently not available from Fortran. 1523 1524 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1525 @*/ 1526 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1527 { 1528 PetscErrorCode ierr; 1529 DMRefineHookLink link,*p; 1530 1531 PetscFunctionBegin; 1532 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1533 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1534 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1535 link->refinehook = refinehook; 1536 link->interphook = interphook; 1537 link->ctx = ctx; 1538 link->next = NULL; 1539 *p = link; 1540 PetscFunctionReturn(0); 1541 } 1542 1543 #undef __FUNCT__ 1544 #define __FUNCT__ "DMInterpolate" 1545 /*@ 1546 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1547 1548 Collective if any hooks are 1549 1550 Input Arguments: 1551 + coarse - coarser DM to use as a base 1552 . 
restrct - interpolation matrix, apply using MatInterpolate() 1553 - fine - finer DM to update 1554 1555 Level: developer 1556 1557 .seealso: DMRefineHookAdd(), MatInterpolate() 1558 @*/ 1559 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1560 { 1561 PetscErrorCode ierr; 1562 DMRefineHookLink link; 1563 1564 PetscFunctionBegin; 1565 for (link=fine->refinehook; link; link=link->next) { 1566 if (link->interphook) { 1567 ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr); 1568 } 1569 } 1570 PetscFunctionReturn(0); 1571 } 1572 1573 #undef __FUNCT__ 1574 #define __FUNCT__ "DMGetRefineLevel" 1575 /*@ 1576 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1577 1578 Not Collective 1579 1580 Input Parameter: 1581 . dm - the DM object 1582 1583 Output Parameter: 1584 . level - number of refinements 1585 1586 Level: developer 1587 1588 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1589 1590 @*/ 1591 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1592 { 1593 PetscFunctionBegin; 1594 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1595 *level = dm->levelup; 1596 PetscFunctionReturn(0); 1597 } 1598 1599 #undef __FUNCT__ 1600 #define __FUNCT__ "DMGlobalToLocalHookAdd" 1601 /*@C 1602 DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called 1603 1604 Logically Collective 1605 1606 Input Arguments: 1607 + dm - the DM 1608 . beginhook - function to run at the beginning of DMGlobalToLocalBegin() 1609 . endhook - function to run after DMGlobalToLocalEnd() has completed 1610 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1611 1612 Calling sequence for beginhook: 1613 $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1614 1615 + dm - global DM 1616 . g - global vector 1617 . mode - mode 1618 . 
l - local vector 1619 - ctx - optional user-defined function context 1620 1621 1622 Calling sequence for endhook: 1623 $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1624 1625 + global - global DM 1626 - ctx - optional user-defined function context 1627 1628 Level: advanced 1629 1630 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1631 @*/ 1632 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx) 1633 { 1634 PetscErrorCode ierr; 1635 DMGlobalToLocalHookLink link,*p; 1636 1637 PetscFunctionBegin; 1638 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1639 for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1640 ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr); 1641 link->beginhook = beginhook; 1642 link->endhook = endhook; 1643 link->ctx = ctx; 1644 link->next = NULL; 1645 *p = link; 1646 PetscFunctionReturn(0); 1647 } 1648 1649 #undef __FUNCT__ 1650 #define __FUNCT__ "DMGlobalToLocalBegin" 1651 /*@ 1652 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1653 1654 Neighbor-wise Collective on DM 1655 1656 Input Parameters: 1657 + dm - the DM object 1658 . g - the global vector 1659 . 
mode - INSERT_VALUES or ADD_VALUES 1660 - l - the local vector 1661 1662 1663 Level: beginner 1664 1665 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1666 1667 @*/ 1668 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1669 { 1670 PetscSF sf; 1671 PetscErrorCode ierr; 1672 DMGlobalToLocalHookLink link; 1673 1674 PetscFunctionBegin; 1675 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1676 for (link=dm->gtolhook; link; link=link->next) { 1677 if (link->beginhook) { 1678 ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr); 1679 } 1680 } 1681 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1682 if (sf) { 1683 PetscScalar *lArray, *gArray; 1684 1685 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1686 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1687 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1688 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1689 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1690 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1691 } else { 1692 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1693 } 1694 PetscFunctionReturn(0); 1695 } 1696 1697 #undef __FUNCT__ 1698 #define __FUNCT__ "DMGlobalToLocalEnd" 1699 /*@ 1700 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1701 1702 Neighbor-wise Collective on DM 1703 1704 Input Parameters: 1705 + dm - the DM object 1706 . g - the global vector 1707 . 
mode - INSERT_VALUES or ADD_VALUES 1708 - l - the local vector 1709 1710 1711 Level: beginner 1712 1713 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1714 1715 @*/ 1716 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1717 { 1718 PetscSF sf; 1719 PetscErrorCode ierr; 1720 PetscScalar *lArray, *gArray; 1721 DMGlobalToLocalHookLink link; 1722 1723 PetscFunctionBegin; 1724 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1725 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1726 if (sf) { 1727 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1728 1729 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1730 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1731 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1732 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1733 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1734 } else { 1735 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1736 } 1737 for (link=dm->gtolhook; link; link=link->next) { 1738 if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);} 1739 } 1740 PetscFunctionReturn(0); 1741 } 1742 1743 #undef __FUNCT__ 1744 #define __FUNCT__ "DMLocalToGlobalBegin" 1745 /*@ 1746 DMLocalToGlobalBegin - updates global vectors from local vectors 1747 1748 Neighbor-wise Collective on DM 1749 1750 Input Parameters: 1751 + dm - the DM object 1752 . l - the local vector 1753 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1754 base point. 1755 - - the global vector 1756 1757 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1758 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1759 global array to the final global array with VecAXPY(). 1760 1761 Level: beginner 1762 1763 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1764 1765 @*/ 1766 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1767 { 1768 PetscSF sf; 1769 PetscErrorCode ierr; 1770 1771 PetscFunctionBegin; 1772 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1773 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1774 if (sf) { 1775 MPI_Op op; 1776 PetscScalar *lArray, *gArray; 1777 1778 switch (mode) { 1779 case INSERT_VALUES: 1780 case INSERT_ALL_VALUES: 1781 op = MPIU_REPLACE; break; 1782 case ADD_VALUES: 1783 case ADD_ALL_VALUES: 1784 op = MPI_SUM; break; 1785 default: 1786 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1787 } 1788 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1789 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1790 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1791 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1792 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1793 } else { 1794 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1795 } 1796 PetscFunctionReturn(0); 1797 } 1798 1799 #undef __FUNCT__ 1800 #define __FUNCT__ "DMLocalToGlobalEnd" 1801 /*@ 1802 DMLocalToGlobalEnd - updates global vectors from local vectors 1803 1804 Neighbor-wise Collective on DM 1805 1806 Input Parameters: 1807 + dm - the DM object 1808 . l - the local vector 1809 . 
mode - INSERT_VALUES or ADD_VALUES 1810 - g - the global vector 1811 1812 1813 Level: beginner 1814 1815 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1816 1817 @*/ 1818 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1819 { 1820 PetscSF sf; 1821 PetscErrorCode ierr; 1822 1823 PetscFunctionBegin; 1824 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1825 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1826 if (sf) { 1827 MPI_Op op; 1828 PetscScalar *lArray, *gArray; 1829 1830 switch (mode) { 1831 case INSERT_VALUES: 1832 case INSERT_ALL_VALUES: 1833 op = MPIU_REPLACE; break; 1834 case ADD_VALUES: 1835 case ADD_ALL_VALUES: 1836 op = MPI_SUM; break; 1837 default: 1838 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1839 } 1840 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1841 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1842 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1843 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1844 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1845 } else { 1846 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1847 } 1848 PetscFunctionReturn(0); 1849 } 1850 1851 #undef __FUNCT__ 1852 #define __FUNCT__ "DMLocalToLocalBegin" 1853 /*@ 1854 DMLocalToLocalBegin - Maps from a local vector (including ghost points 1855 that contain irrelevant values) to another local vector where the ghost 1856 points in the second are set correctly. Must be followed by DMLocalToLocalEnd(). 1857 1858 Neighbor-wise Collective on DM and Vec 1859 1860 Input Parameters: 1861 + dm - the DM object 1862 . g - the original local vector 1863 - mode - one of INSERT_VALUES or ADD_VALUES 1864 1865 Output Parameter: 1866 . 
l - the local vector with correct ghost values 1867 1868 Level: intermediate 1869 1870 Notes: 1871 The local vectors used here need not be the same as those 1872 obtained from DMCreateLocalVector(), BUT they 1873 must have the same parallel data layout; they could, for example, be 1874 obtained with VecDuplicate() from the DM originating vectors. 1875 1876 .keywords: DM, local-to-local, begin 1877 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1878 1879 @*/ 1880 PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1881 { 1882 PetscErrorCode ierr; 1883 1884 PetscFunctionBegin; 1885 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1886 ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1887 PetscFunctionReturn(0); 1888 } 1889 1890 #undef __FUNCT__ 1891 #define __FUNCT__ "DMLocalToLocalEnd" 1892 /*@ 1893 DMLocalToLocalEnd - Maps from a local vector (including ghost points 1894 that contain irrelevant values) to another local vector where the ghost 1895 points in the second are set correctly. Must be preceded by DMLocalToLocalBegin(). 1896 1897 Neighbor-wise Collective on DM and Vec 1898 1899 Input Parameters: 1900 + da - the DM object 1901 . g - the original local vector 1902 - mode - one of INSERT_VALUES or ADD_VALUES 1903 1904 Output Parameter: 1905 . l - the local vector with correct ghost values 1906 1907 Level: intermediate 1908 1909 Notes: 1910 The local vectors used here need not be the same as those 1911 obtained from DMCreateLocalVector(), BUT they 1912 must have the same parallel data layout; they could, for example, be 1913 obtained with VecDuplicate() from the DM originating vectors. 
1914 1915 .keywords: DM, local-to-local, end 1916 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1917 1918 @*/ 1919 PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1920 { 1921 PetscErrorCode ierr; 1922 1923 PetscFunctionBegin; 1924 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1925 ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1926 PetscFunctionReturn(0); 1927 } 1928 1929 1930 #undef __FUNCT__ 1931 #define __FUNCT__ "DMCoarsen" 1932 /*@ 1933 DMCoarsen - Coarsens a DM object 1934 1935 Collective on DM 1936 1937 Input Parameter: 1938 + dm - the DM object 1939 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1940 1941 Output Parameter: 1942 . dmc - the coarsened DM 1943 1944 Level: developer 1945 1946 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1947 1948 @*/ 1949 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1950 { 1951 PetscErrorCode ierr; 1952 DMCoarsenHookLink link; 1953 1954 PetscFunctionBegin; 1955 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1956 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1957 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1958 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1959 (*dmc)->ctx = dm->ctx; 1960 (*dmc)->levelup = dm->levelup; 1961 (*dmc)->leveldown = dm->leveldown + 1; 1962 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1963 for (link=dm->coarsenhook; link; link=link->next) { 1964 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1965 } 1966 PetscFunctionReturn(0); 1967 } 1968 1969 #undef __FUNCT__ 1970 #define __FUNCT__ "DMCoarsenHookAdd" 1971 /*@C 1972 DMCoarsenHookAdd - adds a callback to be 
run when restricting a nonlinear problem to the coarse grid

   Logically Collective

   Input Arguments:
+  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
.  coarsenhook - function to run when setting up a coarser level
.  restricthook - function to run to update data on coarser levels (once per SNESSolve())
-  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

   Calling sequence of coarsenhook:
$    coarsenhook(DM fine,DM coarse,void *ctx);

+  fine - fine level DM
.  coarse - coarse level DM to restrict problem to
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

+  fine - fine level DM
.  mrestrict - matrix restricting a fine-level solution to the coarse grid
.  rscale - scaling vector for restriction
.  inject - matrix restricting by injection
.  coarse - coarse level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fine,DM_CLASSID,1);
  for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in the order they were registered. */
  ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
  link->coarsenhook  = coarsenhook;
  link->restricthook = restricthook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestrict"
/*@
   DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

   Collective if any hooks are

   Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.  rscale - scaling vector for restriction
.
inject - injection matrix, also use MatRestrict() 2041 - coarse - coarer DM to update 2042 2043 Level: developer 2044 2045 .seealso: DMCoarsenHookAdd(), MatRestrict() 2046 @*/ 2047 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 2048 { 2049 PetscErrorCode ierr; 2050 DMCoarsenHookLink link; 2051 2052 PetscFunctionBegin; 2053 for (link=fine->coarsenhook; link; link=link->next) { 2054 if (link->restricthook) { 2055 ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr); 2056 } 2057 } 2058 PetscFunctionReturn(0); 2059 } 2060 2061 #undef __FUNCT__ 2062 #define __FUNCT__ "DMSubDomainHookAdd" 2063 /*@C 2064 DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid 2065 2066 Logically Collective 2067 2068 Input Arguments: 2069 + global - global DM 2070 . ddhook - function to run to pass data to the decomposition DM upon its creation 2071 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 2072 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 2073 2074 2075 Calling sequence for ddhook: 2076 $ ddhook(DM global,DM block,void *ctx) 2077 2078 + global - global DM 2079 . block - block DM 2080 - ctx - optional user-defined function context 2081 2082 Calling sequence for restricthook: 2083 $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx) 2084 2085 + global - global DM 2086 . out - scatter to the outer (with ghost and overlap points) block vector 2087 . in - scatter to block vector values only owned locally 2088 . block - block DM 2089 - ctx - optional user-defined function context 2090 2091 Level: advanced 2092 2093 Notes: 2094 This function is only needed if auxiliary data needs to be set up on subdomain DMs. 2095 2096 If this function is called multiple times, the hooks will be run in the order they are added. 

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the global information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in the order they were registered. */
  ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook       = ddhook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainRestrict"
/*@
   DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

   Collective if any hooks are

   Input Arguments:
+  fine - finer DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 2133 - coarse - coarer DM to update 2134 2135 Level: developer 2136 2137 .seealso: DMCoarsenHookAdd(), MatRestrict() 2138 @*/ 2139 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 2140 { 2141 PetscErrorCode ierr; 2142 DMSubDomainHookLink link; 2143 2144 PetscFunctionBegin; 2145 for (link=global->subdomainhook; link; link=link->next) { 2146 if (link->restricthook) { 2147 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2148 } 2149 } 2150 PetscFunctionReturn(0); 2151 } 2152 2153 #undef __FUNCT__ 2154 #define __FUNCT__ "DMGetCoarsenLevel" 2155 /*@ 2156 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2157 2158 Not Collective 2159 2160 Input Parameter: 2161 . dm - the DM object 2162 2163 Output Parameter: 2164 . level - number of coarsenings 2165 2166 Level: developer 2167 2168 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2169 2170 @*/ 2171 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2172 { 2173 PetscFunctionBegin; 2174 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2175 *level = dm->leveldown; 2176 PetscFunctionReturn(0); 2177 } 2178 2179 2180 2181 #undef __FUNCT__ 2182 #define __FUNCT__ "DMRefineHierarchy" 2183 /*@C 2184 DMRefineHierarchy - Refines a DM object, all levels at once 2185 2186 Collective on DM 2187 2188 Input Parameter: 2189 + dm - the DM object 2190 - nlevels - the number of levels of refinement 2191 2192 Output Parameter: 2193 . 
dmf - the refined DM hierarchy 2194 2195 Level: developer 2196 2197 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2198 2199 @*/ 2200 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2201 { 2202 PetscErrorCode ierr; 2203 2204 PetscFunctionBegin; 2205 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2206 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2207 if (nlevels == 0) PetscFunctionReturn(0); 2208 if (dm->ops->refinehierarchy) { 2209 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2210 } else if (dm->ops->refine) { 2211 PetscInt i; 2212 2213 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2214 for (i=1; i<nlevels; i++) { 2215 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2216 } 2217 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2218 PetscFunctionReturn(0); 2219 } 2220 2221 #undef __FUNCT__ 2222 #define __FUNCT__ "DMCoarsenHierarchy" 2223 /*@C 2224 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2225 2226 Collective on DM 2227 2228 Input Parameter: 2229 + dm - the DM object 2230 - nlevels - the number of levels of coarsening 2231 2232 Output Parameter: 2233 . 
 dmc - the coarsened DM hierarchy

   Level: developer

.seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dmc,3);
  if (dm->ops->coarsenhierarchy) {
    /* The implementation can build the whole hierarchy in one shot. */
    ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
  } else if (dm->ops->coarsen) {
    /* Otherwise build it level by level with repeated single coarsenings. */
    PetscInt i;

    ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
    }
  } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateAggregates"
/*@
   DMCreateAggregates - Gets the aggregates that map between
   grids associated with two DMs.

   Collective on DM

   Input Parameters:
+  dmc - the coarse grid DM
-  dmf - the fine grid DM

   Output Parameters:
.
 rest - the restriction matrix (transpose of the projection matrix)

   Level: intermediate

.keywords: interpolation, restriction, multigrid

.seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
@*/
PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
  PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
  /* Delegates entirely to the coarse DM's implementation; errors if the
     implementation does not provide getaggregates. */
  ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
   DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

   Not Collective

   Input Parameters:
+  dm - the DM object
-  destroy - the destroy function

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
   DMSetApplicationContext - Set a user context into a DM object

   Not Collective

   Input Parameters:
+  dm - the DM object
-  ctx - the user context

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM does not take ownership; the context is destroyed only if a
     destroy function was set with DMSetApplicationContextDestroy(). */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}

2342 #undef __FUNCT__ 2343 #define __FUNCT__ "DMGetApplicationContext" 2344 /*@ 2345 DMGetApplicationContext - Gets a user context from a DM object 2346 2347 Not Collective 2348 2349 Input Parameter: 2350 . dm - the DM object 2351 2352 Output Parameter: 2353 . ctx - the user context 2354 2355 Level: intermediate 2356 2357 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2358 2359 @*/ 2360 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2361 { 2362 PetscFunctionBegin; 2363 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2364 *(void**)ctx = dm->ctx; 2365 PetscFunctionReturn(0); 2366 } 2367 2368 #undef __FUNCT__ 2369 #define __FUNCT__ "DMSetVariableBounds" 2370 /*@C 2371 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2372 2373 Logically Collective on DM 2374 2375 Input Parameter: 2376 + dm - the DM object 2377 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2378 2379 Level: intermediate 2380 2381 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2382 DMSetJacobian() 2383 2384 @*/ 2385 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2386 { 2387 PetscFunctionBegin; 2388 dm->ops->computevariablebounds = f; 2389 PetscFunctionReturn(0); 2390 } 2391 2392 #undef __FUNCT__ 2393 #define __FUNCT__ "DMHasVariableBounds" 2394 /*@ 2395 DMHasVariableBounds - does the DM object have a variable bounds function? 2396 2397 Not Collective 2398 2399 Input Parameter: 2400 . dm - the DM object to destroy 2401 2402 Output Parameter: 2403 . 
flg - PETSC_TRUE if the variable bounds function exists 2404 2405 Level: developer 2406 2407 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2408 2409 @*/ 2410 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2411 { 2412 PetscFunctionBegin; 2413 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2414 PetscFunctionReturn(0); 2415 } 2416 2417 #undef __FUNCT__ 2418 #define __FUNCT__ "DMComputeVariableBounds" 2419 /*@C 2420 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2421 2422 Logically Collective on DM 2423 2424 Input Parameters: 2425 + dm - the DM object to destroy 2426 - x - current solution at which the bounds are computed 2427 2428 Output parameters: 2429 + xl - lower bound 2430 - xu - upper bound 2431 2432 Level: intermediate 2433 2434 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2435 2436 @*/ 2437 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2438 { 2439 PetscErrorCode ierr; 2440 2441 PetscFunctionBegin; 2442 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2443 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2444 if (dm->ops->computevariablebounds) { 2445 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2446 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2447 PetscFunctionReturn(0); 2448 } 2449 2450 #undef __FUNCT__ 2451 #define __FUNCT__ "DMHasColoring" 2452 /*@ 2453 DMHasColoring - does the DM object have a method of providing a coloring? 2454 2455 Not Collective 2456 2457 Input Parameter: 2458 . dm - the DM object 2459 2460 Output Parameter: 2461 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 

   Level: developer

.seealso DMHasFunction(), DMCreateColoring()

@*/
PetscErrorCode DMHasColoring(DM dm,PetscBool *flg)
{
  PetscFunctionBegin;
  *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVec"
/*@C
   DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.

   Collective on DM

   Input Parameter:
+  dm - the DM object
-  x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode DMSetVec(DM dm,Vec x)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (x) {
    /* Keep a private copy so later changes to the caller's vector do not
       affect the saved linearization point. */
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  } else if (dm->x) {
    /* Passing NULL clears any previously stored vector. */
    ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

PetscFunctionList DMList              = NULL;
PetscBool         DMRegisterAllCalled = PETSC_FALSE;

#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMSLICED, or DMCOMPOSITE).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op if the DM already has the requested type. */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down the old implementation's data before the new constructor runs. */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm  - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll();CHKERRQ(ierr);
  }
  /* Borrowed reference to the type name; caller must not free it. */
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.
2592 2593 Collective on DM 2594 2595 Input Parameters: 2596 + dm - the DM 2597 - newtype - new DM type (use "same" for the same type) 2598 2599 Output Parameter: 2600 . M - pointer to new DM 2601 2602 Notes: 2603 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2604 the MPI communicator of the generated DM is always the same as the communicator 2605 of the input DM. 2606 2607 Level: intermediate 2608 2609 .seealso: DMCreate() 2610 @*/ 2611 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2612 { 2613 DM B; 2614 char convname[256]; 2615 PetscBool sametype, issame; 2616 PetscErrorCode ierr; 2617 2618 PetscFunctionBegin; 2619 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2620 PetscValidType(dm,1); 2621 PetscValidPointer(M,3); 2622 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2623 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2624 { 2625 PetscErrorCode (*conv)(DM, DMType, DM*) = NULL; 2626 2627 /* 2628 Order of precedence: 2629 1) See if a specialized converter is known to the current DM. 2630 2) See if a specialized converter is known to the desired DM class. 2631 3) See if a good general converter is registered for the desired class 2632 4) See if a good general converter is known for the current matrix. 2633 5) Use a really basic converter. 2634 */ 2635 2636 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2637 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2638 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2639 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2640 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2641 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2642 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr); 2643 if (conv) goto foundconv; 2644 2645 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2646 ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr); 2647 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2648 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2649 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2650 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2651 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2652 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2653 ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr); 2654 if (conv) { 2655 ierr = DMDestroy(&B);CHKERRQ(ierr); 2656 goto foundconv; 2657 } 2658 2659 #if 0 2660 /* 3) See if a good general converter is registered for the desired class */ 2661 conv = B->ops->convertfrom; 2662 ierr = DMDestroy(&B);CHKERRQ(ierr); 2663 if (conv) goto foundconv; 2664 2665 /* 4) See if a good general converter is known for the current matrix */ 2666 if (dm->ops->convert) { 2667 conv = dm->ops->convert; 2668 } 2669 if (conv) goto foundconv; 2670 #endif 2671 2672 /* 5) Use a really basic converter. 
*/ 2673 SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2674 2675 foundconv: 2676 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2677 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2678 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2679 } 2680 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2681 PetscFunctionReturn(0); 2682 } 2683 2684 /*--------------------------------------------------------------------------------------------------------------------*/ 2685 2686 #undef __FUNCT__ 2687 #define __FUNCT__ "DMRegister" 2688 /*@C 2689 DMRegister - Adds a new DM component implementation 2690 2691 Not Collective 2692 2693 Input Parameters: 2694 + name - The name of a new user-defined creation routine 2695 - create_func - The creation routine itself 2696 2697 Notes: 2698 DMRegister() may be called multiple times to add several user-defined DMs 2699 2700 2701 Sample usage: 2702 .vb 2703 DMRegister("my_da", MyDMCreate); 2704 .ve 2705 2706 Then, your DM type can be chosen with the procedural interface via 2707 .vb 2708 DMCreate(MPI_Comm, DM *); 2709 DMSetType(DM,"my_da"); 2710 .ve 2711 or at runtime via the option 2712 .vb 2713 -da_type my_da 2714 .ve 2715 2716 Level: advanced 2717 2718 .keywords: DM, register 2719 .seealso: DMRegisterAll(), DMRegisterDestroy() 2720 2721 @*/ 2722 PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM)) 2723 { 2724 PetscErrorCode ierr; 2725 2726 PetscFunctionBegin; 2727 ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr); 2728 PetscFunctionReturn(0); 2729 } 2730 2731 #undef __FUNCT__ 2732 #define __FUNCT__ "DMLoad" 2733 /*@C 2734 DMLoad - Loads a DM that has been stored in binary with DMView(). 

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
{
  PetscBool      isbinary, ishdf5;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);CHKERRQ(ierr);
  if (isbinary) {
    PetscInt classid;
    char     type[256];

    /* Binary files start with a classid so we can detect non-DM data early. */
    ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
    if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
    /* The type stored in the file overrides any type already set on newdm. */
    ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
    ierr = DMSetType(newdm, type);CHKERRQ(ierr);
    if (newdm->ops->load) {ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);}
  } else if (ishdf5) {
    if (newdm->ops->load) {ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);}
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen() or PetscViewerHDF5Open()");
  PetscFunctionReturn(0);
}

/******************************** FEM Support **********************************/

#undef __FUNCT__
#define __FUNCT__ "DMPrintCellVector"
/* Debugging helper: prints one element vector of length len for cell c to stdout. */
PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
{
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
  for (f = 0; f < len; ++f) {
    ierr = PetscPrintf(PETSC_COMM_SELF, "  | %g |\n", (double)PetscRealPart(x[f]));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPrintCellMatrix"
/* Debugging helper: prints one rows-by-cols element matrix (row-major in A) for cell c. */
PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
{
  PetscInt       f, g;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
  for (f = 0; f < rows; ++f) {
    ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
    for (g = 0; g < cols; ++g) {
      ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5g", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
    }
    ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPrintLocalVec"
/* Debugging helper: prints a local vector rank by rank, chopping entries below tol.
   Collective: every rank participates in the barrier on each iteration. */
PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
{
  PetscMPIInt    rank, numProcs;
  PetscInt       p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
  ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr);
  for (p = 0; p < numProcs; ++p) {
    if (p == rank) {
      Vec x;

      /* Work on a copy so chopping small entries does not modify the caller's vector. */
      ierr = VecDuplicate(X, &x);CHKERRQ(ierr);
      ierr = VecCopy(X, x);CHKERRQ(ierr);
      ierr = VecChop(x, tol);CHKERRQ(ierr);
      ierr = VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
      ierr = VecDestroy(&x);CHKERRQ(ierr);
      ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
    }
    /* Serialize output across ranks. */
    ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Build the section lazily if the implementation knows how. */
  if (!dm->defaultSection && dm->ops->createdefaultsection) {ierr = (*dm->ops->createdefaultsection)(dm);CHKERRQ(ierr);}
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so dm->defaultSection == section is safe. */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Propagate the section's field names onto the DM's field objects. */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName((PetscObject) dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2936 2937 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 2938 @*/ 2939 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) 2940 { 2941 PetscErrorCode ierr; 2942 2943 PetscFunctionBegin; 2944 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2945 PetscValidPointer(section, 2); 2946 if (!dm->defaultGlobalSection) { 2947 PetscSection s; 2948 2949 ierr = DMGetDefaultSection(dm, &s);CHKERRQ(ierr); 2950 if (!s) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection in order to create a global PetscSection"); 2951 if (!dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSF in order to create a global PetscSection"); 2952 ierr = PetscSectionCreateGlobalSection(s, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 2953 ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr); 2954 ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm), dm->defaultGlobalSection, &dm->map);CHKERRQ(ierr); 2955 } 2956 *section = dm->defaultGlobalSection; 2957 PetscFunctionReturn(0); 2958 } 2959 2960 #undef __FUNCT__ 2961 #define __FUNCT__ "DMSetDefaultGlobalSection" 2962 /*@ 2963 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 
2964 2965 Input Parameters: 2966 + dm - The DM 2967 - section - The PetscSection, or NULL 2968 2969 Level: intermediate 2970 2971 Note: Any existing Section will be destroyed 2972 2973 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2974 @*/ 2975 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2976 { 2977 PetscErrorCode ierr; 2978 2979 PetscFunctionBegin; 2980 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2981 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2982 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2983 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2984 dm->defaultGlobalSection = section; 2985 PetscFunctionReturn(0); 2986 } 2987 2988 #undef __FUNCT__ 2989 #define __FUNCT__ "DMGetDefaultSF" 2990 /*@ 2991 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2992 it is created from the default PetscSection layouts in the DM. 2993 2994 Input Parameter: 2995 . dm - The DM 2996 2997 Output Parameter: 2998 . sf - The PetscSF 2999 3000 Level: intermediate 3001 3002 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
3003 3004 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3005 @*/ 3006 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) 3007 { 3008 PetscInt nroots; 3009 PetscErrorCode ierr; 3010 3011 PetscFunctionBegin; 3012 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3013 PetscValidPointer(sf, 2); 3014 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 3015 if (nroots < 0) { 3016 PetscSection section, gSection; 3017 3018 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3019 if (section) { 3020 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3021 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3022 } else { 3023 *sf = NULL; 3024 PetscFunctionReturn(0); 3025 } 3026 } 3027 *sf = dm->defaultSF; 3028 PetscFunctionReturn(0); 3029 } 3030 3031 #undef __FUNCT__ 3032 #define __FUNCT__ "DMSetDefaultSF" 3033 /*@ 3034 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3035 3036 Input Parameters: 3037 + dm - The DM 3038 - sf - The PetscSF 3039 3040 Level: intermediate 3041 3042 Note: Any previous SF is destroyed 3043 3044 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3045 @*/ 3046 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) 3047 { 3048 PetscErrorCode ierr; 3049 3050 PetscFunctionBegin; 3051 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3052 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3053 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3054 dm->defaultSF = sf; 3055 PetscFunctionReturn(0); 3056 } 3057 3058 #undef __FUNCT__ 3059 #define __FUNCT__ "DMCreateDefaultSF" 3060 /*@C 3061 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3062 describing the data layout. 3063 3064 Input Parameters: 3065 + dm - The DM 3066 . 
localSection - PetscSection describing the local data layout 3067 - globalSection - PetscSection describing the global data layout 3068 3069 Level: intermediate 3070 3071 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3072 @*/ 3073 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3074 { 3075 MPI_Comm comm; 3076 PetscLayout layout; 3077 const PetscInt *ranges; 3078 PetscInt *local; 3079 PetscSFNode *remote; 3080 PetscInt pStart, pEnd, p, nroots, nleaves = 0, l; 3081 PetscMPIInt size, rank; 3082 PetscErrorCode ierr; 3083 3084 PetscFunctionBegin; 3085 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 3086 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3087 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3088 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3089 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3090 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3091 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3092 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3093 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3094 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3095 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3096 for (p = pStart; p < pEnd; ++p) { 3097 PetscInt gdof, gcdof; 3098 3099 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3100 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3101 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3102 } 3103 ierr = PetscMalloc1(nleaves, &local);CHKERRQ(ierr); 3104 ierr = PetscMalloc1(nleaves, &remote);CHKERRQ(ierr); 3105 for (p = pStart, l = 0; p < pEnd; ++p) { 3106 const PetscInt *cind; 3107 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3108 3109 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3110 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3111 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3112 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3113 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3114 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3115 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3116 if (!gdof) continue; /* Censored point */ 3117 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3118 if (gsize != dof-cdof) { 3119 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 3120 cdof = 0; /* Ignore constraints */ 3121 } 3122 for (d = 0, c = 0; d < dof; ++d) { 3123 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3124 local[l+d-c] = off+d; 3125 } 3126 if (gdof < 0) { 3127 for (d = 0; d < gsize; ++d, ++l) { 3128 PetscInt offset = -(goff+1) + d, r; 3129 3130 ierr = PetscFindInt(offset,size+1,ranges,&r);CHKERRQ(ierr); 3131 if (r < 0) r = -(r+2); 3132 if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff); 3133 remote[l].rank = r; 3134 remote[l].index = offset - ranges[r]; 3135 } 3136 } else { 3137 for (d = 0; d < gsize; ++d, ++l) { 3138 remote[l].rank = rank; 3139 remote[l].index = goff+d - ranges[rank]; 3140 } 3141 } 3142 } 3143 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3144 
ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3145 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3146 PetscFunctionReturn(0); 3147 } 3148 3149 #undef __FUNCT__ 3150 #define __FUNCT__ "DMGetPointSF" 3151 /*@ 3152 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3153 3154 Input Parameter: 3155 . dm - The DM 3156 3157 Output Parameter: 3158 . sf - The PetscSF 3159 3160 Level: intermediate 3161 3162 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3163 3164 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3165 @*/ 3166 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 3167 { 3168 PetscFunctionBegin; 3169 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3170 PetscValidPointer(sf, 2); 3171 *sf = dm->sf; 3172 PetscFunctionReturn(0); 3173 } 3174 3175 #undef __FUNCT__ 3176 #define __FUNCT__ "DMSetPointSF" 3177 /*@ 3178 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 
3179 3180 Input Parameters: 3181 + dm - The DM 3182 - sf - The PetscSF 3183 3184 Level: intermediate 3185 3186 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3187 @*/ 3188 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3189 { 3190 PetscErrorCode ierr; 3191 3192 PetscFunctionBegin; 3193 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3194 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3195 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3196 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3197 dm->sf = sf; 3198 PetscFunctionReturn(0); 3199 } 3200 3201 #undef __FUNCT__ 3202 #define __FUNCT__ "DMGetNumFields" 3203 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3204 { 3205 PetscFunctionBegin; 3206 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3207 PetscValidPointer(numFields, 2); 3208 *numFields = dm->numFields; 3209 PetscFunctionReturn(0); 3210 } 3211 3212 #undef __FUNCT__ 3213 #define __FUNCT__ "DMSetNumFields" 3214 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields) 3215 { 3216 PetscInt f; 3217 PetscErrorCode ierr; 3218 3219 PetscFunctionBegin; 3220 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3221 if (dm->numFields == numFields) PetscFunctionReturn(0); 3222 for (f = 0; f < dm->numFields; ++f) { 3223 ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr); 3224 } 3225 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3226 dm->numFields = numFields; 3227 ierr = PetscMalloc1(dm->numFields, &dm->fields);CHKERRQ(ierr); 3228 for (f = 0; f < dm->numFields; ++f) { 3229 ierr = PetscContainerCreate(PetscObjectComm((PetscObject) dm), (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3230 } 3231 PetscFunctionReturn(0); 3232 } 3233 3234 #undef __FUNCT__ 3235 #define __FUNCT__ "DMGetField" 3236 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3237 { 3238 PetscFunctionBegin; 3239 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3240 PetscValidPointer(field, 3); 3241 if (!dm->fields) 
SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3242 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3243 *field = (PetscObject) dm->fields[f]; 3244 PetscFunctionReturn(0); 3245 } 3246 3247 #undef __FUNCT__ 3248 #define __FUNCT__ "DMSetField" 3249 PetscErrorCode DMSetField(DM dm, PetscInt f, PetscObject field) 3250 { 3251 PetscErrorCode ierr; 3252 3253 PetscFunctionBegin; 3254 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3255 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3256 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3257 if (((PetscObject) dm->fields[f]) == field) PetscFunctionReturn(0); 3258 ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr); 3259 dm->fields[f] = (PetscFE) field; 3260 ierr = PetscObjectReference((PetscObject) dm->fields[f]);CHKERRQ(ierr); 3261 PetscFunctionReturn(0); 3262 } 3263 3264 #undef __FUNCT__ 3265 #define __FUNCT__ "DMRestrictHook_Coordinates" 3266 PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx) 3267 { 3268 DM dm_coord,dmc_coord; 3269 PetscErrorCode ierr; 3270 Vec coords,ccoords; 3271 VecScatter scat; 3272 PetscFunctionBegin; 3273 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3274 ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr); 3275 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3276 ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr); 3277 if (coords && !ccoords) { 3278 ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr); 3279 ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr); 3280 ierr = 
VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3281 ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3282 ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr); 3283 ierr = VecScatterDestroy(&scat);CHKERRQ(ierr); 3284 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3285 } 3286 PetscFunctionReturn(0); 3287 } 3288 3289 #undef __FUNCT__ 3290 #define __FUNCT__ "DMSubDomainHook_Coordinates" 3291 static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx) 3292 { 3293 DM dm_coord,subdm_coord; 3294 PetscErrorCode ierr; 3295 Vec coords,ccoords,clcoords; 3296 VecScatter *scat_i,*scat_g; 3297 PetscFunctionBegin; 3298 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3299 ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr); 3300 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3301 ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr); 3302 if (coords && !ccoords) { 3303 ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr); 3304 ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr); 3305 ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr); 3306 ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3307 ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3308 ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3309 ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3310 ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr); 3311 ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr); 3312 ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr); 3313 ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr); 3314 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3315 ierr = VecDestroy(&clcoords);CHKERRQ(ierr); 3316 ierr = PetscFree(scat_i);CHKERRQ(ierr); 3317 ierr = 
PetscFree(scat_g);CHKERRQ(ierr); 3318 } 3319 PetscFunctionReturn(0); 3320 } 3321 3322 #undef __FUNCT__ 3323 #define __FUNCT__ "DMSetCoordinates" 3324 /*@ 3325 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3326 3327 Collective on DM 3328 3329 Input Parameters: 3330 + dm - the DM 3331 - c - coordinate vector 3332 3333 Note: 3334 The coordinates do include those for ghost points, which are in the local vector 3335 3336 Level: intermediate 3337 3338 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3339 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3340 @*/ 3341 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3342 { 3343 PetscErrorCode ierr; 3344 3345 PetscFunctionBegin; 3346 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3347 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3348 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3349 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3350 dm->coordinates = c; 3351 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3352 ierr = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3353 ierr = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3354 PetscFunctionReturn(0); 3355 } 3356 3357 #undef __FUNCT__ 3358 #define __FUNCT__ "DMSetCoordinatesLocal" 3359 /*@ 3360 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3361 3362 Collective on DM 3363 3364 Input Parameters: 3365 + dm - the DM 3366 - c - coordinate vector 3367 3368 Note: 3369 The coordinates of ghost points can be set using DMSetCoordinates() 3370 followed by DMGetCoordinatesLocal(). This is intended to enable the 3371 setting of ghost coordinates outside of the domain. 
3372 3373 Level: intermediate 3374 3375 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3376 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3377 @*/ 3378 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3379 { 3380 PetscErrorCode ierr; 3381 3382 PetscFunctionBegin; 3383 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3384 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3385 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3386 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3387 3388 dm->coordinatesLocal = c; 3389 3390 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3391 PetscFunctionReturn(0); 3392 } 3393 3394 #undef __FUNCT__ 3395 #define __FUNCT__ "DMGetCoordinates" 3396 /*@ 3397 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3398 3399 Not Collective 3400 3401 Input Parameter: 3402 . dm - the DM 3403 3404 Output Parameter: 3405 . c - global coordinate vector 3406 3407 Note: 3408 This is a borrowed reference, so the user should NOT destroy this vector 3409 3410 Each process has only the local coordinates (does NOT have the ghost coordinates). 3411 3412 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3413 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3414 3415 Level: intermediate 3416 3417 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3418 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3419 @*/ 3420 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3421 { 3422 PetscErrorCode ierr; 3423 3424 PetscFunctionBegin; 3425 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3426 PetscValidPointer(c,2); 3427 if (!dm->coordinates && dm->coordinatesLocal) { 3428 DM cdm = NULL; 3429 3430 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3431 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3432 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3433 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3434 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3435 } 3436 *c = dm->coordinates; 3437 PetscFunctionReturn(0); 3438 } 3439 3440 #undef __FUNCT__ 3441 #define __FUNCT__ "DMGetCoordinatesLocal" 3442 /*@ 3443 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3444 3445 Collective on DM 3446 3447 Input Parameter: 3448 . dm - the DM 3449 3450 Output Parameter: 3451 . c - coordinate vector 3452 3453 Note: 3454 This is a borrowed reference, so the user should NOT destroy this vector 3455 3456 Each process has the local and ghost coordinates 3457 3458 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3459 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3460 3461 Level: intermediate 3462 3463 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3464 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3465 @*/ 3466 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3467 { 3468 PetscErrorCode ierr; 3469 3470 PetscFunctionBegin; 3471 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3472 PetscValidPointer(c,2); 3473 if (!dm->coordinatesLocal && dm->coordinates) { 3474 DM cdm = NULL; 3475 3476 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3477 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3478 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3479 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3480 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3481 } 3482 *c = dm->coordinatesLocal; 3483 PetscFunctionReturn(0); 3484 } 3485 3486 #undef __FUNCT__ 3487 #define __FUNCT__ "DMGetCoordinateDM" 3488 /*@ 3489 DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates 3490 3491 Collective on DM 3492 3493 Input Parameter: 3494 . dm - the DM 3495 3496 Output Parameter: 3497 . 
cdm - coordinate DM 3498 3499 Level: intermediate 3500 3501 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3502 .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3503 @*/ 3504 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3505 { 3506 PetscErrorCode ierr; 3507 3508 PetscFunctionBegin; 3509 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3510 PetscValidPointer(cdm,2); 3511 if (!dm->coordinateDM) { 3512 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3513 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3514 } 3515 *cdm = dm->coordinateDM; 3516 PetscFunctionReturn(0); 3517 } 3518 3519 #undef __FUNCT__ 3520 #define __FUNCT__ "DMSetCoordinateDM" 3521 /*@ 3522 DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates 3523 3524 Logically Collective on DM 3525 3526 Input Parameters: 3527 + dm - the DM 3528 - cdm - coordinate DM 3529 3530 Level: intermediate 3531 3532 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3533 .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3534 @*/ 3535 PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm) 3536 { 3537 PetscErrorCode ierr; 3538 3539 PetscFunctionBegin; 3540 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3541 PetscValidHeaderSpecific(cdm,DM_CLASSID,2); 3542 ierr = DMDestroy(&dm->coordinateDM);CHKERRQ(ierr); 3543 dm->coordinateDM = cdm; 3544 ierr = PetscObjectReference((PetscObject) dm->coordinateDM);CHKERRQ(ierr); 3545 PetscFunctionReturn(0); 3546 } 3547 3548 #undef __FUNCT__ 3549 #define __FUNCT__ "DMGetCoordinateSection" 3550 /*@ 3551 DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh. 3552 3553 Not Collective 3554 3555 Input Parameter: 3556 . 
dm - The DM object 3557 3558 Output Parameter: 3559 . section - The PetscSection object 3560 3561 Level: intermediate 3562 3563 .keywords: mesh, coordinates 3564 .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection() 3565 @*/ 3566 PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section) 3567 { 3568 DM cdm; 3569 PetscErrorCode ierr; 3570 3571 PetscFunctionBegin; 3572 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3573 PetscValidPointer(section, 2); 3574 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3575 ierr = DMGetDefaultSection(cdm, section);CHKERRQ(ierr); 3576 PetscFunctionReturn(0); 3577 } 3578 3579 #undef __FUNCT__ 3580 #define __FUNCT__ "DMSetCoordinateSection" 3581 /*@ 3582 DMSetCoordinateSection - Set the layout of coordinate values over the mesh. 3583 3584 Not Collective 3585 3586 Input Parameters: 3587 + dm - The DM object 3588 - section - The PetscSection object 3589 3590 Level: intermediate 3591 3592 .keywords: mesh, coordinates 3593 .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection() 3594 @*/ 3595 PetscErrorCode DMSetCoordinateSection(DM dm, PetscSection section) 3596 { 3597 DM cdm; 3598 PetscErrorCode ierr; 3599 3600 PetscFunctionBegin; 3601 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3602 PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 3603 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3604 ierr = DMSetDefaultSection(cdm, section);CHKERRQ(ierr); 3605 PetscFunctionReturn(0); 3606 } 3607 3608 #undef __FUNCT__ 3609 #define __FUNCT__ "DMLocatePoints" 3610 /*@ 3611 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3612 3613 Not collective 3614 3615 Input Parameters: 3616 + dm - The DM 3617 - v - The Vec of points 3618 3619 Output Parameter: 3620 . 
cells - The local cell numbers for cells which contain the points 3621 3622 Level: developer 3623 3624 .keywords: point location, mesh 3625 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3626 @*/ 3627 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells) 3628 { 3629 PetscErrorCode ierr; 3630 3631 PetscFunctionBegin; 3632 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3633 PetscValidHeaderSpecific(v,VEC_CLASSID,2); 3634 PetscValidPointer(cells,3); 3635 if (dm->ops->locatepoints) { 3636 ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr); 3637 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM"); 3638 PetscFunctionReturn(0); 3639 } 3640 3641 #undef __FUNCT__ 3642 #define __FUNCT__ "DMGetOutputDM" 3643 /*@ 3644 DMGetOutputDM - Retrieve the DM associated with the layout for output 3645 3646 Input Parameter: 3647 . dm - The original DM 3648 3649 Output Parameter: 3650 . odm - The DM which provides the layout for output 3651 3652 Level: intermediate 3653 3654 .seealso: VecView(), DMGetDefaultGlobalSection() 3655 @*/ 3656 PetscErrorCode DMGetOutputDM(DM dm, DM *odm) 3657 { 3658 PetscSection section; 3659 PetscBool hasConstraints; 3660 PetscErrorCode ierr; 3661 3662 PetscFunctionBegin; 3663 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3664 PetscValidPointer(odm,2); 3665 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3666 ierr = PetscSectionHasConstraints(section, &hasConstraints);CHKERRQ(ierr); 3667 if (!hasConstraints) { 3668 *odm = dm; 3669 PetscFunctionReturn(0); 3670 } 3671 if (!dm->dmBC) { 3672 PetscSection newSection, gsection; 3673 PetscSF sf; 3674 3675 ierr = DMClone(dm, &dm->dmBC);CHKERRQ(ierr); 3676 ierr = PetscSectionClone(section, &newSection);CHKERRQ(ierr); 3677 ierr = DMSetDefaultSection(dm->dmBC, newSection);CHKERRQ(ierr); 3678 ierr = PetscSectionDestroy(&newSection);CHKERRQ(ierr); 3679 ierr = DMGetPointSF(dm->dmBC, &sf);CHKERRQ(ierr); 3680 ierr = 
PetscSectionCreateGlobalSection(section, sf, PETSC_TRUE, &gsection);CHKERRQ(ierr); 3681 ierr = DMSetDefaultGlobalSection(dm->dmBC, gsection);CHKERRQ(ierr); 3682 ierr = PetscSectionDestroy(&gsection);CHKERRQ(ierr); 3683 } 3684 *odm = dm->dmBC; 3685 PetscFunctionReturn(0); 3686 } 3687 3688 #undef __FUNCT__ 3689 #define __FUNCT__ "DMGetOutputSequenceNumber" 3690 /*@ 3691 DMGetOutputSequenceNumber - Retrieve the sequence number for output 3692 3693 Input Parameter: 3694 . dm - The original DM 3695 3696 Output Parameter: 3697 . num - The output sequence number 3698 3699 Level: intermediate 3700 3701 Note: This is intended for output that should appear in sequence, for instance 3702 a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system. 3703 3704 .seealso: VecView() 3705 @*/ 3706 PetscErrorCode DMGetOutputSequenceNumber(DM dm, PetscInt *num) 3707 { 3708 PetscFunctionBegin; 3709 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3710 PetscValidPointer(num,2); 3711 *num = dm->outputSequenceNum; 3712 PetscFunctionReturn(0); 3713 } 3714 3715 #undef __FUNCT__ 3716 #define __FUNCT__ "DMSetOutputSequenceNumber" 3717 /*@ 3718 DMSetOutputSequenceNumber - Set the sequence number for output 3719 3720 Input Parameters: 3721 + dm - The original DM 3722 - num - The output sequence number 3723 3724 Level: intermediate 3725 3726 Note: This is intended for output that should appear in sequence, for instance 3727 a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system. 3728 3729 .seealso: VecView() 3730 @*/ 3731 PetscErrorCode DMSetOutputSequenceNumber(DM dm, PetscInt num) 3732 { 3733 PetscFunctionBegin; 3734 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3735 dm->outputSequenceNum = num; 3736 PetscFunctionReturn(0); 3737 } 3738