1 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 2 #include <petscsf.h> 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal; 6 7 const char *const DMBoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","TWIST","DM_BOUNDARY_",0}; 8 9 #undef __FUNCT__ 10 #define __FUNCT__ "DMCreate" 11 /*@ 12 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 13 14 If you never call DMSetType() it will generate an 15 error when you try to use the vector. 16 17 Collective on MPI_Comm 18 19 Input Parameter: 20 . comm - The communicator for the DM object 21 22 Output Parameter: 23 . dm - The DM object 24 25 Level: beginner 26 27 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 28 @*/ 29 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 30 { 31 DM v; 32 PetscErrorCode ierr; 33 34 PetscFunctionBegin; 35 PetscValidPointer(dm,2); 36 *dm = NULL; 37 ierr = PetscSysInitializePackage();CHKERRQ(ierr); 38 ierr = VecInitializePackage();CHKERRQ(ierr); 39 ierr = MatInitializePackage();CHKERRQ(ierr); 40 ierr = DMInitializePackage();CHKERRQ(ierr); 41 42 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 43 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 44 45 46 v->ltogmap = NULL; 47 v->ltogmapb = NULL; 48 v->bs = 1; 49 v->coloringtype = IS_COLORING_GLOBAL; 50 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 51 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 52 v->defaultSection = NULL; 53 v->defaultGlobalSection = NULL; 54 { 55 PetscInt i; 56 for (i = 0; i < 10; ++i) { 57 v->nullspaceConstructors[i] = NULL; 58 } 59 } 60 v->numFields = 0; 61 v->fields = NULL; 62 v->dmBC = NULL; 63 v->outputSequenceNum = -1; 64 ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr); 65 ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr); 66 *dm = v; 67 PetscFunctionReturn(0); 68 } 69 70 #undef __FUNCT__ 71 #define __FUNCT__ "DMClone" 72 /*@ 73 DMClone - Creates a DM object with the same topology as the original. 74 75 Collective on MPI_Comm 76 77 Input Parameter: 78 . dm - The original DM object 79 80 Output Parameter: 81 . newdm - The new DM object 82 83 Level: beginner 84 85 .keywords: DM, topology, create 86 @*/ 87 PetscErrorCode DMClone(DM dm, DM *newdm) 88 { 89 PetscSF sf; 90 Vec coords; 91 void *ctx; 92 PetscErrorCode ierr; 93 94 PetscFunctionBegin; 95 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 96 PetscValidPointer(newdm,2); 97 ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr); 98 if (dm->ops->clone) { 99 ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr); 100 } 101 (*newdm)->setupcalled = PETSC_TRUE; 102 ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr); 103 ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr); 104 ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr); 105 ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr); 106 ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr); 107 if (coords) { 108 ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr); 109 } else { 110 ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr); 111 if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);} 112 } 113 PetscFunctionReturn(0); 114 } 115 116 #undef __FUNCT__ 117 #define __FUNCT__ "DMSetVecType" 118 /*@C 119 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 120 121 Logically Collective on DMDA 122 123 Input Parameter: 124 + da - initial distributed array 125 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 126 127 Options Database: 128 . -dm_vec_type ctype 129 130 Level: intermediate 131 132 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType() 133 @*/ 134 PetscErrorCode DMSetVecType(DM da,VecType ctype) 135 { 136 PetscErrorCode ierr; 137 138 PetscFunctionBegin; 139 PetscValidHeaderSpecific(da,DM_CLASSID,1); 140 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 141 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 142 PetscFunctionReturn(0); 143 } 144 145 #undef __FUNCT__ 146 #define __FUNCT__ "DMGetVecType" 147 /*@C 148 DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 149 150 Logically Collective on DMDA 151 152 Input Parameter: 153 . da - initial distributed array 154 155 Output Parameter: 156 . ctype - the vector type 157 158 Level: intermediate 159 160 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 161 @*/ 162 PetscErrorCode DMGetVecType(DM da,VecType *ctype) 163 { 164 PetscFunctionBegin; 165 PetscValidHeaderSpecific(da,DM_CLASSID,1); 166 *ctype = da->vectype; 167 PetscFunctionReturn(0); 168 } 169 170 #undef __FUNCT__ 171 #define __FUNCT__ "VecGetDM" 172 /*@ 173 VecGetDM - Gets the DM defining the data layout of the vector 174 175 Not collective 176 177 Input Parameter: 178 . v - The Vec 179 180 Output Parameter: 181 . dm - The DM 182 183 Level: intermediate 184 185 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 186 @*/ 187 PetscErrorCode VecGetDM(Vec v, DM *dm) 188 { 189 PetscErrorCode ierr; 190 191 PetscFunctionBegin; 192 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 193 PetscValidPointer(dm,2); 194 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 195 PetscFunctionReturn(0); 196 } 197 198 #undef __FUNCT__ 199 #define __FUNCT__ "VecSetDM" 200 /*@ 201 VecSetDM - Sets the DM defining the data layout of the vector 202 203 Not collective 204 205 Input Parameters: 206 + v - The Vec 207 - dm - The DM 208 209 Level: intermediate 210 211 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 212 @*/ 213 PetscErrorCode VecSetDM(Vec v, DM dm) 214 { 215 PetscErrorCode ierr; 216 217 PetscFunctionBegin; 218 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 219 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 220 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 221 PetscFunctionReturn(0); 222 } 223 224 #undef __FUNCT__ 225 #define __FUNCT__ "DMSetMatType" 226 /*@C 227 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 228 229 Logically Collective on DM 230 231 Input Parameter: 232 + dm - the DM context 233 . ctype - the matrix type 234 235 Options Database: 236 . -dm_mat_type ctype 237 238 Level: intermediate 239 240 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType() 241 @*/ 242 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 243 { 244 PetscErrorCode ierr; 245 246 PetscFunctionBegin; 247 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 248 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 249 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 250 PetscFunctionReturn(0); 251 } 252 253 #undef __FUNCT__ 254 #define __FUNCT__ "DMGetMatType" 255 /*@C 256 DMGetMatType - Gets the type of matrix created with DMCreateMatrix() 257 258 Logically Collective on DM 259 260 Input Parameter: 261 . dm - the DM context 262 263 Output Parameter: 264 . ctype - the matrix type 265 266 Options Database: 267 . -dm_mat_type ctype 268 269 Level: intermediate 270 271 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType() 272 @*/ 273 PetscErrorCode DMGetMatType(DM dm,MatType *ctype) 274 { 275 PetscFunctionBegin; 276 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 277 *ctype = dm->mattype; 278 PetscFunctionReturn(0); 279 } 280 281 #undef __FUNCT__ 282 #define __FUNCT__ "MatGetDM" 283 /*@ 284 MatGetDM - Gets the DM defining the data layout of the matrix 285 286 Not collective 287 288 Input Parameter: 289 . A - The Mat 290 291 Output Parameter: 292 . dm - The DM 293 294 Level: intermediate 295 296 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 297 @*/ 298 PetscErrorCode MatGetDM(Mat A, DM *dm) 299 { 300 PetscErrorCode ierr; 301 302 PetscFunctionBegin; 303 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 304 PetscValidPointer(dm,2); 305 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 306 PetscFunctionReturn(0); 307 } 308 309 #undef __FUNCT__ 310 #define __FUNCT__ "MatSetDM" 311 /*@ 312 MatSetDM - Sets the DM defining the data layout of the matrix 313 314 Not collective 315 316 Input Parameters: 317 + A - The Mat 318 - dm - The DM 319 320 Level: intermediate 321 322 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 323 @*/ 324 PetscErrorCode MatSetDM(Mat A, DM dm) 325 { 326 PetscErrorCode ierr; 327 328 PetscFunctionBegin; 329 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 330 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 331 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 332 PetscFunctionReturn(0); 333 } 334 335 #undef __FUNCT__ 336 #define __FUNCT__ "DMSetOptionsPrefix" 337 /*@C 338 DMSetOptionsPrefix - Sets the prefix used for searching for all 339 DMDA options in the database. 340 341 Logically Collective on DMDA 342 343 Input Parameter: 344 + da - the DMDA context 345 - prefix - the prefix to prepend to all option names 346 347 Notes: 348 A hyphen (-) must NOT be given at the beginning of the prefix name. 349 The first character of all runtime options is AUTOMATICALLY the hyphen. 350 351 Level: advanced 352 353 .keywords: DMDA, set, options, prefix, database 354 355 .seealso: DMSetFromOptions() 356 @*/ 357 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 358 { 359 PetscErrorCode ierr; 360 361 PetscFunctionBegin; 362 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 363 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 364 PetscFunctionReturn(0); 365 } 366 367 #undef __FUNCT__ 368 #define __FUNCT__ "DMDestroy" 369 /*@ 370 DMDestroy - Destroys a vector packer or DMDA. 371 372 Collective on DM 373 374 Input Parameter: 375 . dm - the DM object to destroy 376 377 Level: developer 378 379 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 380 381 @*/ 382 PetscErrorCode DMDestroy(DM *dm) 383 { 384 PetscInt i, cnt = 0, f; 385 DMNamedVecLink nlink,nnext; 386 PetscErrorCode ierr; 387 388 PetscFunctionBegin; 389 if (!*dm) PetscFunctionReturn(0); 390 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 391 392 /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */ 393 for (f = 0; f < (*dm)->numFields; ++f) { 394 ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr); 395 ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr); 396 ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr); 397 } 398 /* count all the circular references of DM and its contained Vecs */ 399 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 400 if ((*dm)->localin[i]) cnt++; 401 if ((*dm)->globalin[i]) cnt++; 402 } 403 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 404 for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++; 405 if ((*dm)->x) { 406 DM obj; 407 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 408 if (obj == *dm) cnt++; 409 } 410 411 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 412 /* 413 Need this test because the dm references the vectors that 414 reference the dm, so destroying the dm calls destroy on the 415 vectors that cause another destroy on the dm 416 */ 417 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 418 ((PetscObject) (*dm))->refct = 0; 419 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 420 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 421 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 422 } 423 nnext=(*dm)->namedglobal; 424 (*dm)->namedglobal = NULL; 425 for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named vectors */ 426 nnext = nlink->next; 427 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 428 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 429 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 430 ierr = PetscFree(nlink);CHKERRQ(ierr); 431 } 432 nnext=(*dm)->namedlocal; 433 (*dm)->namedlocal = NULL; 434 for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named local vectors */ 435 nnext = nlink->next; 436 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 437 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 438 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 439 ierr = PetscFree(nlink);CHKERRQ(ierr); 440 } 441 442 /* Destroy the list of hooks */ 443 { 444 DMCoarsenHookLink link,next; 445 for (link=(*dm)->coarsenhook; link; link=next) { 446 next = link->next; 447 ierr = PetscFree(link);CHKERRQ(ierr); 448 } 449 (*dm)->coarsenhook = NULL; 450 } 451 { 452 DMRefineHookLink link,next; 453 for (link=(*dm)->refinehook; link; link=next) { 454 next = link->next; 455 ierr = PetscFree(link);CHKERRQ(ierr); 456 } 457 (*dm)->refinehook = NULL; 458 } 459 { 460 DMSubDomainHookLink link,next; 461 for (link=(*dm)->subdomainhook; link; link=next) { 462 next = link->next; 463 ierr = PetscFree(link);CHKERRQ(ierr); 464 } 465 (*dm)->subdomainhook = NULL; 466 } 467 { 468 DMGlobalToLocalHookLink link,next; 469 for (link=(*dm)->gtolhook; link; link=next) { 470 next = link->next; 471 ierr = PetscFree(link);CHKERRQ(ierr); 472 } 473 (*dm)->gtolhook = NULL; 474 } 475 /* Destroy the work arrays */ 476 { 477 DMWorkLink link,next; 478 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 479 for (link=(*dm)->workin; link; link=next) { 480 next = link->next; 481 ierr = PetscFree(link->mem);CHKERRQ(ierr); 482 ierr = PetscFree(link);CHKERRQ(ierr); 483 } 484 (*dm)->workin = NULL; 485 } 486 487 ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr); 488 ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr); 489 ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr); 490 491 if ((*dm)->ctx && (*dm)->ctxdestroy) { 492 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 493 } 494 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 495 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 496 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 497 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 498 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 499 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 500 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 501 502 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 503 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 504 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 505 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 506 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 507 508 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 509 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 510 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 511 512 for (f = 0; f < (*dm)->numFields; ++f) { 513 ierr = PetscObjectDestroy((PetscObject *) &(*dm)->fields[f]);CHKERRQ(ierr); 514 } 515 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 516 ierr = DMDestroy(&(*dm)->dmBC);CHKERRQ(ierr); 517 /* if memory was published with SAWs then destroy it */ 518 ierr = PetscObjectSAWsViewOff((PetscObject)*dm);CHKERRQ(ierr); 519 520 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 521 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 522 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 523 PetscFunctionReturn(0); 524 } 525 526 #undef __FUNCT__ 527 #define __FUNCT__ "DMSetUp" 528 /*@ 529 DMSetUp - sets up the data structures inside a DM object 530 531 Collective on DM 532 533 Input Parameter: 534 . dm - the DM object to setup 535 536 Level: developer 537 538 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 539 540 @*/ 541 PetscErrorCode DMSetUp(DM dm) 542 { 543 PetscErrorCode ierr; 544 545 PetscFunctionBegin; 546 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 547 if (dm->setupcalled) PetscFunctionReturn(0); 548 if (dm->ops->setup) { 549 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 550 } 551 dm->setupcalled = PETSC_TRUE; 552 PetscFunctionReturn(0); 553 } 554 555 #undef __FUNCT__ 556 #define __FUNCT__ "DMSetFromOptions" 557 /*@ 558 DMSetFromOptions - sets parameters in a DM from the options database 559 560 Collective on DM 561 562 Input Parameter: 563 . dm - the DM object to set options for 564 565 Options Database: 566 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 567 . -dm_vec_type <type> type of vector to create inside DM 568 . -dm_mat_type <type> type of matrix to create inside DM 569 - -dm_coloring_type <global or ghosted> 570 571 Level: developer 572 573 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 574 575 @*/ 576 PetscErrorCode DMSetFromOptions(DM dm) 577 { 578 char typeName[256]; 579 PetscBool flg; 580 PetscErrorCode ierr; 581 582 PetscFunctionBegin; 583 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 584 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 585 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 586 ierr = PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 587 if (flg) { 588 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 589 } 590 ierr = PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 591 if (flg) { 592 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 593 } 594 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 595 if (dm->ops->setfromoptions) { 596 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 597 } 598 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 599 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 600 ierr = PetscOptionsEnd();CHKERRQ(ierr); 601 PetscFunctionReturn(0); 602 } 603 604 #undef __FUNCT__ 605 #define __FUNCT__ "DMView" 606 /*@C 607 DMView - Views a vector packer or DMDA. 608 609 Collective on DM 610 611 Input Parameter: 612 + dm - the DM object to view 613 - v - the viewer 614 615 Level: developer 616 617 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 618 619 @*/ 620 PetscErrorCode DMView(DM dm,PetscViewer v) 621 { 622 PetscErrorCode ierr; 623 PetscBool isbinary; 624 625 PetscFunctionBegin; 626 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 627 if (!v) { 628 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr); 629 } 630 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 631 if (isbinary) { 632 PetscInt classid = DM_FILE_CLASSID; 633 char type[256]; 634 635 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 636 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 637 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 638 } 639 if (dm->ops->view) { 640 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 641 } 642 PetscFunctionReturn(0); 643 } 644 645 #undef __FUNCT__ 646 #define __FUNCT__ "DMCreateGlobalVector" 647 /*@ 648 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 649 650 Collective on DM 651 652 Input Parameter: 653 . dm - the DM object 654 655 Output Parameter: 656 . vec - the global vector 657 658 Level: beginner 659 660 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 661 662 @*/ 663 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 664 { 665 PetscErrorCode ierr; 666 667 PetscFunctionBegin; 668 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 669 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 670 PetscFunctionReturn(0); 671 } 672 673 #undef __FUNCT__ 674 #define __FUNCT__ "DMCreateLocalVector" 675 /*@ 676 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 677 678 Not Collective 679 680 Input Parameter: 681 . dm - the DM object 682 683 Output Parameter: 684 . vec - the local vector 685 686 Level: beginner 687 688 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 689 690 @*/ 691 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 692 { 693 PetscErrorCode ierr; 694 695 PetscFunctionBegin; 696 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 697 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 698 PetscFunctionReturn(0); 699 } 700 701 #undef __FUNCT__ 702 #define __FUNCT__ "DMGetLocalToGlobalMapping" 703 /*@ 704 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 705 706 Collective on DM 707 708 Input Parameter: 709 . dm - the DM that provides the mapping 710 711 Output Parameter: 712 . ltog - the mapping 713 714 Level: intermediate 715 716 Notes: 717 This mapping can then be used by VecSetLocalToGlobalMapping() or 718 MatSetLocalToGlobalMapping(). 719 720 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 721 @*/ 722 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 723 { 724 PetscErrorCode ierr; 725 726 PetscFunctionBegin; 727 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 728 PetscValidPointer(ltog,2); 729 if (!dm->ltogmap) { 730 PetscSection section, sectionGlobal; 731 732 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 733 if (section) { 734 PetscInt *ltog; 735 PetscInt pStart, pEnd, size, p, l; 736 737 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 738 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 739 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 740 ierr = PetscMalloc1(size, <og);CHKERRQ(ierr); /* We want the local+overlap size */ 741 for (p = pStart, l = 0; p < pEnd; ++p) { 742 PetscInt dof, off, c; 743 744 /* Should probably use constrained dofs */ 745 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 746 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 747 for (c = 0; c < dof; ++c, ++l) { 748 ltog[l] = off+c; 749 } 750 } 751 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 752 ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr); 753 } else { 754 if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 755 ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr); 756 } 757 } 758 *ltog = dm->ltogmap; 759 PetscFunctionReturn(0); 760 } 761 762 #undef __FUNCT__ 763 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 764 /*@ 765 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 766 767 Collective on DM 768 769 Input Parameter: 770 . da - the distributed array that provides the mapping 771 772 Output Parameter: 773 . ltog - the block mapping 774 775 Level: intermediate 776 777 Notes: 778 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 779 MatSetLocalToGlobalMappingBlock(). 780 781 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 782 @*/ 783 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 784 { 785 PetscErrorCode ierr; 786 787 PetscFunctionBegin; 788 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 789 PetscValidPointer(ltog,2); 790 if (!dm->ltogmapb) { 791 PetscInt bs; 792 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 793 if (bs > 1) { 794 if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 795 ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 796 } else { 797 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 798 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 799 } 800 } 801 *ltog = dm->ltogmapb; 802 PetscFunctionReturn(0); 803 } 804 805 #undef __FUNCT__ 806 #define __FUNCT__ "DMGetBlockSize" 807 /*@ 808 DMGetBlockSize - Gets the inherent block size associated with a DM 809 810 Not Collective 811 812 Input Parameter: 813 . dm - the DM with block structure 814 815 Output Parameter: 816 . bs - the block size, 1 implies no exploitable block structure 817 818 Level: intermediate 819 820 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 821 @*/ 822 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 823 { 824 PetscFunctionBegin; 825 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 826 PetscValidPointer(bs,2); 827 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 828 *bs = dm->bs; 829 PetscFunctionReturn(0); 830 } 831 832 #undef __FUNCT__ 833 #define __FUNCT__ "DMCreateInterpolation" 834 /*@ 835 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 836 837 Collective on DM 838 839 Input Parameter: 840 + dm1 - the DM object 841 - dm2 - the second, finer DM object 842 843 Output Parameter: 844 + mat - the interpolation 845 - vec - the scaling (optional) 846 847 Level: developer 848 849 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 850 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 851 852 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 853 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 854 855 856 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 857 858 @*/ 859 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 860 { 861 PetscErrorCode ierr; 862 863 PetscFunctionBegin; 864 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 865 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 866 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 867 PetscFunctionReturn(0); 868 } 869 870 #undef __FUNCT__ 871 #define __FUNCT__ "DMCreateInjection" 872 /*@ 873 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 874 875 Collective on DM 876 877 Input Parameter: 878 + dm1 - the DM object 879 - dm2 - the second, finer DM object 880 881 Output Parameter: 882 . ctx - the injection 883 884 Level: developer 885 886 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 887 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 888 889 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 890 891 @*/ 892 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 893 { 894 PetscErrorCode ierr; 895 896 PetscFunctionBegin; 897 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 898 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 899 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 900 PetscFunctionReturn(0); 901 } 902 903 #undef __FUNCT__ 904 #define __FUNCT__ "DMCreateColoring" 905 /*@ 906 DMCreateColoring - Gets coloring for a DM 907 908 Collective on DM 909 910 Input Parameter: 911 + dm - the DM object 912 - ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 913 914 Output Parameter: 915 . coloring - the coloring 916 917 Level: developer 918 919 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType() 920 921 @*/ 922 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring) 923 { 924 PetscErrorCode ierr; 925 926 PetscFunctionBegin; 927 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 928 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 929 ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr); 930 PetscFunctionReturn(0); 931 } 932 933 #undef __FUNCT__ 934 #define __FUNCT__ "DMCreateMatrix" 935 /*@ 936 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 937 938 Collective on DM 939 940 Input Parameter: 941 . dm - the DM object 942 943 Output Parameter: 944 . mat - the empty Jacobian 945 946 Level: beginner 947 948 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 949 do not need to do it yourself. 950 951 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 952 the nonzero pattern call DMDASetMatPreallocateOnly() 953 954 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 955 internally by PETSc. 956 957 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 958 the indices for the global numbering for DMDAs which is complicated. 959 960 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType() 961 962 @*/ 963 PetscErrorCode DMCreateMatrix(DM dm,Mat *mat) 964 { 965 PetscErrorCode ierr; 966 967 PetscFunctionBegin; 968 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 969 ierr = MatInitializePackage();CHKERRQ(ierr); 970 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 971 PetscValidPointer(mat,3); 972 ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr); 973 PetscFunctionReturn(0); 974 } 975 976 #undef __FUNCT__ 977 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 978 /*@ 979 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 980 preallocated but the nonzero structure and zero values will not be set. 981 982 Logically Collective on DMDA 983 984 Input Parameter: 985 + dm - the DM 986 - only - PETSC_TRUE if only want preallocation 987 988 Level: developer 989 .seealso DMCreateMatrix() 990 @*/ 991 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 992 { 993 PetscFunctionBegin; 994 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 995 dm->prealloc_only = only; 996 PetscFunctionReturn(0); 997 } 998 999 #undef __FUNCT__ 1000 #define __FUNCT__ "DMGetWorkArray" 1001 /*@C 1002 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1003 1004 Not Collective 1005 1006 Input Parameters: 1007 + dm - the DM object 1008 . count - The minium size 1009 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1010 1011 Output Parameter: 1012 . array - the work array 1013 1014 Level: developer 1015 1016 .seealso DMDestroy(), DMCreate() 1017 @*/ 1018 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1019 { 1020 PetscErrorCode ierr; 1021 DMWorkLink link; 1022 size_t size; 1023 1024 PetscFunctionBegin; 1025 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1026 PetscValidPointer(mem,4); 1027 if (dm->workin) { 1028 link = dm->workin; 1029 dm->workin = dm->workin->next; 1030 } else { 1031 ierr = PetscNewLog(dm,&link);CHKERRQ(ierr); 1032 } 1033 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 1034 if (size*count > link->bytes) { 1035 ierr = PetscFree(link->mem);CHKERRQ(ierr); 1036 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 1037 link->bytes = size*count; 1038 } 1039 link->next = dm->workout; 1040 dm->workout = link; 1041 *(void**)mem = link->mem; 1042 PetscFunctionReturn(0); 1043 } 1044 1045 #undef __FUNCT__ 1046 #define __FUNCT__ "DMRestoreWorkArray" 1047 /*@C 1048 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1049 1050 Not Collective 1051 1052 Input Parameters: 1053 + dm - the DM object 1054 . count - The minium size 1055 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1056 1057 Output Parameter: 1058 . array - the work array 1059 1060 Level: developer 1061 1062 .seealso DMDestroy(), DMCreate() 1063 @*/ 1064 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1065 { 1066 DMWorkLink *p,link; 1067 1068 PetscFunctionBegin; 1069 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1070 PetscValidPointer(mem,4); 1071 for (p=&dm->workout; (link=*p); p=&link->next) { 1072 if (link->mem == *(void**)mem) { 1073 *p = link->next; 1074 link->next = dm->workin; 1075 dm->workin = link; 1076 *(void**)mem = NULL; 1077 PetscFunctionReturn(0); 1078 } 1079 } 1080 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1081 PetscFunctionReturn(0); 1082 } 1083 1084 #undef __FUNCT__ 1085 #define __FUNCT__ "DMSetNullSpaceConstructor" 1086 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1087 { 1088 PetscFunctionBegin; 1089 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1090 if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1091 dm->nullspaceConstructors[field] = nullsp; 1092 PetscFunctionReturn(0); 1093 } 1094 1095 #undef __FUNCT__ 1096 #define __FUNCT__ "DMCreateFieldIS" 1097 /*@C 1098 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1099 1100 Not collective 1101 1102 Input Parameter: 1103 . dm - the DM object 1104 1105 Output Parameters: 1106 + numFields - The number of fields (or NULL if not requested) 1107 . fieldNames - The name for each field (or NULL if not requested) 1108 - fields - The global indices for each field (or NULL if not requested) 1109 1110 Level: intermediate 1111 1112 Notes: 1113 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1114 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1115 PetscFree(). 1116 1117 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1118 @*/ 1119 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1120 { 1121 PetscSection section, sectionGlobal; 1122 PetscErrorCode ierr; 1123 1124 PetscFunctionBegin; 1125 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1126 if (numFields) { 1127 PetscValidPointer(numFields,2); 1128 *numFields = 0; 1129 } 1130 if (fieldNames) { 1131 PetscValidPointer(fieldNames,3); 1132 *fieldNames = NULL; 1133 } 1134 if (fields) { 1135 PetscValidPointer(fields,4); 1136 *fields = NULL; 1137 } 1138 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1139 if (section) { 1140 PetscInt *fieldSizes, **fieldIndices; 1141 PetscInt nF, f, pStart, pEnd, p; 1142 1143 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1144 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1145 ierr = PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);CHKERRQ(ierr); 1146 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1147 for (f = 0; f < nF; ++f) { 1148 fieldSizes[f] = 0; 1149 } 1150 for (p = pStart; p < pEnd; ++p) { 1151 PetscInt gdof; 1152 1153 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1154 if (gdof > 0) { 1155 for (f = 0; f < nF; ++f) { 1156 PetscInt fdof, fcdof; 1157 1158 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1159 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1160 fieldSizes[f] += fdof-fcdof; 1161 } 1162 } 1163 } 1164 for (f = 0; f < nF; ++f) { 1165 ierr = PetscMalloc1(fieldSizes[f], &fieldIndices[f]);CHKERRQ(ierr); 1166 fieldSizes[f] = 0; 1167 } 1168 for (p = pStart; p < pEnd; ++p) { 1169 PetscInt gdof, goff; 1170 1171 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1172 if (gdof > 0) { 1173 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1174 for (f = 0; f < nF; ++f) { 1175 PetscInt fdof, fcdof, fc; 1176 1177 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1178 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1179 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1180 fieldIndices[f][fieldSizes[f]] = goff++; 1181 } 1182 } 1183 } 1184 } 1185 if (numFields) *numFields = nF; 1186 if (fieldNames) { 1187 ierr = PetscMalloc1(nF, fieldNames);CHKERRQ(ierr); 1188 for (f = 0; f < nF; ++f) { 1189 const char *fieldName; 1190 1191 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1192 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1193 } 1194 } 1195 if (fields) { 1196 ierr = PetscMalloc1(nF, fields);CHKERRQ(ierr); 1197 for (f = 0; f < nF; ++f) { 1198 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1199 } 1200 } 1201 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1202 } else if (dm->ops->createfieldis) { 1203 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1204 } 1205 PetscFunctionReturn(0); 1206 } 1207 1208 1209 #undef __FUNCT__ 1210 #define __FUNCT__ "DMCreateFieldDecomposition" 1211 /*@C 1212 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1213 corresponding to different fields: each IS contains the global indices of the dofs of the 1214 corresponding field. The optional list of DMs define the DM for each subproblem. 1215 Generalizes DMCreateFieldIS(). 1216 1217 Not collective 1218 1219 Input Parameter: 1220 . dm - the DM object 1221 1222 Output Parameters: 1223 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1224 . namelist - The name for each field (or NULL if not requested) 1225 . islist - The global indices for each field (or NULL if not requested) 1226 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1227 1228 Level: intermediate 1229 1230 Notes: 1231 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1232 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1233 and all of the arrays should be freed with PetscFree(). 1234 1235 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1236 @*/ 1237 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1238 { 1239 PetscErrorCode ierr; 1240 1241 PetscFunctionBegin; 1242 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1243 if (len) { 1244 PetscValidPointer(len,2); 1245 *len = 0; 1246 } 1247 if (namelist) { 1248 PetscValidPointer(namelist,3); 1249 *namelist = 0; 1250 } 1251 if (islist) { 1252 PetscValidPointer(islist,4); 1253 *islist = 0; 1254 } 1255 if (dmlist) { 1256 PetscValidPointer(dmlist,5); 1257 *dmlist = 0; 1258 } 1259 /* 1260 Is it a good idea to apply the following check across all impls? 1261 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1262 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 1263 */ 1264 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1265 if (!dm->ops->createfielddecomposition) { 1266 PetscSection section; 1267 PetscInt numFields, f; 1268 1269 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1270 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1271 if (section && numFields && dm->ops->createsubdm) { 1272 *len = numFields; 1273 if (namelist) {ierr = PetscMalloc1(numFields,namelist);CHKERRQ(ierr);} 1274 if (islist) {ierr = PetscMalloc1(numFields,islist);CHKERRQ(ierr);} 1275 if (dmlist) {ierr = PetscMalloc1(numFields,dmlist);CHKERRQ(ierr);} 1276 for (f = 0; f < numFields; ++f) { 1277 const char *fieldName; 1278 1279 ierr = DMCreateSubDM(dm, 1, &f, islist ? &(*islist)[f] : NULL, dmlist ? &(*dmlist)[f] : NULL);CHKERRQ(ierr); 1280 if (namelist) { 1281 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1282 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1283 } 1284 } 1285 } else { 1286 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1287 /* By default there are no DMs associated with subproblems. */ 1288 if (dmlist) *dmlist = NULL; 1289 } 1290 } else { 1291 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1292 } 1293 PetscFunctionReturn(0); 1294 } 1295 1296 #undef __FUNCT__ 1297 #define __FUNCT__ "DMCreateSubDM" 1298 /*@C 1299 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1300 The fields are defined by DMCreateFieldIS(). 1301 1302 Not collective 1303 1304 Input Parameters: 1305 + dm - the DM object 1306 . numFields - number of fields in this subproblem 1307 - len - The number of subproblems in the decomposition (or NULL if not requested) 1308 1309 Output Parameters: 1310 . is - The global indices for the subproblem 1311 - dm - The DM for the subproblem 1312 1313 Level: intermediate 1314 1315 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1316 @*/ 1317 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1318 { 1319 PetscErrorCode ierr; 1320 1321 PetscFunctionBegin; 1322 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1323 PetscValidPointer(fields,3); 1324 if (is) PetscValidPointer(is,4); 1325 if (subdm) PetscValidPointer(subdm,5); 1326 if (dm->ops->createsubdm) { 1327 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr); 1328 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1329 PetscFunctionReturn(0); 1330 } 1331 1332 1333 #undef __FUNCT__ 1334 #define __FUNCT__ "DMCreateDomainDecomposition" 1335 /*@C 1336 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1337 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1338 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1339 define a nonoverlapping covering, while outer subdomains can overlap. 1340 The optional list of DMs define the DM for each subproblem. 1341 1342 Not collective 1343 1344 Input Parameter: 1345 . dm - the DM object 1346 1347 Output Parameters: 1348 + len - The number of subproblems in the domain decomposition (or NULL if not requested) 1349 . namelist - The name for each subdomain (or NULL if not requested) 1350 . innerislist - The global indices for each inner subdomain (or NULL, if not requested) 1351 . outerislist - The global indices for each outer subdomain (or NULL, if not requested) 1352 - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1353 1354 Level: intermediate 1355 1356 Notes: 1357 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1358 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1359 and all of the arrays should be freed with PetscFree(). 1360 1361 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1362 @*/ 1363 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1364 { 1365 PetscErrorCode ierr; 1366 DMSubDomainHookLink link; 1367 PetscInt i,l; 1368 1369 PetscFunctionBegin; 1370 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1371 if (len) {PetscValidPointer(len,2); *len = 0;} 1372 if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;} 1373 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;} 1374 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;} 1375 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;} 1376 /* 1377 Is it a good idea to apply the following check across all impls? 1378 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1379 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 1380 */ 1381 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1382 if (dm->ops->createdomaindecomposition) { 1383 ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr); 1384 /* copy subdomain hooks and context over to the subdomain DMs */ 1385 if (dmlist) { 1386 for (i = 0; i < l; i++) { 1387 for (link=dm->subdomainhook; link; link=link->next) { 1388 if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);} 1389 } 1390 (*dmlist)[i]->ctx = dm->ctx; 1391 } 1392 } 1393 if (len) *len = l; 1394 } 1395 PetscFunctionReturn(0); 1396 } 1397 1398 1399 #undef __FUNCT__ 1400 #define __FUNCT__ "DMCreateDomainDecompositionScatters" 1401 /*@C 1402 DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector 1403 1404 Not collective 1405 1406 Input Parameters: 1407 + dm - the DM object 1408 . n - the number of subdomain scatters 1409 - subdms - the local subdomains 1410 1411 Output Parameters: 1412 + n - the number of scatters returned 1413 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain 1414 . oscat - scatter from global vector to overlapping global vector entries on subdomain 1415 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts) 1416 1417 Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution 1418 of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets 1419 of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of 1420 solution and residual data. 1421 1422 Level: developer 1423 1424 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1425 @*/ 1426 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1427 { 1428 PetscErrorCode ierr; 1429 1430 PetscFunctionBegin; 1431 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1432 PetscValidPointer(subdms,3); 1433 if (dm->ops->createddscatters) { 1434 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1435 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1436 PetscFunctionReturn(0); 1437 } 1438 1439 #undef __FUNCT__ 1440 #define __FUNCT__ "DMRefine" 1441 /*@ 1442 DMRefine - Refines a DM object 1443 1444 Collective on DM 1445 1446 Input Parameter: 1447 + dm - the DM object 1448 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1449 1450 Output Parameter: 1451 . dmf - the refined DM, or NULL 1452 1453 Note: If no refinement was done, the return value is NULL 1454 1455 Level: developer 1456 1457 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1458 @*/ 1459 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1460 { 1461 PetscErrorCode ierr; 1462 DMRefineHookLink link; 1463 1464 PetscFunctionBegin; 1465 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1466 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1467 if (*dmf) { 1468 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1469 1470 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1471 1472 (*dmf)->ctx = dm->ctx; 1473 (*dmf)->leveldown = dm->leveldown; 1474 (*dmf)->levelup = dm->levelup + 1; 1475 1476 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1477 for (link=dm->refinehook; link; link=link->next) { 1478 if (link->refinehook) { 1479 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1480 } 1481 } 1482 } 1483 PetscFunctionReturn(0); 1484 } 1485 1486 #undef __FUNCT__ 1487 #define __FUNCT__ "DMRefineHookAdd" 1488 /*@C 1489 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1490 1491 Logically Collective 1492 1493 Input Arguments: 1494 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1495 . refinehook - function to run when setting up a coarser level 1496 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1497 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1498 1499 Calling sequence of refinehook: 1500 $ refinehook(DM coarse,DM fine,void *ctx); 1501 1502 + coarse - coarse level DM 1503 . fine - fine level DM to interpolate problem to 1504 - ctx - optional user-defined function context 1505 1506 Calling sequence for interphook: 1507 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1508 1509 + coarse - coarse level DM 1510 . interp - matrix interpolating a coarse-level solution to the finer grid 1511 . fine - fine level DM to update 1512 - ctx - optional user-defined function context 1513 1514 Level: advanced 1515 1516 Notes: 1517 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1518 1519 If this function is called multiple times, the hooks will be run in the order they are added. 1520 1521 This function is currently not available from Fortran. 1522 1523 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1524 @*/ 1525 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1526 { 1527 PetscErrorCode ierr; 1528 DMRefineHookLink link,*p; 1529 1530 PetscFunctionBegin; 1531 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1532 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1533 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1534 link->refinehook = refinehook; 1535 link->interphook = interphook; 1536 link->ctx = ctx; 1537 link->next = NULL; 1538 *p = link; 1539 PetscFunctionReturn(0); 1540 } 1541 1542 #undef __FUNCT__ 1543 #define __FUNCT__ "DMInterpolate" 1544 /*@ 1545 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1546 1547 Collective if any hooks are 1548 1549 Input Arguments: 1550 + coarse - coarser DM to use as a base 1551 . restrct - interpolation matrix, apply using MatInterpolate() 1552 - fine - finer DM to update 1553 1554 Level: developer 1555 1556 .seealso: DMRefineHookAdd(), MatInterpolate() 1557 @*/ 1558 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1559 { 1560 PetscErrorCode ierr; 1561 DMRefineHookLink link; 1562 1563 PetscFunctionBegin; 1564 for (link=fine->refinehook; link; link=link->next) { 1565 if (link->interphook) { 1566 ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr); 1567 } 1568 } 1569 PetscFunctionReturn(0); 1570 } 1571 1572 #undef __FUNCT__ 1573 #define __FUNCT__ "DMGetRefineLevel" 1574 /*@ 1575 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1576 1577 Not Collective 1578 1579 Input Parameter: 1580 . dm - the DM object 1581 1582 Output Parameter: 1583 . level - number of refinements 1584 1585 Level: developer 1586 1587 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1588 1589 @*/ 1590 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1591 { 1592 PetscFunctionBegin; 1593 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1594 *level = dm->levelup; 1595 PetscFunctionReturn(0); 1596 } 1597 1598 #undef __FUNCT__ 1599 #define __FUNCT__ "DMGlobalToLocalHookAdd" 1600 /*@C 1601 DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called 1602 1603 Logically Collective 1604 1605 Input Arguments: 1606 + dm - the DM 1607 . beginhook - function to run at the beginning of DMGlobalToLocalBegin() 1608 . endhook - function to run after DMGlobalToLocalEnd() has completed 1609 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1610 1611 Calling sequence for beginhook: 1612 $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1613 1614 + dm - global DM 1615 . g - global vector 1616 . mode - mode 1617 . l - local vector 1618 - ctx - optional user-defined function context 1619 1620 1621 Calling sequence for endhook: 1622 $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1623 1624 + global - global DM 1625 - ctx - optional user-defined function context 1626 1627 Level: advanced 1628 1629 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1630 @*/ 1631 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx) 1632 { 1633 PetscErrorCode ierr; 1634 DMGlobalToLocalHookLink link,*p; 1635 1636 PetscFunctionBegin; 1637 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1638 for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1639 ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr); 1640 link->beginhook = beginhook; 1641 link->endhook = endhook; 1642 link->ctx = ctx; 1643 link->next = NULL; 1644 *p = link; 1645 PetscFunctionReturn(0); 1646 } 1647 1648 #undef __FUNCT__ 1649 #define __FUNCT__ "DMGlobalToLocalBegin" 1650 /*@ 1651 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1652 1653 Neighbor-wise Collective on DM 1654 1655 Input Parameters: 1656 + dm - the DM object 1657 . g - the global vector 1658 . mode - INSERT_VALUES or ADD_VALUES 1659 - l - the local vector 1660 1661 1662 Level: beginner 1663 1664 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1665 1666 @*/ 1667 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1668 { 1669 PetscSF sf; 1670 PetscErrorCode ierr; 1671 DMGlobalToLocalHookLink link; 1672 1673 PetscFunctionBegin; 1674 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1675 for (link=dm->gtolhook; link; link=link->next) { 1676 if (link->beginhook) { 1677 ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr); 1678 } 1679 } 1680 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1681 if (sf) { 1682 PetscScalar *lArray, *gArray; 1683 1684 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1685 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1686 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1687 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1688 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1689 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1690 } else { 1691 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1692 } 1693 PetscFunctionReturn(0); 1694 } 1695 1696 #undef __FUNCT__ 1697 #define __FUNCT__ "DMGlobalToLocalEnd" 1698 /*@ 1699 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1700 1701 Neighbor-wise Collective on DM 1702 1703 Input Parameters: 1704 + dm - the DM object 1705 . g - the global vector 1706 . mode - INSERT_VALUES or ADD_VALUES 1707 - l - the local vector 1708 1709 1710 Level: beginner 1711 1712 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1713 1714 @*/ 1715 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1716 { 1717 PetscSF sf; 1718 PetscErrorCode ierr; 1719 PetscScalar *lArray, *gArray; 1720 DMGlobalToLocalHookLink link; 1721 1722 PetscFunctionBegin; 1723 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1724 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1725 if (sf) { 1726 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1727 1728 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1729 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1730 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1731 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1732 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1733 } else { 1734 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1735 } 1736 for (link=dm->gtolhook; link; link=link->next) { 1737 if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);} 1738 } 1739 PetscFunctionReturn(0); 1740 } 1741 1742 #undef __FUNCT__ 1743 #define __FUNCT__ "DMLocalToGlobalBegin" 1744 /*@ 1745 DMLocalToGlobalBegin - updates global vectors from local vectors 1746 1747 Neighbor-wise Collective on DM 1748 1749 Input Parameters: 1750 + dm - the DM object 1751 . l - the local vector 1752 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1753 base point. 1754 - - the global vector 1755 1756 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local 1757 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1758 global array to the final global array with VecAXPY(). 1759 1760 Level: beginner 1761 1762 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1763 1764 @*/ 1765 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1766 { 1767 PetscSF sf; 1768 PetscErrorCode ierr; 1769 1770 PetscFunctionBegin; 1771 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1772 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1773 if (sf) { 1774 MPI_Op op; 1775 PetscScalar *lArray, *gArray; 1776 1777 switch (mode) { 1778 case INSERT_VALUES: 1779 case INSERT_ALL_VALUES: 1780 op = MPIU_REPLACE; break; 1781 case ADD_VALUES: 1782 case ADD_ALL_VALUES: 1783 op = MPI_SUM; break; 1784 default: 1785 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1786 } 1787 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1788 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1789 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1790 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1791 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1792 } else { 1793 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1794 } 1795 PetscFunctionReturn(0); 1796 } 1797 1798 #undef __FUNCT__ 1799 #define __FUNCT__ "DMLocalToGlobalEnd" 1800 /*@ 1801 DMLocalToGlobalEnd - updates global vectors from local vectors 1802 1803 Neighbor-wise Collective on DM 1804 1805 Input Parameters: 1806 + dm - the DM object 1807 . l - the local vector 1808 . mode - INSERT_VALUES or ADD_VALUES 1809 - g - the global vector 1810 1811 1812 Level: beginner 1813 1814 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1815 1816 @*/ 1817 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1818 { 1819 PetscSF sf; 1820 PetscErrorCode ierr; 1821 1822 PetscFunctionBegin; 1823 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1824 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1825 if (sf) { 1826 MPI_Op op; 1827 PetscScalar *lArray, *gArray; 1828 1829 switch (mode) { 1830 case INSERT_VALUES: 1831 case INSERT_ALL_VALUES: 1832 op = MPIU_REPLACE; break; 1833 case ADD_VALUES: 1834 case ADD_ALL_VALUES: 1835 op = MPI_SUM; break; 1836 default: 1837 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1838 } 1839 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1840 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1841 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1842 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1843 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1844 } else { 1845 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1846 } 1847 PetscFunctionReturn(0); 1848 } 1849 1850 #undef __FUNCT__ 1851 #define __FUNCT__ "DMLocalToLocalBegin" 1852 /*@ 1853 DMLocalToLocalBegin - Maps from a local vector (including ghost points 1854 that contain irrelevant values) to another local vector where the ghost 1855 points in the second are set correctly. Must be followed by DMLocalToLocalEnd(). 1856 1857 Neighbor-wise Collective on DM and Vec 1858 1859 Input Parameters: 1860 + dm - the DM object 1861 . g - the original local vector 1862 - mode - one of INSERT_VALUES or ADD_VALUES 1863 1864 Output Parameter: 1865 . l - the local vector with correct ghost values 1866 1867 Level: intermediate 1868 1869 Notes: 1870 The local vectors used here need not be the same as those 1871 obtained from DMCreateLocalVector(), BUT they 1872 must have the same parallel data layout; they could, for example, be 1873 obtained with VecDuplicate() from the DM originating vectors. 1874 1875 .keywords: DM, local-to-local, begin 1876 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1877 1878 @*/ 1879 PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1880 { 1881 PetscErrorCode ierr; 1882 1883 PetscFunctionBegin; 1884 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1885 ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1886 PetscFunctionReturn(0); 1887 } 1888 1889 #undef __FUNCT__ 1890 #define __FUNCT__ "DMLocalToLocalEnd" 1891 /*@ 1892 DMLocalToLocalEnd - Maps from a local vector (including ghost points 1893 that contain irrelevant values) to another local vector where the ghost 1894 points in the second are set correctly. Must be preceded by DMLocalToLocalBegin(). 1895 1896 Neighbor-wise Collective on DM and Vec 1897 1898 Input Parameters: 1899 + da - the DM object 1900 . g - the original local vector 1901 - mode - one of INSERT_VALUES or ADD_VALUES 1902 1903 Output Parameter: 1904 . l - the local vector with correct ghost values 1905 1906 Level: intermediate 1907 1908 Notes: 1909 The local vectors used here need not be the same as those 1910 obtained from DMCreateLocalVector(), BUT they 1911 must have the same parallel data layout; they could, for example, be 1912 obtained with VecDuplicate() from the DM originating vectors. 1913 1914 .keywords: DM, local-to-local, end 1915 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1916 1917 @*/ 1918 PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1919 { 1920 PetscErrorCode ierr; 1921 1922 PetscFunctionBegin; 1923 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1924 ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1925 PetscFunctionReturn(0); 1926 } 1927 1928 1929 #undef __FUNCT__ 1930 #define __FUNCT__ "DMCoarsen" 1931 /*@ 1932 DMCoarsen - Coarsens a DM object 1933 1934 Collective on DM 1935 1936 Input Parameter: 1937 + dm - the DM object 1938 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1939 1940 Output Parameter: 1941 . dmc - the coarsened DM 1942 1943 Level: developer 1944 1945 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1946 1947 @*/ 1948 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1949 { 1950 PetscErrorCode ierr; 1951 DMCoarsenHookLink link; 1952 1953 PetscFunctionBegin; 1954 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1955 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1956 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1957 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1958 (*dmc)->ctx = dm->ctx; 1959 (*dmc)->levelup = dm->levelup; 1960 (*dmc)->leveldown = dm->leveldown + 1; 1961 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1962 for (link=dm->coarsenhook; link; link=link->next) { 1963 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1964 } 1965 PetscFunctionReturn(0); 1966 } 1967 1968 #undef __FUNCT__ 1969 #define __FUNCT__ "DMCoarsenHookAdd" 1970 /*@C 1971 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1972 1973 Logically Collective 1974 1975 Input Arguments: 1976 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1977 . coarsenhook - function to run when setting up a coarser level 1978 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1979 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1980 1981 Calling sequence of coarsenhook: 1982 $ coarsenhook(DM fine,DM coarse,void *ctx); 1983 1984 + fine - fine level DM 1985 . coarse - coarse level DM to restrict problem to 1986 - ctx - optional user-defined function context 1987 1988 Calling sequence for restricthook: 1989 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1990 1991 + fine - fine level DM 1992 . mrestrict - matrix restricting a fine-level solution to the coarse grid 1993 . rscale - scaling vector for restriction 1994 . inject - matrix restricting by injection 1995 . coarse - coarse level DM to update 1996 - ctx - optional user-defined function context 1997 1998 Level: advanced 1999 2000 Notes: 2001 This function is only needed if auxiliary data needs to be set up on coarse grids. 2002 2003 If this function is called multiple times, the hooks will be run in the order they are added. 2004 2005 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 2006 extract the finest level information from its context (instead of from the SNES). 2007 2008 This function is currently not available from Fortran. 2009 2010 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 2011 @*/ 2012 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 2013 { 2014 PetscErrorCode ierr; 2015 DMCoarsenHookLink link,*p; 2016 2017 PetscFunctionBegin; 2018 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 2019 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 2020 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 2021 link->coarsenhook = coarsenhook; 2022 link->restricthook = restricthook; 2023 link->ctx = ctx; 2024 link->next = NULL; 2025 *p = link; 2026 PetscFunctionReturn(0); 2027 } 2028 2029 #undef __FUNCT__ 2030 #define __FUNCT__ "DMRestrict" 2031 /*@ 2032 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 2033 2034 Collective if any hooks are 2035 2036 Input Arguments: 2037 + fine - finer DM to use as a base 2038 . restrct - restriction matrix, apply using MatRestrict() 2039 . inject - injection matrix, also use MatRestrict() 2040 - coarse - coarer DM to update 2041 2042 Level: developer 2043 2044 .seealso: DMCoarsenHookAdd(), MatRestrict() 2045 @*/ 2046 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 2047 { 2048 PetscErrorCode ierr; 2049 DMCoarsenHookLink link; 2050 2051 PetscFunctionBegin; 2052 for (link=fine->coarsenhook; link; link=link->next) { 2053 if (link->restricthook) { 2054 ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr); 2055 } 2056 } 2057 PetscFunctionReturn(0); 2058 } 2059 2060 #undef __FUNCT__ 2061 #define __FUNCT__ "DMSubDomainHookAdd" 2062 /*@C 2063 DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid 2064 2065 Logically Collective 2066 2067 Input Arguments: 2068 + global - global DM 2069 . ddhook - function to run to pass data to the decomposition DM upon its creation 2070 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 2071 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 2072 2073 2074 Calling sequence for ddhook: 2075 $ ddhook(DM global,DM block,void *ctx) 2076 2077 + global - global DM 2078 . block - block DM 2079 - ctx - optional user-defined function context 2080 2081 Calling sequence for restricthook: 2082 $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx) 2083 2084 + global - global DM 2085 . out - scatter to the outer (with ghost and overlap points) block vector 2086 . in - scatter to block vector values only owned locally 2087 . block - block DM 2088 - ctx - optional user-defined function context 2089 2090 Level: advanced 2091 2092 Notes: 2093 This function is only needed if auxiliary data needs to be set up on subdomain DMs. 2094 2095 If this function is called multiple times, the hooks will be run in the order they are added. 2096 2097 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 2098 extract the global information from its context (instead of from the SNES). 2099 2100 This function is currently not available from Fortran. 2101 2102 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 2103 @*/ 2104 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx) 2105 { 2106 PetscErrorCode ierr; 2107 DMSubDomainHookLink link,*p; 2108 2109 PetscFunctionBegin; 2110 PetscValidHeaderSpecific(global,DM_CLASSID,1); 2111 for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 2112 ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr); 2113 link->restricthook = restricthook; 2114 link->ddhook = ddhook; 2115 link->ctx = ctx; 2116 link->next = NULL; 2117 *p = link; 2118 PetscFunctionReturn(0); 2119 } 2120 2121 #undef __FUNCT__ 2122 #define __FUNCT__ "DMSubDomainRestrict" 2123 /*@ 2124 DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd() 2125 2126 Collective if any hooks are 2127 2128 Input Arguments: 2129 + fine - finer DM to use as a base 2130 . oscatter - scatter from domain global vector filling subdomain global vector with overlap 2131 . gscatter - scatter from domain global vector filling subdomain local vector with ghosts 2132 - coarse - coarer DM to update 2133 2134 Level: developer 2135 2136 .seealso: DMCoarsenHookAdd(), MatRestrict() 2137 @*/ 2138 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 2139 { 2140 PetscErrorCode ierr; 2141 DMSubDomainHookLink link; 2142 2143 PetscFunctionBegin; 2144 for (link=global->subdomainhook; link; link=link->next) { 2145 if (link->restricthook) { 2146 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2147 } 2148 } 2149 PetscFunctionReturn(0); 2150 } 2151 2152 #undef __FUNCT__ 2153 #define __FUNCT__ "DMGetCoarsenLevel" 2154 /*@ 2155 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2156 2157 Not Collective 2158 2159 Input Parameter: 2160 . dm - the DM object 2161 2162 Output Parameter: 2163 . level - number of coarsenings 2164 2165 Level: developer 2166 2167 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2168 2169 @*/ 2170 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2171 { 2172 PetscFunctionBegin; 2173 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2174 *level = dm->leveldown; 2175 PetscFunctionReturn(0); 2176 } 2177 2178 2179 2180 #undef __FUNCT__ 2181 #define __FUNCT__ "DMRefineHierarchy" 2182 /*@C 2183 DMRefineHierarchy - Refines a DM object, all levels at once 2184 2185 Collective on DM 2186 2187 Input Parameter: 2188 + dm - the DM object 2189 - nlevels - the number of levels of refinement 2190 2191 Output Parameter: 2192 . dmf - the refined DM hierarchy 2193 2194 Level: developer 2195 2196 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2197 2198 @*/ 2199 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2200 { 2201 PetscErrorCode ierr; 2202 2203 PetscFunctionBegin; 2204 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2205 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2206 if (nlevels == 0) PetscFunctionReturn(0); 2207 if (dm->ops->refinehierarchy) { 2208 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2209 } else if (dm->ops->refine) { 2210 PetscInt i; 2211 2212 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2213 for (i=1; i<nlevels; i++) { 2214 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2215 } 2216 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2217 PetscFunctionReturn(0); 2218 } 2219 2220 #undef __FUNCT__ 2221 #define __FUNCT__ "DMCoarsenHierarchy" 2222 /*@C 2223 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2224 2225 Collective on DM 2226 2227 Input Parameter: 2228 + dm - the DM object 2229 - nlevels - the number of levels of coarsening 2230 2231 Output Parameter: 2232 . dmc - the coarsened DM hierarchy 2233 2234 Level: developer 2235 2236 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2237 2238 @*/ 2239 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 2240 { 2241 PetscErrorCode ierr; 2242 2243 PetscFunctionBegin; 2244 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2245 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2246 if (nlevels == 0) PetscFunctionReturn(0); 2247 PetscValidPointer(dmc,3); 2248 if (dm->ops->coarsenhierarchy) { 2249 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 2250 } else if (dm->ops->coarsen) { 2251 PetscInt i; 2252 2253 ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr); 2254 for (i=1; i<nlevels; i++) { 2255 ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr); 2256 } 2257 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 2258 PetscFunctionReturn(0); 2259 } 2260 2261 #undef __FUNCT__ 2262 #define __FUNCT__ "DMCreateAggregates" 2263 /*@ 2264 DMCreateAggregates - Gets the aggregates that map between 2265 grids associated with two DMs. 2266 2267 Collective on DM 2268 2269 Input Parameters: 2270 + dmc - the coarse grid DM 2271 - dmf - the fine grid DM 2272 2273 Output Parameters: 2274 . rest - the restriction matrix (transpose of the projection matrix) 2275 2276 Level: intermediate 2277 2278 .keywords: interpolation, restriction, multigrid 2279 2280 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2281 @*/ 2282 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2283 { 2284 PetscErrorCode ierr; 2285 2286 PetscFunctionBegin; 2287 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2288 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2289 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2290 PetscFunctionReturn(0); 2291 } 2292 2293 #undef __FUNCT__ 2294 #define __FUNCT__ "DMSetApplicationContextDestroy" 2295 /*@C 2296 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2297 2298 Not Collective 2299 2300 Input Parameters: 2301 + dm - the DM object 2302 - destroy - the destroy function 2303 2304 Level: intermediate 2305 2306 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2307 2308 @*/ 2309 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2310 { 2311 PetscFunctionBegin; 2312 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2313 dm->ctxdestroy = destroy; 2314 PetscFunctionReturn(0); 2315 } 2316 2317 #undef __FUNCT__ 2318 #define __FUNCT__ "DMSetApplicationContext" 2319 /*@ 2320 DMSetApplicationContext - Set a user context into a DM object 2321 2322 Not Collective 2323 2324 Input Parameters: 2325 + dm - the DM object 2326 - ctx - the user context 2327 2328 Level: intermediate 2329 2330 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2331 2332 @*/ 2333 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2334 { 2335 PetscFunctionBegin; 2336 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2337 dm->ctx = ctx; 2338 PetscFunctionReturn(0); 2339 } 2340 2341 #undef __FUNCT__ 2342 #define __FUNCT__ "DMGetApplicationContext" 2343 /*@ 2344 DMGetApplicationContext - Gets a user context from a DM object 2345 2346 Not Collective 2347 2348 Input Parameter: 2349 . dm - the DM object 2350 2351 Output Parameter: 2352 . ctx - the user context 2353 2354 Level: intermediate 2355 2356 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2357 2358 @*/ 2359 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2360 { 2361 PetscFunctionBegin; 2362 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2363 *(void**)ctx = dm->ctx; 2364 PetscFunctionReturn(0); 2365 } 2366 2367 #undef __FUNCT__ 2368 #define __FUNCT__ "DMSetVariableBounds" 2369 /*@C 2370 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2371 2372 Logically Collective on DM 2373 2374 Input Parameter: 2375 + dm - the DM object 2376 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2377 2378 Level: intermediate 2379 2380 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2381 DMSetJacobian() 2382 2383 @*/ 2384 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2385 { 2386 PetscFunctionBegin; 2387 dm->ops->computevariablebounds = f; 2388 PetscFunctionReturn(0); 2389 } 2390 2391 #undef __FUNCT__ 2392 #define __FUNCT__ "DMHasVariableBounds" 2393 /*@ 2394 DMHasVariableBounds - does the DM object have a variable bounds function? 2395 2396 Not Collective 2397 2398 Input Parameter: 2399 . dm - the DM object to destroy 2400 2401 Output Parameter: 2402 . flg - PETSC_TRUE if the variable bounds function exists 2403 2404 Level: developer 2405 2406 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2407 2408 @*/ 2409 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2410 { 2411 PetscFunctionBegin; 2412 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2413 PetscFunctionReturn(0); 2414 } 2415 2416 #undef __FUNCT__ 2417 #define __FUNCT__ "DMComputeVariableBounds" 2418 /*@C 2419 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2420 2421 Logically Collective on DM 2422 2423 Input Parameters: 2424 + dm - the DM object to destroy 2425 - x - current solution at which the bounds are computed 2426 2427 Output parameters: 2428 + xl - lower bound 2429 - xu - upper bound 2430 2431 Level: intermediate 2432 2433 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2434 2435 @*/ 2436 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2437 { 2438 PetscErrorCode ierr; 2439 2440 PetscFunctionBegin; 2441 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2442 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2443 if (dm->ops->computevariablebounds) { 2444 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2445 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2446 PetscFunctionReturn(0); 2447 } 2448 2449 #undef __FUNCT__ 2450 #define __FUNCT__ "DMHasColoring" 2451 /*@ 2452 DMHasColoring - does the DM object have a method of providing a coloring? 2453 2454 Not Collective 2455 2456 Input Parameter: 2457 . dm - the DM object 2458 2459 Output Parameter: 2460 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 2461 2462 Level: developer 2463 2464 .seealso DMHasFunction(), DMCreateColoring() 2465 2466 @*/ 2467 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2468 { 2469 PetscFunctionBegin; 2470 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2471 PetscFunctionReturn(0); 2472 } 2473 2474 #undef __FUNCT__ 2475 #define __FUNCT__ "DMSetVec" 2476 /*@C 2477 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2478 2479 Collective on DM 2480 2481 Input Parameter: 2482 + dm - the DM object 2483 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2484 2485 Level: developer 2486 2487 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2488 2489 @*/ 2490 PetscErrorCode DMSetVec(DM dm,Vec x) 2491 { 2492 PetscErrorCode ierr; 2493 2494 PetscFunctionBegin; 2495 if (x) { 2496 if (!dm->x) { 2497 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2498 } 2499 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2500 } else if (dm->x) { 2501 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2502 } 2503 PetscFunctionReturn(0); 2504 } 2505 2506 PetscFunctionList DMList = NULL; 2507 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2508 2509 #undef __FUNCT__ 2510 #define __FUNCT__ "DMSetType" 2511 /*@C 2512 DMSetType - Builds a DM, for a particular DM implementation. 2513 2514 Collective on DM 2515 2516 Input Parameters: 2517 + dm - The DM object 2518 - method - The name of the DM type 2519 2520 Options Database Key: 2521 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2522 2523 Notes: 2524 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 2525 2526 Level: intermediate 2527 2528 .keywords: DM, set, type 2529 .seealso: DMGetType(), DMCreate() 2530 @*/ 2531 PetscErrorCode DMSetType(DM dm, DMType method) 2532 { 2533 PetscErrorCode (*r)(DM); 2534 PetscBool match; 2535 PetscErrorCode ierr; 2536 2537 PetscFunctionBegin; 2538 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2539 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2540 if (match) PetscFunctionReturn(0); 2541 2542 if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);} 2543 ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr); 2544 if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2545 2546 if (dm->ops->destroy) { 2547 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2548 dm->ops->destroy = NULL; 2549 } 2550 ierr = (*r)(dm);CHKERRQ(ierr); 2551 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2552 PetscFunctionReturn(0); 2553 } 2554 2555 #undef __FUNCT__ 2556 #define __FUNCT__ "DMGetType" 2557 /*@C 2558 DMGetType - Gets the DM type name (as a string) from the DM. 2559 2560 Not Collective 2561 2562 Input Parameter: 2563 . dm - The DM 2564 2565 Output Parameter: 2566 . type - The DM type name 2567 2568 Level: intermediate 2569 2570 .keywords: DM, get, type, name 2571 .seealso: DMSetType(), DMCreate() 2572 @*/ 2573 PetscErrorCode DMGetType(DM dm, DMType *type) 2574 { 2575 PetscErrorCode ierr; 2576 2577 PetscFunctionBegin; 2578 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2579 PetscValidCharPointer(type,2); 2580 if (!DMRegisterAllCalled) { 2581 ierr = DMRegisterAll();CHKERRQ(ierr); 2582 } 2583 *type = ((PetscObject)dm)->type_name; 2584 PetscFunctionReturn(0); 2585 } 2586 2587 #undef __FUNCT__ 2588 #define __FUNCT__ "DMConvert" 2589 /*@C 2590 DMConvert - Converts a DM to another DM, either of the same or different type. 2591 2592 Collective on DM 2593 2594 Input Parameters: 2595 + dm - the DM 2596 - newtype - new DM type (use "same" for the same type) 2597 2598 Output Parameter: 2599 . M - pointer to new DM 2600 2601 Notes: 2602 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2603 the MPI communicator of the generated DM is always the same as the communicator 2604 of the input DM. 2605 2606 Level: intermediate 2607 2608 .seealso: DMCreate() 2609 @*/ 2610 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2611 { 2612 DM B; 2613 char convname[256]; 2614 PetscBool sametype, issame; 2615 PetscErrorCode ierr; 2616 2617 PetscFunctionBegin; 2618 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2619 PetscValidType(dm,1); 2620 PetscValidPointer(M,3); 2621 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2622 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2623 { 2624 PetscErrorCode (*conv)(DM, DMType, DM*) = NULL; 2625 2626 /* 2627 Order of precedence: 2628 1) See if a specialized converter is known to the current DM. 2629 2) See if a specialized converter is known to the desired DM class. 2630 3) See if a good general converter is registered for the desired class 2631 4) See if a good general converter is known for the current matrix. 2632 5) Use a really basic converter. 2633 */ 2634 2635 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2636 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2637 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2638 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2639 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2640 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2641 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr); 2642 if (conv) goto foundconv; 2643 2644 /* 2) See if a specialized converter is known to the desired DM class. */ 2645 ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr); 2646 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2647 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2648 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2649 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2650 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2651 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2652 ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr); 2653 if (conv) { 2654 ierr = DMDestroy(&B);CHKERRQ(ierr); 2655 goto foundconv; 2656 } 2657 2658 #if 0 2659 /* 3) See if a good general converter is registered for the desired class */ 2660 conv = B->ops->convertfrom; 2661 ierr = DMDestroy(&B);CHKERRQ(ierr); 2662 if (conv) goto foundconv; 2663 2664 /* 4) See if a good general converter is known for the current matrix */ 2665 if (dm->ops->convert) { 2666 conv = dm->ops->convert; 2667 } 2668 if (conv) goto foundconv; 2669 #endif 2670 2671 /* 5) Use a really basic converter. */ 2672 SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2673 2674 foundconv: 2675 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2676 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2677 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2678 } 2679 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2680 PetscFunctionReturn(0); 2681 } 2682 2683 /*--------------------------------------------------------------------------------------------------------------------*/ 2684 2685 #undef __FUNCT__ 2686 #define __FUNCT__ "DMRegister" 2687 /*@C 2688 DMRegister - Adds a new DM component implementation 2689 2690 Not Collective 2691 2692 Input Parameters: 2693 + name - The name of a new user-defined creation routine 2694 - create_func - The creation routine itself 2695 2696 Notes: 2697 DMRegister() may be called multiple times to add several user-defined DMs 2698 2699 2700 Sample usage: 2701 .vb 2702 DMRegister("my_da", MyDMCreate); 2703 .ve 2704 2705 Then, your DM type can be chosen with the procedural interface via 2706 .vb 2707 DMCreate(MPI_Comm, DM *); 2708 DMSetType(DM,"my_da"); 2709 .ve 2710 or at runtime via the option 2711 .vb 2712 -da_type my_da 2713 .ve 2714 2715 Level: advanced 2716 2717 .keywords: DM, register 2718 .seealso: DMRegisterAll(), DMRegisterDestroy() 2719 2720 @*/ 2721 PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM)) 2722 { 2723 PetscErrorCode ierr; 2724 2725 PetscFunctionBegin; 2726 ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr); 2727 PetscFunctionReturn(0); 2728 } 2729 2730 #undef __FUNCT__ 2731 #define __FUNCT__ "DMLoad" 2732 /*@C 2733 DMLoad - Loads a DM that has been stored in binary with DMView(). 2734 2735 Collective on PetscViewer 2736 2737 Input Parameters: 2738 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 2739 some related function before a call to DMLoad(). 2740 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 2741 HDF5 file viewer, obtained from PetscViewerHDF5Open() 2742 2743 Level: intermediate 2744 2745 Notes: 2746 The type is determined by the data in the file, any type set into the DM before this call is ignored. 2747 2748 Notes for advanced users: 2749 Most users should not need to know the details of the binary storage 2750 format, since DMLoad() and DMView() completely hide these details. 2751 But for anyone who's interested, the standard binary matrix storage 2752 format is 2753 .vb 2754 has not yet been determined 2755 .ve 2756 2757 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 2758 @*/ 2759 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 2760 { 2761 PetscBool isbinary, ishdf5; 2762 PetscErrorCode ierr; 2763 2764 PetscFunctionBegin; 2765 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 2766 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 2767 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 2768 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);CHKERRQ(ierr); 2769 if (isbinary) { 2770 PetscInt classid; 2771 char type[256]; 2772 2773 ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr); 2774 if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid); 2775 ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr); 2776 ierr = DMSetType(newdm, type);CHKERRQ(ierr); 2777 if (newdm->ops->load) {ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);} 2778 } else if (ishdf5) { 2779 if (newdm->ops->load) {ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);} 2780 } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen() or PetscViewerHDF5Open()"); 2781 PetscFunctionReturn(0); 2782 } 2783 2784 /******************************** FEM Support **********************************/ 2785 2786 #undef __FUNCT__ 2787 #define __FUNCT__ "DMPrintCellVector" 2788 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) 2789 { 2790 PetscInt f; 2791 PetscErrorCode ierr; 2792 2793 PetscFunctionBegin; 2794 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2795 for (f = 0; f < len; ++f) { 2796 ierr = PetscPrintf(PETSC_COMM_SELF, " | %g |\n", (double)PetscRealPart(x[f]));CHKERRQ(ierr); 2797 } 2798 PetscFunctionReturn(0); 2799 } 2800 2801 #undef __FUNCT__ 2802 #define __FUNCT__ "DMPrintCellMatrix" 2803 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) 2804 { 2805 PetscInt f, g; 2806 PetscErrorCode ierr; 2807 2808 PetscFunctionBegin; 2809 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2810 for (f = 0; f < rows; ++f) { 2811 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2812 for (g = 0; g < cols; ++g) { 2813 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5g", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2814 } 2815 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2816 } 2817 PetscFunctionReturn(0); 2818 } 2819 2820 #undef __FUNCT__ 2821 #define __FUNCT__ "DMPrintLocalVec" 2822 PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X) 2823 { 2824 PetscMPIInt rank, numProcs; 2825 PetscInt p; 2826 PetscErrorCode ierr; 2827 2828 PetscFunctionBegin; 2829 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 2830 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr); 2831 ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr); 2832 for (p = 0; p < numProcs; ++p) { 2833 if (p == rank) { 2834 Vec x; 2835 2836 ierr = VecDuplicate(X, &x);CHKERRQ(ierr); 2837 ierr = VecCopy(X, x);CHKERRQ(ierr); 2838 ierr = VecChop(x, tol);CHKERRQ(ierr); 2839 ierr = VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr); 2840 ierr = VecDestroy(&x);CHKERRQ(ierr); 2841 ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr); 2842 } 2843 ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr); 2844 } 2845 PetscFunctionReturn(0); 2846 } 2847 2848 #undef __FUNCT__ 2849 #define __FUNCT__ "DMGetDefaultSection" 2850 /*@ 2851 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 2852 2853 Input Parameter: 2854 . dm - The DM 2855 2856 Output Parameter: 2857 . section - The PetscSection 2858 2859 Level: intermediate 2860 2861 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 2862 2863 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2864 @*/ 2865 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) 2866 { 2867 PetscErrorCode ierr; 2868 2869 PetscFunctionBegin; 2870 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2871 PetscValidPointer(section, 2); 2872 if (!dm->defaultSection && dm->ops->createdefaultsection) {ierr = (*dm->ops->createdefaultsection)(dm);CHKERRQ(ierr);} 2873 *section = dm->defaultSection; 2874 PetscFunctionReturn(0); 2875 } 2876 2877 #undef __FUNCT__ 2878 #define __FUNCT__ "DMSetDefaultSection" 2879 /*@ 2880 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 2881 2882 Input Parameters: 2883 + dm - The DM 2884 - section - The PetscSection 2885 2886 Level: intermediate 2887 2888 Note: Any existing Section will be destroyed 2889 2890 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2891 @*/ 2892 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) 2893 { 2894 PetscInt numFields; 2895 PetscInt f; 2896 PetscErrorCode ierr; 2897 2898 PetscFunctionBegin; 2899 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2900 PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2901 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2902 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 2903 dm->defaultSection = section; 2904 ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr); 2905 if (numFields) { 2906 ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr); 2907 for (f = 0; f < numFields; ++f) { 2908 const char *name; 2909 2910 ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr); 2911 ierr = PetscObjectSetName((PetscObject) dm->fields[f], name);CHKERRQ(ierr); 2912 } 2913 } 2914 /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */ 2915 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2916 PetscFunctionReturn(0); 2917 } 2918 2919 #undef __FUNCT__ 2920 #define __FUNCT__ "DMGetDefaultGlobalSection" 2921 /*@ 2922 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 2923 2924 Collective on DM 2925 2926 Input Parameter: 2927 . dm - The DM 2928 2929 Output Parameter: 2930 . section - The PetscSection 2931 2932 Level: intermediate 2933 2934 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 2935 2936 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 2937 @*/ 2938 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) 2939 { 2940 PetscErrorCode ierr; 2941 2942 PetscFunctionBegin; 2943 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2944 PetscValidPointer(section, 2); 2945 if (!dm->defaultGlobalSection) { 2946 PetscSection s; 2947 2948 ierr = DMGetDefaultSection(dm, &s);CHKERRQ(ierr); 2949 if (!s) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection in order to create a global PetscSection"); 2950 if (!dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSF in order to create a global PetscSection"); 2951 ierr = PetscSectionCreateGlobalSection(s, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 2952 ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr); 2953 ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm), dm->defaultGlobalSection, &dm->map);CHKERRQ(ierr); 2954 } 2955 *section = dm->defaultGlobalSection; 2956 PetscFunctionReturn(0); 2957 } 2958 2959 #undef __FUNCT__ 2960 #define __FUNCT__ "DMSetDefaultGlobalSection" 2961 /*@ 2962 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 2963 2964 Input Parameters: 2965 + dm - The DM 2966 - section - The PetscSection, or NULL 2967 2968 Level: intermediate 2969 2970 Note: Any existing Section will be destroyed 2971 2972 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2973 @*/ 2974 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2975 { 2976 PetscErrorCode ierr; 2977 2978 PetscFunctionBegin; 2979 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2980 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2981 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2982 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2983 dm->defaultGlobalSection = section; 2984 PetscFunctionReturn(0); 2985 } 2986 2987 #undef __FUNCT__ 2988 #define __FUNCT__ "DMGetDefaultSF" 2989 /*@ 2990 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2991 it is created from the default PetscSection layouts in the DM. 2992 2993 Input Parameter: 2994 . dm - The DM 2995 2996 Output Parameter: 2997 . sf - The PetscSF 2998 2999 Level: intermediate 3000 3001 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3002 3003 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3004 @*/ 3005 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) 3006 { 3007 PetscInt nroots; 3008 PetscErrorCode ierr; 3009 3010 PetscFunctionBegin; 3011 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3012 PetscValidPointer(sf, 2); 3013 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 3014 if (nroots < 0) { 3015 PetscSection section, gSection; 3016 3017 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3018 if (section) { 3019 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3020 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3021 } else { 3022 *sf = NULL; 3023 PetscFunctionReturn(0); 3024 } 3025 } 3026 *sf = dm->defaultSF; 3027 PetscFunctionReturn(0); 3028 } 3029 3030 #undef __FUNCT__ 3031 #define __FUNCT__ "DMSetDefaultSF" 3032 /*@ 3033 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3034 3035 Input Parameters: 3036 + dm - The DM 3037 - sf - The PetscSF 3038 3039 Level: intermediate 3040 3041 Note: Any previous SF is destroyed 3042 3043 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3044 @*/ 3045 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) 3046 { 3047 PetscErrorCode ierr; 3048 3049 PetscFunctionBegin; 3050 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3051 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3052 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3053 dm->defaultSF = sf; 3054 PetscFunctionReturn(0); 3055 } 3056 3057 #undef __FUNCT__ 3058 #define __FUNCT__ "DMCreateDefaultSF" 3059 /*@C 3060 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3061 describing the data layout. 3062 3063 Input Parameters: 3064 + dm - The DM 3065 . localSection - PetscSection describing the local data layout 3066 - globalSection - PetscSection describing the global data layout 3067 3068 Level: intermediate 3069 3070 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3071 @*/ 3072 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3073 { 3074 MPI_Comm comm; 3075 PetscLayout layout; 3076 const PetscInt *ranges; 3077 PetscInt *local; 3078 PetscSFNode *remote; 3079 PetscInt pStart, pEnd, p, nroots, nleaves = 0, l; 3080 PetscMPIInt size, rank; 3081 PetscErrorCode ierr; 3082 3083 PetscFunctionBegin; 3084 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 3085 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3086 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3087 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3088 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3089 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3090 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3091 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3092 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3093 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3094 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3095 for (p = pStart; p < pEnd; ++p) { 3096 PetscInt gdof, gcdof; 3097 3098 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3099 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3100 nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3101 } 3102 ierr = PetscMalloc1(nleaves, &local);CHKERRQ(ierr); 3103 ierr = PetscMalloc1(nleaves, &remote);CHKERRQ(ierr); 3104 for (p = pStart, l = 0; p < pEnd; ++p) { 3105 const PetscInt *cind; 3106 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3107 3108 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3109 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3110 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3111 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3112 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3113 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3114 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3115 if (!gdof) continue; /* Censored point */ 3116 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3117 if (gsize != dof-cdof) { 3118 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 3119 cdof = 0; /* Ignore constraints */ 3120 } 3121 for (d = 0, c = 0; d < dof; ++d) { 3122 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3123 local[l+d-c] = off+d; 3124 } 3125 if (gdof < 0) { 3126 for (d = 0; d < gsize; ++d, ++l) { 3127 PetscInt offset = -(goff+1) + d, r; 3128 3129 ierr = PetscFindInt(offset,size+1,ranges,&r);CHKERRQ(ierr); 3130 if (r < 0) r = -(r+2); 3131 if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff); 3132 remote[l].rank = r; 3133 remote[l].index = offset - ranges[r]; 3134 } 3135 } else { 3136 for (d = 0; d < gsize; ++d, ++l) { 3137 remote[l].rank = rank; 3138 remote[l].index = goff+d - ranges[rank]; 3139 } 3140 } 3141 } 3142 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3143 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3144 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3145 PetscFunctionReturn(0); 3146 } 3147 3148 #undef __FUNCT__ 3149 #define __FUNCT__ "DMGetPointSF" 3150 /*@ 3151 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3152 3153 Input Parameter: 3154 . dm - The DM 3155 3156 Output Parameter: 3157 . sf - The PetscSF 3158 3159 Level: intermediate 3160 3161 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3162 3163 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3164 @*/ 3165 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 3166 { 3167 PetscFunctionBegin; 3168 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3169 PetscValidPointer(sf, 2); 3170 *sf = dm->sf; 3171 PetscFunctionReturn(0); 3172 } 3173 3174 #undef __FUNCT__ 3175 #define __FUNCT__ "DMSetPointSF" 3176 /*@ 3177 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 3178 3179 Input Parameters: 3180 + dm - The DM 3181 - sf - The PetscSF 3182 3183 Level: intermediate 3184 3185 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3186 @*/ 3187 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3188 { 3189 PetscErrorCode ierr; 3190 3191 PetscFunctionBegin; 3192 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3193 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3194 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3195 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3196 dm->sf = sf; 3197 PetscFunctionReturn(0); 3198 } 3199 3200 #undef __FUNCT__ 3201 #define __FUNCT__ "DMGetNumFields" 3202 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3203 { 3204 PetscFunctionBegin; 3205 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3206 PetscValidPointer(numFields, 2); 3207 *numFields = dm->numFields; 3208 PetscFunctionReturn(0); 3209 } 3210 3211 #undef __FUNCT__ 3212 #define __FUNCT__ "DMSetNumFields" 3213 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields) 3214 { 3215 PetscInt f; 3216 PetscErrorCode ierr; 3217 3218 PetscFunctionBegin; 3219 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3220 if (dm->numFields == numFields) PetscFunctionReturn(0); 3221 for (f = 0; f < dm->numFields; ++f) { 3222 ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr); 3223 } 3224 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3225 dm->numFields = numFields; 3226 ierr = PetscMalloc1(dm->numFields, &dm->fields);CHKERRQ(ierr); 3227 for (f = 0; f < dm->numFields; ++f) { 3228 ierr = PetscContainerCreate(PetscObjectComm((PetscObject) dm), (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3229 } 3230 PetscFunctionReturn(0); 3231 } 3232 3233 #undef __FUNCT__ 3234 #define __FUNCT__ "DMGetField" 3235 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3236 { 3237 PetscFunctionBegin; 3238 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3239 PetscValidPointer(field, 3); 3240 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3241 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3242 *field = (PetscObject) dm->fields[f]; 3243 PetscFunctionReturn(0); 3244 } 3245 3246 #undef __FUNCT__ 3247 #define __FUNCT__ "DMSetField" 3248 PetscErrorCode DMSetField(DM dm, PetscInt f, PetscObject field) 3249 { 3250 PetscErrorCode ierr; 3251 3252 PetscFunctionBegin; 3253 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3254 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3255 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3256 if (((PetscObject) dm->fields[f]) == field) PetscFunctionReturn(0); 3257 ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr); 3258 dm->fields[f] = (PetscFE) field; 3259 ierr = PetscObjectReference((PetscObject) dm->fields[f]);CHKERRQ(ierr); 3260 PetscFunctionReturn(0); 3261 } 3262 3263 #undef __FUNCT__ 3264 #define __FUNCT__ "DMRestrictHook_Coordinates" 3265 PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx) 3266 { 3267 DM dm_coord,dmc_coord; 3268 PetscErrorCode ierr; 3269 Vec coords,ccoords; 3270 VecScatter scat; 3271 PetscFunctionBegin; 3272 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3273 ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr); 3274 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3275 ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr); 3276 if (coords && !ccoords) { 3277 ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr); 3278 ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr); 3279 ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3280 ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3281 ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr); 3282 ierr = VecScatterDestroy(&scat);CHKERRQ(ierr); 3283 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3284 } 3285 PetscFunctionReturn(0); 3286 } 3287 3288 #undef __FUNCT__ 3289 #define __FUNCT__ "DMSubDomainHook_Coordinates" 3290 static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx) 3291 { 3292 DM dm_coord,subdm_coord; 3293 PetscErrorCode ierr; 3294 Vec coords,ccoords,clcoords; 3295 VecScatter *scat_i,*scat_g; 3296 PetscFunctionBegin; 3297 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3298 ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr); 3299 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3300 ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr); 3301 if (coords && !ccoords) { 3302 ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr); 3303 ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr); 3304 ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr); 3305 ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3306 ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3307 ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3308 ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3309 ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr); 3310 ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr); 3311 ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr); 3312 ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr); 3313 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3314 ierr = VecDestroy(&clcoords);CHKERRQ(ierr); 3315 ierr = PetscFree(scat_i);CHKERRQ(ierr); 3316 ierr = PetscFree(scat_g);CHKERRQ(ierr); 3317 } 3318 PetscFunctionReturn(0); 3319 } 3320 3321 #undef __FUNCT__ 3322 #define __FUNCT__ "DMSetCoordinates" 3323 /*@ 3324 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3325 3326 Collective on DM 3327 3328 Input Parameters: 3329 + dm - the DM 3330 - c - coordinate vector 3331 3332 Note: 3333 The coordinates do include those for ghost points, which are in the local vector 3334 3335 Level: intermediate 3336 3337 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3338 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3339 @*/ 3340 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3341 { 3342 PetscErrorCode ierr; 3343 3344 PetscFunctionBegin; 3345 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3346 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3347 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3348 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3349 dm->coordinates = c; 3350 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3351 ierr = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3352 ierr = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3353 PetscFunctionReturn(0); 3354 } 3355 3356 #undef __FUNCT__ 3357 #define __FUNCT__ "DMSetCoordinatesLocal" 3358 /*@ 3359 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3360 3361 Collective on DM 3362 3363 Input Parameters: 3364 + dm - the DM 3365 - c - coordinate vector 3366 3367 Note: 3368 The coordinates of ghost points can be set using DMSetCoordinates() 3369 followed by DMGetCoordinatesLocal(). This is intended to enable the 3370 setting of ghost coordinates outside of the domain. 3371 3372 Level: intermediate 3373 3374 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3375 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3376 @*/ 3377 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3378 { 3379 PetscErrorCode ierr; 3380 3381 PetscFunctionBegin; 3382 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3383 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3384 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3385 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3386 3387 dm->coordinatesLocal = c; 3388 3389 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3390 PetscFunctionReturn(0); 3391 } 3392 3393 #undef __FUNCT__ 3394 #define __FUNCT__ "DMGetCoordinates" 3395 /*@ 3396 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3397 3398 Not Collective 3399 3400 Input Parameter: 3401 . dm - the DM 3402 3403 Output Parameter: 3404 . c - global coordinate vector 3405 3406 Note: 3407 This is a borrowed reference, so the user should NOT destroy this vector 3408 3409 Each process has only the local coordinates (does NOT have the ghost coordinates). 3410 3411 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3412 and (x_0,y_0,z_0,x_1,y_1,z_1...) 3413 3414 Level: intermediate 3415 3416 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3417 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3418 @*/ 3419 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3420 { 3421 PetscErrorCode ierr; 3422 3423 PetscFunctionBegin; 3424 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3425 PetscValidPointer(c,2); 3426 if (!dm->coordinates && dm->coordinatesLocal) { 3427 DM cdm = NULL; 3428 3429 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3430 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3431 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3432 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3433 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3434 } 3435 *c = dm->coordinates; 3436 PetscFunctionReturn(0); 3437 } 3438 3439 #undef __FUNCT__ 3440 #define __FUNCT__ "DMGetCoordinatesLocal" 3441 /*@ 3442 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3443 3444 Collective on DM 3445 3446 Input Parameter: 3447 . dm - the DM 3448 3449 Output Parameter: 3450 . c - coordinate vector 3451 3452 Note: 3453 This is a borrowed reference, so the user should NOT destroy this vector 3454 3455 Each process has the local and ghost coordinates 3456 3457 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3458 and (x_0,y_0,z_0,x_1,y_1,z_1...) 3459 3460 Level: intermediate 3461 3462 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3463 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3464 @*/ 3465 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3466 { 3467 PetscErrorCode ierr; 3468 3469 PetscFunctionBegin; 3470 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3471 PetscValidPointer(c,2); 3472 if (!dm->coordinatesLocal && dm->coordinates) { 3473 DM cdm = NULL; 3474 3475 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3476 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3477 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3478 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3479 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3480 } 3481 *c = dm->coordinatesLocal; 3482 PetscFunctionReturn(0); 3483 } 3484 3485 #undef __FUNCT__ 3486 #define __FUNCT__ "DMGetCoordinateDM" 3487 /*@ 3488 DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates 3489 3490 Collective on DM 3491 3492 Input Parameter: 3493 . dm - the DM 3494 3495 Output Parameter: 3496 . cdm - coordinate DM 3497 3498 Level: intermediate 3499 3500 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3501 .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3502 @*/ 3503 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3504 { 3505 PetscErrorCode ierr; 3506 3507 PetscFunctionBegin; 3508 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3509 PetscValidPointer(cdm,2); 3510 if (!dm->coordinateDM) { 3511 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3512 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3513 } 3514 *cdm = dm->coordinateDM; 3515 PetscFunctionReturn(0); 3516 } 3517 3518 #undef __FUNCT__ 3519 #define __FUNCT__ "DMSetCoordinateDM" 3520 /*@ 3521 DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates 3522 3523 Logically Collective on DM 3524 3525 Input Parameters: 3526 + dm - the DM 3527 - cdm - coordinate DM 3528 3529 Level: intermediate 3530 3531 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3532 .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3533 @*/ 3534 PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm) 3535 { 3536 PetscErrorCode ierr; 3537 3538 PetscFunctionBegin; 3539 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3540 PetscValidHeaderSpecific(cdm,DM_CLASSID,2); 3541 ierr = DMDestroy(&dm->coordinateDM);CHKERRQ(ierr); 3542 dm->coordinateDM = cdm; 3543 ierr = PetscObjectReference((PetscObject) dm->coordinateDM);CHKERRQ(ierr); 3544 PetscFunctionReturn(0); 3545 } 3546 3547 #undef __FUNCT__ 3548 #define __FUNCT__ "DMGetCoordinateSection" 3549 /*@ 3550 DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh. 3551 3552 Not Collective 3553 3554 Input Parameter: 3555 . dm - The DM object 3556 3557 Output Parameter: 3558 . section - The PetscSection object 3559 3560 Level: intermediate 3561 3562 .keywords: mesh, coordinates 3563 .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection() 3564 @*/ 3565 PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section) 3566 { 3567 DM cdm; 3568 PetscErrorCode ierr; 3569 3570 PetscFunctionBegin; 3571 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3572 PetscValidPointer(section, 2); 3573 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3574 ierr = DMGetDefaultSection(cdm, section);CHKERRQ(ierr); 3575 PetscFunctionReturn(0); 3576 } 3577 3578 #undef __FUNCT__ 3579 #define __FUNCT__ "DMSetCoordinateSection" 3580 /*@ 3581 DMSetCoordinateSection - Set the layout of coordinate values over the mesh. 3582 3583 Not Collective 3584 3585 Input Parameters: 3586 + dm - The DM object 3587 - section - The PetscSection object 3588 3589 Level: intermediate 3590 3591 .keywords: mesh, coordinates 3592 .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection() 3593 @*/ 3594 PetscErrorCode DMSetCoordinateSection(DM dm, PetscSection section) 3595 { 3596 DM cdm; 3597 PetscErrorCode ierr; 3598 3599 PetscFunctionBegin; 3600 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3601 PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 3602 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3603 ierr = DMSetDefaultSection(cdm, section);CHKERRQ(ierr); 3604 PetscFunctionReturn(0); 3605 } 3606 3607 #undef __FUNCT__ 3608 #define __FUNCT__ "DMLocatePoints" 3609 /*@ 3610 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3611 3612 Not collective 3613 3614 Input Parameters: 3615 + dm - The DM 3616 - v - The Vec of points 3617 3618 Output Parameter: 3619 . cells - The local cell numbers for cells which contain the points 3620 3621 Level: developer 3622 3623 .keywords: point location, mesh 3624 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3625 @*/ 3626 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells) 3627 { 3628 PetscErrorCode ierr; 3629 3630 PetscFunctionBegin; 3631 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3632 PetscValidHeaderSpecific(v,VEC_CLASSID,2); 3633 PetscValidPointer(cells,3); 3634 if (dm->ops->locatepoints) { 3635 ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr); 3636 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM"); 3637 PetscFunctionReturn(0); 3638 } 3639 3640 #undef __FUNCT__ 3641 #define __FUNCT__ "DMGetOutputDM" 3642 /*@ 3643 DMGetOutputDM - Retrieve the DM associated with the layout for output 3644 3645 Input Parameter: 3646 . dm - The original DM 3647 3648 Output Parameter: 3649 . odm - The DM which provides the layout for output 3650 3651 Level: intermediate 3652 3653 .seealso: VecView(), DMGetDefaultGlobalSection() 3654 @*/ 3655 PetscErrorCode DMGetOutputDM(DM dm, DM *odm) 3656 { 3657 PetscSection section; 3658 PetscBool hasConstraints; 3659 PetscErrorCode ierr; 3660 3661 PetscFunctionBegin; 3662 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3663 PetscValidPointer(odm,2); 3664 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3665 ierr = PetscSectionHasConstraints(section, &hasConstraints);CHKERRQ(ierr); 3666 if (!hasConstraints) { 3667 *odm = dm; 3668 PetscFunctionReturn(0); 3669 } 3670 if (!dm->dmBC) { 3671 PetscSection newSection, gsection; 3672 PetscSF sf; 3673 3674 ierr = DMClone(dm, &dm->dmBC);CHKERRQ(ierr); 3675 ierr = PetscSectionClone(section, &newSection);CHKERRQ(ierr); 3676 ierr = DMSetDefaultSection(dm->dmBC, newSection);CHKERRQ(ierr); 3677 ierr = PetscSectionDestroy(&newSection);CHKERRQ(ierr); 3678 ierr = DMGetPointSF(dm->dmBC, &sf);CHKERRQ(ierr); 3679 ierr = PetscSectionCreateGlobalSection(section, sf, PETSC_TRUE, &gsection);CHKERRQ(ierr); 3680 ierr = DMSetDefaultGlobalSection(dm->dmBC, gsection);CHKERRQ(ierr); 3681 ierr = PetscSectionDestroy(&gsection);CHKERRQ(ierr); 3682 } 3683 *odm = dm->dmBC; 3684 PetscFunctionReturn(0); 3685 } 3686 3687 #undef __FUNCT__ 3688 #define __FUNCT__ "DMGetOutputSequenceNumber" 3689 /*@ 3690 DMGetOutputSequenceNumber - Retrieve the sequence number for output 3691 3692 Input Parameter: 3693 . dm - The original DM 3694 3695 Output Parameter: 3696 . num - The output sequence number 3697 3698 Level: intermediate 3699 3700 Note: This is intended for output that should appear in sequence, for instance 3701 a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system. 3702 3703 .seealso: VecView() 3704 @*/ 3705 PetscErrorCode DMGetOutputSequenceNumber(DM dm, PetscInt *num) 3706 { 3707 PetscFunctionBegin; 3708 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3709 PetscValidPointer(num,2); 3710 *num = dm->outputSequenceNum; 3711 PetscFunctionReturn(0); 3712 } 3713 3714 #undef __FUNCT__ 3715 #define __FUNCT__ "DMSetOutputSequenceNumber" 3716 /*@ 3717 DMSetOutputSequenceNumber - Set the sequence number for output 3718 3719 Input Parameters: 3720 + dm - The original DM 3721 - num - The output sequence number 3722 3723 Level: intermediate 3724 3725 Note: This is intended for output that should appear in sequence, for instance 3726 a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system. 3727 3728 .seealso: VecView() 3729 @*/ 3730 PetscErrorCode DMSetOutputSequenceNumber(DM dm, PetscInt num) 3731 { 3732 PetscFunctionBegin; 3733 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3734 dm->outputSequenceNum = num; 3735 PetscFunctionReturn(0); 3736 } 3737