1 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 2 #include <petscsf.h> 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMCreate" 9 /*@ 10 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 11 12 If you never call DMSetType() it will generate an 13 error when you try to use the vector. 14 15 Collective on MPI_Comm 16 17 Input Parameter: 18 . comm - The communicator for the DM object 19 20 Output Parameter: 21 . dm - The DM object 22 23 Level: beginner 24 25 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 26 @*/ 27 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 28 { 29 DM v; 30 PetscErrorCode ierr; 31 32 PetscFunctionBegin; 33 PetscValidPointer(dm,2); 34 *dm = NULL; 35 ierr = PetscSysInitializePackage();CHKERRQ(ierr); 36 ierr = VecInitializePackage();CHKERRQ(ierr); 37 ierr = MatInitializePackage();CHKERRQ(ierr); 38 ierr = DMInitializePackage();CHKERRQ(ierr); 39 40 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 41 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 42 43 44 v->ltogmap = NULL; 45 v->ltogmapb = NULL; 46 v->bs = 1; 47 v->coloringtype = IS_COLORING_GLOBAL; 48 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 49 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 50 v->defaultSection = NULL; 51 v->defaultGlobalSection = NULL; 52 { 53 PetscInt i; 54 for (i = 0; i < 10; ++i) { 55 v->nullspaceConstructors[i] = NULL; 56 } 57 } 58 v->numFields = 0; 59 v->fields = NULL; 60 ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr); 61 ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr); 62 *dm = v; 63 PetscFunctionReturn(0); 64 } 65 66 #undef __FUNCT__ 67 #define __FUNCT__ "DMClone" 68 /*@ 69 DMClone - Creates a DM object with the same topology as the original. 70 71 Collective on MPI_Comm 72 73 Input Parameter: 74 . 
dm - The original DM object 75 76 Output Parameter: 77 . newdm - The new DM object 78 79 Level: beginner 80 81 .keywords: DM, topology, create 82 @*/ 83 PetscErrorCode DMClone(DM dm, DM *newdm) 84 { 85 PetscSF sf; 86 Vec coords; 87 void *ctx; 88 PetscErrorCode ierr; 89 90 PetscFunctionBegin; 91 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 92 PetscValidPointer(newdm,2); 93 ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr); 94 if (dm->ops->clone) { 95 ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr); 96 } 97 (*newdm)->setupcalled = PETSC_TRUE; 98 ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr); 99 ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr); 100 ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr); 101 ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr); 102 ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr); 103 if (coords) { 104 ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr); 105 } else { 106 ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr); 107 if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);} 108 } 109 PetscFunctionReturn(0); 110 } 111 112 #undef __FUNCT__ 113 #define __FUNCT__ "DMSetVecType" 114 /*@C 115 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 116 117 Logically Collective on DMDA 118 119 Input Parameter: 120 + da - initial distributed array 121 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 122 123 Options Database: 124 . 
-dm_vec_type ctype 125 126 Level: intermediate 127 128 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType() 129 @*/ 130 PetscErrorCode DMSetVecType(DM da,VecType ctype) 131 { 132 PetscErrorCode ierr; 133 134 PetscFunctionBegin; 135 PetscValidHeaderSpecific(da,DM_CLASSID,1); 136 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 137 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 138 PetscFunctionReturn(0); 139 } 140 141 #undef __FUNCT__ 142 #define __FUNCT__ "DMGetVecType" 143 /*@C 144 DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 145 146 Logically Collective on DMDA 147 148 Input Parameter: 149 . da - initial distributed array 150 151 Output Parameter: 152 . ctype - the vector type 153 154 Level: intermediate 155 156 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 157 @*/ 158 PetscErrorCode DMGetVecType(DM da,VecType *ctype) 159 { 160 PetscFunctionBegin; 161 PetscValidHeaderSpecific(da,DM_CLASSID,1); 162 *ctype = da->vectype; 163 PetscFunctionReturn(0); 164 } 165 166 #undef __FUNCT__ 167 #define __FUNCT__ "VecGetDM" 168 /*@ 169 VecGetDM - Gets the DM defining the data layout of the vector 170 171 Not collective 172 173 Input Parameter: 174 . v - The Vec 175 176 Output Parameter: 177 . 
dm - The DM 178 179 Level: intermediate 180 181 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 182 @*/ 183 PetscErrorCode VecGetDM(Vec v, DM *dm) 184 { 185 PetscErrorCode ierr; 186 187 PetscFunctionBegin; 188 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 189 PetscValidPointer(dm,2); 190 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 191 PetscFunctionReturn(0); 192 } 193 194 #undef __FUNCT__ 195 #define __FUNCT__ "VecSetDM" 196 /*@ 197 VecSetDM - Sets the DM defining the data layout of the vector 198 199 Not collective 200 201 Input Parameters: 202 + v - The Vec 203 - dm - The DM 204 205 Level: intermediate 206 207 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 208 @*/ 209 PetscErrorCode VecSetDM(Vec v, DM dm) 210 { 211 PetscErrorCode ierr; 212 213 PetscFunctionBegin; 214 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 215 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 216 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 217 PetscFunctionReturn(0); 218 } 219 220 #undef __FUNCT__ 221 #define __FUNCT__ "DMSetMatType" 222 /*@C 223 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 224 225 Logically Collective on DM 226 227 Input Parameter: 228 + dm - the DM context 229 . ctype - the matrix type 230 231 Options Database: 232 . 
-dm_mat_type ctype 233 234 Level: intermediate 235 236 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType() 237 @*/ 238 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 239 { 240 PetscErrorCode ierr; 241 242 PetscFunctionBegin; 243 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 244 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 245 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 246 PetscFunctionReturn(0); 247 } 248 249 #undef __FUNCT__ 250 #define __FUNCT__ "DMGetMatType" 251 /*@C 252 DMGetMatType - Gets the type of matrix created with DMCreateMatrix() 253 254 Logically Collective on DM 255 256 Input Parameter: 257 . dm - the DM context 258 259 Output Parameter: 260 . ctype - the matrix type 261 262 Options Database: 263 . -dm_mat_type ctype 264 265 Level: intermediate 266 267 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType() 268 @*/ 269 PetscErrorCode DMGetMatType(DM dm,MatType *ctype) 270 { 271 PetscFunctionBegin; 272 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 273 *ctype = dm->mattype; 274 PetscFunctionReturn(0); 275 } 276 277 #undef __FUNCT__ 278 #define __FUNCT__ "MatGetDM" 279 /*@ 280 MatGetDM - Gets the DM defining the data layout of the matrix 281 282 Not collective 283 284 Input Parameter: 285 . A - The Mat 286 287 Output Parameter: 288 . 
dm - The DM 289 290 Level: intermediate 291 292 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 293 @*/ 294 PetscErrorCode MatGetDM(Mat A, DM *dm) 295 { 296 PetscErrorCode ierr; 297 298 PetscFunctionBegin; 299 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 300 PetscValidPointer(dm,2); 301 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 302 PetscFunctionReturn(0); 303 } 304 305 #undef __FUNCT__ 306 #define __FUNCT__ "MatSetDM" 307 /*@ 308 MatSetDM - Sets the DM defining the data layout of the matrix 309 310 Not collective 311 312 Input Parameters: 313 + A - The Mat 314 - dm - The DM 315 316 Level: intermediate 317 318 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 319 @*/ 320 PetscErrorCode MatSetDM(Mat A, DM dm) 321 { 322 PetscErrorCode ierr; 323 324 PetscFunctionBegin; 325 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 326 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 327 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 328 PetscFunctionReturn(0); 329 } 330 331 #undef __FUNCT__ 332 #define __FUNCT__ "DMSetOptionsPrefix" 333 /*@C 334 DMSetOptionsPrefix - Sets the prefix used for searching for all 335 DMDA options in the database. 336 337 Logically Collective on DMDA 338 339 Input Parameter: 340 + da - the DMDA context 341 - prefix - the prefix to prepend to all option names 342 343 Notes: 344 A hyphen (-) must NOT be given at the beginning of the prefix name. 345 The first character of all runtime options is AUTOMATICALLY the hyphen. 
346 347 Level: advanced 348 349 .keywords: DMDA, set, options, prefix, database 350 351 .seealso: DMSetFromOptions() 352 @*/ 353 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 354 { 355 PetscErrorCode ierr; 356 357 PetscFunctionBegin; 358 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 359 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 360 PetscFunctionReturn(0); 361 } 362 363 #undef __FUNCT__ 364 #define __FUNCT__ "DMDestroy" 365 /*@ 366 DMDestroy - Destroys a vector packer or DMDA. 367 368 Collective on DM 369 370 Input Parameter: 371 . dm - the DM object to destroy 372 373 Level: developer 374 375 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 376 377 @*/ 378 PetscErrorCode DMDestroy(DM *dm) 379 { 380 PetscInt i, cnt = 0, f; 381 DMNamedVecLink nlink,nnext; 382 PetscErrorCode ierr; 383 384 PetscFunctionBegin; 385 if (!*dm) PetscFunctionReturn(0); 386 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 387 388 /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */ 389 for (f = 0; f < (*dm)->numFields; ++f) { 390 ierr = PetscObjectCompose((*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr); 391 ierr = PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr); 392 ierr = PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr); 393 } 394 /* count all the circular references of DM and its contained Vecs */ 395 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 396 if ((*dm)->localin[i]) cnt++; 397 if ((*dm)->globalin[i]) cnt++; 398 } 399 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 400 for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++; 401 if ((*dm)->x) { 402 DM obj; 403 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 404 if (obj == *dm) cnt++; 405 } 406 407 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 408 /* 409 Need this test because 
the dm references the vectors that 410 reference the dm, so destroying the dm calls destroy on the 411 vectors that cause another destroy on the dm 412 */ 413 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 414 ((PetscObject) (*dm))->refct = 0; 415 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 416 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 417 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 418 } 419 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 420 nnext = nlink->next; 421 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 422 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 423 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 424 ierr = PetscFree(nlink);CHKERRQ(ierr); 425 } 426 (*dm)->namedglobal = NULL; 427 428 for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */ 429 nnext = nlink->next; 430 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 431 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 432 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 433 ierr = PetscFree(nlink);CHKERRQ(ierr); 434 } 435 (*dm)->namedlocal = NULL; 436 437 /* Destroy the list of hooks */ 438 { 439 DMCoarsenHookLink link,next; 440 for (link=(*dm)->coarsenhook; link; link=next) { 441 next = link->next; 442 ierr = PetscFree(link);CHKERRQ(ierr); 443 } 444 (*dm)->coarsenhook = NULL; 445 } 446 { 447 DMRefineHookLink link,next; 448 for (link=(*dm)->refinehook; link; link=next) { 449 next = link->next; 450 ierr = PetscFree(link);CHKERRQ(ierr); 451 } 452 (*dm)->refinehook = NULL; 453 } 454 { 455 DMSubDomainHookLink link,next; 456 for (link=(*dm)->subdomainhook; link; link=next) { 457 next = link->next; 458 ierr = 
PetscFree(link);CHKERRQ(ierr); 459 } 460 (*dm)->subdomainhook = NULL; 461 } 462 { 463 DMGlobalToLocalHookLink link,next; 464 for (link=(*dm)->gtolhook; link; link=next) { 465 next = link->next; 466 ierr = PetscFree(link);CHKERRQ(ierr); 467 } 468 (*dm)->gtolhook = NULL; 469 } 470 /* Destroy the work arrays */ 471 { 472 DMWorkLink link,next; 473 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 474 for (link=(*dm)->workin; link; link=next) { 475 next = link->next; 476 ierr = PetscFree(link->mem);CHKERRQ(ierr); 477 ierr = PetscFree(link);CHKERRQ(ierr); 478 } 479 (*dm)->workin = NULL; 480 } 481 482 ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr); 483 ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr); 484 ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr); 485 486 if ((*dm)->ctx && (*dm)->ctxdestroy) { 487 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 488 } 489 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 490 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 491 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 492 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 493 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 494 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 495 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 496 497 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 498 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 499 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 500 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 501 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 502 503 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 504 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 505 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 506 507 for (f = 0; f < (*dm)->numFields; ++f) { 508 ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 509 } 510 ierr = 
PetscFree((*dm)->fields);CHKERRQ(ierr); 511 /* if memory was published with SAWs then destroy it */ 512 ierr = PetscObjectSAWsViewOff((PetscObject)*dm);CHKERRQ(ierr); 513 514 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 515 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 516 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 517 PetscFunctionReturn(0); 518 } 519 520 #undef __FUNCT__ 521 #define __FUNCT__ "DMSetUp" 522 /*@ 523 DMSetUp - sets up the data structures inside a DM object 524 525 Collective on DM 526 527 Input Parameter: 528 . dm - the DM object to setup 529 530 Level: developer 531 532 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 533 534 @*/ 535 PetscErrorCode DMSetUp(DM dm) 536 { 537 PetscErrorCode ierr; 538 539 PetscFunctionBegin; 540 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 541 if (dm->setupcalled) PetscFunctionReturn(0); 542 if (dm->ops->setup) { 543 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 544 } 545 dm->setupcalled = PETSC_TRUE; 546 PetscFunctionReturn(0); 547 } 548 549 #undef __FUNCT__ 550 #define __FUNCT__ "DMSetFromOptions" 551 /*@ 552 DMSetFromOptions - sets parameters in a DM from the options database 553 554 Collective on DM 555 556 Input Parameter: 557 . dm - the DM object to set options for 558 559 Options Database: 560 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 561 . -dm_vec_type <type> type of vector to create inside DM 562 . 
-dm_mat_type <type> type of matrix to create inside DM 563 - -dm_coloring_type <global or ghosted> 564 565 Level: developer 566 567 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 568 569 @*/ 570 PetscErrorCode DMSetFromOptions(DM dm) 571 { 572 char typeName[256]; 573 PetscBool flg; 574 PetscErrorCode ierr; 575 576 PetscFunctionBegin; 577 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 578 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 579 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 580 ierr = PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 581 if (flg) { 582 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 583 } 584 ierr = PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 585 if (flg) { 586 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 587 } 588 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 589 if (dm->ops->setfromoptions) { 590 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 591 } 592 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 593 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 594 ierr = PetscOptionsEnd();CHKERRQ(ierr); 595 PetscFunctionReturn(0); 596 } 597 598 #undef __FUNCT__ 599 #define __FUNCT__ "DMView" 600 /*@C 601 DMView - Views a vector packer or DMDA. 
602 603 Collective on DM 604 605 Input Parameter: 606 + dm - the DM object to view 607 - v - the viewer 608 609 Level: developer 610 611 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 612 613 @*/ 614 PetscErrorCode DMView(DM dm,PetscViewer v) 615 { 616 PetscErrorCode ierr; 617 PetscBool isbinary; 618 619 PetscFunctionBegin; 620 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 621 if (!v) { 622 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr); 623 } 624 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 625 if (isbinary) { 626 PetscInt classid = DM_FILE_CLASSID; 627 char type[256]; 628 629 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 630 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 631 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 632 } 633 if (dm->ops->view) { 634 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 635 } 636 PetscFunctionReturn(0); 637 } 638 639 #undef __FUNCT__ 640 #define __FUNCT__ "DMCreateGlobalVector" 641 /*@ 642 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 643 644 Collective on DM 645 646 Input Parameter: 647 . dm - the DM object 648 649 Output Parameter: 650 . vec - the global vector 651 652 Level: beginner 653 654 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 655 656 @*/ 657 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 658 { 659 PetscErrorCode ierr; 660 661 PetscFunctionBegin; 662 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 663 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 664 PetscFunctionReturn(0); 665 } 666 667 #undef __FUNCT__ 668 #define __FUNCT__ "DMCreateLocalVector" 669 /*@ 670 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 671 672 Not Collective 673 674 Input Parameter: 675 . 
dm - the DM object 676 677 Output Parameter: 678 . vec - the local vector 679 680 Level: beginner 681 682 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 683 684 @*/ 685 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 686 { 687 PetscErrorCode ierr; 688 689 PetscFunctionBegin; 690 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 691 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 692 PetscFunctionReturn(0); 693 } 694 695 #undef __FUNCT__ 696 #define __FUNCT__ "DMGetLocalToGlobalMapping" 697 /*@ 698 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 699 700 Collective on DM 701 702 Input Parameter: 703 . dm - the DM that provides the mapping 704 705 Output Parameter: 706 . ltog - the mapping 707 708 Level: intermediate 709 710 Notes: 711 This mapping can then be used by VecSetLocalToGlobalMapping() or 712 MatSetLocalToGlobalMapping(). 713 714 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 715 @*/ 716 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 717 { 718 PetscErrorCode ierr; 719 720 PetscFunctionBegin; 721 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 722 PetscValidPointer(ltog,2); 723 if (!dm->ltogmap) { 724 PetscSection section, sectionGlobal; 725 726 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 727 if (section) { 728 PetscInt *ltog; 729 PetscInt pStart, pEnd, size, p, l; 730 731 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 732 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 733 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 734 ierr = PetscMalloc1(size, <og);CHKERRQ(ierr); /* We want the local+overlap size */ 735 for (p = pStart, l = 0; p < pEnd; ++p) { 736 PetscInt dof, off, c; 737 738 /* Should probably use constrained dofs */ 739 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 740 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 741 for (c = 0; 
c < dof; ++c, ++l) { 742 ltog[l] = off+c; 743 } 744 } 745 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 746 ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr); 747 } else { 748 if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 749 ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr); 750 } 751 } 752 *ltog = dm->ltogmap; 753 PetscFunctionReturn(0); 754 } 755 756 #undef __FUNCT__ 757 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 758 /*@ 759 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 760 761 Collective on DM 762 763 Input Parameter: 764 . da - the distributed array that provides the mapping 765 766 Output Parameter: 767 . ltog - the block mapping 768 769 Level: intermediate 770 771 Notes: 772 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 773 MatSetLocalToGlobalMappingBlock(). 
774 775 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 776 @*/ 777 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 778 { 779 PetscErrorCode ierr; 780 781 PetscFunctionBegin; 782 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 783 PetscValidPointer(ltog,2); 784 if (!dm->ltogmapb) { 785 PetscInt bs; 786 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 787 if (bs > 1) { 788 if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 789 ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 790 } else { 791 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 792 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 793 } 794 } 795 *ltog = dm->ltogmapb; 796 PetscFunctionReturn(0); 797 } 798 799 #undef __FUNCT__ 800 #define __FUNCT__ "DMGetBlockSize" 801 /*@ 802 DMGetBlockSize - Gets the inherent block size associated with a DM 803 804 Not Collective 805 806 Input Parameter: 807 . dm - the DM with block structure 808 809 Output Parameter: 810 . 
bs - the block size, 1 implies no exploitable block structure 811 812 Level: intermediate 813 814 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 815 @*/ 816 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 817 { 818 PetscFunctionBegin; 819 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 820 PetscValidPointer(bs,2); 821 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 822 *bs = dm->bs; 823 PetscFunctionReturn(0); 824 } 825 826 #undef __FUNCT__ 827 #define __FUNCT__ "DMCreateInterpolation" 828 /*@ 829 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 830 831 Collective on DM 832 833 Input Parameter: 834 + dm1 - the DM object 835 - dm2 - the second, finer DM object 836 837 Output Parameter: 838 + mat - the interpolation 839 - vec - the scaling (optional) 840 841 Level: developer 842 843 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 844 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 845 846 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 847 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
848 849 850 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 851 852 @*/ 853 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 854 { 855 PetscErrorCode ierr; 856 857 PetscFunctionBegin; 858 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 859 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 860 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 861 PetscFunctionReturn(0); 862 } 863 864 #undef __FUNCT__ 865 #define __FUNCT__ "DMCreateInjection" 866 /*@ 867 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 868 869 Collective on DM 870 871 Input Parameter: 872 + dm1 - the DM object 873 - dm2 - the second, finer DM object 874 875 Output Parameter: 876 . ctx - the injection 877 878 Level: developer 879 880 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 881 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 882 883 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 884 885 @*/ 886 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 887 { 888 PetscErrorCode ierr; 889 890 PetscFunctionBegin; 891 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 892 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 893 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 894 PetscFunctionReturn(0); 895 } 896 897 #undef __FUNCT__ 898 #define __FUNCT__ "DMCreateColoring" 899 /*@ 900 DMCreateColoring - Gets coloring for a DM 901 902 Collective on DM 903 904 Input Parameter: 905 + dm - the DM object 906 - ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 907 908 Output Parameter: 909 . 
coloring - the coloring 910 911 Level: developer 912 913 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType() 914 915 @*/ 916 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring) 917 { 918 PetscErrorCode ierr; 919 920 PetscFunctionBegin; 921 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 922 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 923 ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr); 924 PetscFunctionReturn(0); 925 } 926 927 #undef __FUNCT__ 928 #define __FUNCT__ "DMCreateMatrix" 929 /*@ 930 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 931 932 Collective on DM 933 934 Input Parameter: 935 . dm - the DM object 936 937 Output Parameter: 938 . mat - the empty Jacobian 939 940 Level: beginner 941 942 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 943 do not need to do it yourself. 944 945 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 946 the nonzero pattern call DMDASetMatPreallocateOnly() 947 948 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 949 internally by PETSc. 950 951 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 952 the indices for the global numbering for DMDAs which is complicated. 
953 954 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType() 955 956 @*/ 957 PetscErrorCode DMCreateMatrix(DM dm,Mat *mat) 958 { 959 PetscErrorCode ierr; 960 961 PetscFunctionBegin; 962 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 963 ierr = MatInitializePackage();CHKERRQ(ierr); 964 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 965 PetscValidPointer(mat,3); 966 ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr); 967 PetscFunctionReturn(0); 968 } 969 970 #undef __FUNCT__ 971 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 972 /*@ 973 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 974 preallocated but the nonzero structure and zero values will not be set. 975 976 Logically Collective on DMDA 977 978 Input Parameter: 979 + dm - the DM 980 - only - PETSC_TRUE if only want preallocation 981 982 Level: developer 983 .seealso DMCreateMatrix() 984 @*/ 985 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 986 { 987 PetscFunctionBegin; 988 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 989 dm->prealloc_only = only; 990 PetscFunctionReturn(0); 991 } 992 993 #undef __FUNCT__ 994 #define __FUNCT__ "DMGetWorkArray" 995 /*@C 996 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 997 998 Not Collective 999 1000 Input Parameters: 1001 + dm - the DM object 1002 . count - The minium size 1003 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1004 1005 Output Parameter: 1006 . 
array - the work array 1007 1008 Level: developer 1009 1010 .seealso DMDestroy(), DMCreate() 1011 @*/ 1012 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1013 { 1014 PetscErrorCode ierr; 1015 DMWorkLink link; 1016 size_t size; 1017 1018 PetscFunctionBegin; 1019 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1020 PetscValidPointer(mem,4); 1021 if (dm->workin) { 1022 link = dm->workin; 1023 dm->workin = dm->workin->next; 1024 } else { 1025 ierr = PetscNewLog(dm,&link);CHKERRQ(ierr); 1026 } 1027 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 1028 if (size*count > link->bytes) { 1029 ierr = PetscFree(link->mem);CHKERRQ(ierr); 1030 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 1031 link->bytes = size*count; 1032 } 1033 link->next = dm->workout; 1034 dm->workout = link; 1035 *(void**)mem = link->mem; 1036 PetscFunctionReturn(0); 1037 } 1038 1039 #undef __FUNCT__ 1040 #define __FUNCT__ "DMRestoreWorkArray" 1041 /*@C 1042 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1043 1044 Not Collective 1045 1046 Input Parameters: 1047 + dm - the DM object 1048 . count - The minium size 1049 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1050 1051 Output Parameter: 1052 . 
array - the work array 1053 1054 Level: developer 1055 1056 .seealso DMDestroy(), DMCreate() 1057 @*/ 1058 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1059 { 1060 DMWorkLink *p,link; 1061 1062 PetscFunctionBegin; 1063 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1064 PetscValidPointer(mem,4); 1065 for (p=&dm->workout; (link=*p); p=&link->next) { 1066 if (link->mem == *(void**)mem) { 1067 *p = link->next; 1068 link->next = dm->workin; 1069 dm->workin = link; 1070 *(void**)mem = NULL; 1071 PetscFunctionReturn(0); 1072 } 1073 } 1074 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1075 PetscFunctionReturn(0); 1076 } 1077 1078 #undef __FUNCT__ 1079 #define __FUNCT__ "DMSetNullSpaceConstructor" 1080 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1081 { 1082 PetscFunctionBegin; 1083 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1084 if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1085 dm->nullspaceConstructors[field] = nullsp; 1086 PetscFunctionReturn(0); 1087 } 1088 1089 #undef __FUNCT__ 1090 #define __FUNCT__ "DMCreateFieldIS" 1091 /*@C 1092 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1093 1094 Not collective 1095 1096 Input Parameter: 1097 . dm - the DM object 1098 1099 Output Parameters: 1100 + numFields - The number of fields (or NULL if not requested) 1101 . fieldNames - The name for each field (or NULL if not requested) 1102 - fields - The global indices for each field (or NULL if not requested) 1103 1104 Level: intermediate 1105 1106 Notes: 1107 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1108 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1109 PetscFree(). 1110 1111 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1112 @*/ 1113 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1114 { 1115 PetscSection section, sectionGlobal; 1116 PetscErrorCode ierr; 1117 1118 PetscFunctionBegin; 1119 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1120 if (numFields) { 1121 PetscValidPointer(numFields,2); 1122 *numFields = 0; 1123 } 1124 if (fieldNames) { 1125 PetscValidPointer(fieldNames,3); 1126 *fieldNames = NULL; 1127 } 1128 if (fields) { 1129 PetscValidPointer(fields,4); 1130 *fields = NULL; 1131 } 1132 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1133 if (section) { 1134 PetscInt *fieldSizes, **fieldIndices; 1135 PetscInt nF, f, pStart, pEnd, p; 1136 1137 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1138 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1139 ierr = PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);CHKERRQ(ierr); 1140 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1141 for (f = 0; f < nF; ++f) { 1142 fieldSizes[f] = 0; 1143 } 1144 for (p = pStart; p < pEnd; ++p) { 1145 PetscInt gdof; 1146 1147 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1148 if (gdof > 0) { 1149 for (f = 0; f < nF; ++f) { 1150 PetscInt fdof, fcdof; 1151 1152 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1153 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1154 fieldSizes[f] += fdof-fcdof; 1155 } 1156 } 1157 } 1158 for (f = 0; f < nF; ++f) { 1159 ierr = PetscMalloc1(fieldSizes[f], &fieldIndices[f]);CHKERRQ(ierr); 1160 fieldSizes[f] = 0; 1161 } 1162 for (p = pStart; p < pEnd; ++p) { 1163 PetscInt gdof, goff; 1164 1165 ierr = 
PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1166 if (gdof > 0) { 1167 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1168 for (f = 0; f < nF; ++f) { 1169 PetscInt fdof, fcdof, fc; 1170 1171 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1172 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1173 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1174 fieldIndices[f][fieldSizes[f]] = goff++; 1175 } 1176 } 1177 } 1178 } 1179 if (numFields) *numFields = nF; 1180 if (fieldNames) { 1181 ierr = PetscMalloc1(nF, fieldNames);CHKERRQ(ierr); 1182 for (f = 0; f < nF; ++f) { 1183 const char *fieldName; 1184 1185 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1186 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1187 } 1188 } 1189 if (fields) { 1190 ierr = PetscMalloc1(nF, fields);CHKERRQ(ierr); 1191 for (f = 0; f < nF; ++f) { 1192 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1193 } 1194 } 1195 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1196 } else if (dm->ops->createfieldis) { 1197 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1198 } 1199 PetscFunctionReturn(0); 1200 } 1201 1202 1203 #undef __FUNCT__ 1204 #define __FUNCT__ "DMCreateFieldDecomposition" 1205 /*@C 1206 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1207 corresponding to different fields: each IS contains the global indices of the dofs of the 1208 corresponding field. The optional list of DMs define the DM for each subproblem. 1209 Generalizes DMCreateFieldIS(). 1210 1211 Not collective 1212 1213 Input Parameter: 1214 . dm - the DM object 1215 1216 Output Parameters: 1217 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1218 . 
namelist - The name for each field (or NULL if not requested) 1219 . islist - The global indices for each field (or NULL if not requested) 1220 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1221 1222 Level: intermediate 1223 1224 Notes: 1225 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1226 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1227 and all of the arrays should be freed with PetscFree(). 1228 1229 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1230 @*/ 1231 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1232 { 1233 PetscErrorCode ierr; 1234 1235 PetscFunctionBegin; 1236 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1237 if (len) { 1238 PetscValidPointer(len,2); 1239 *len = 0; 1240 } 1241 if (namelist) { 1242 PetscValidPointer(namelist,3); 1243 *namelist = 0; 1244 } 1245 if (islist) { 1246 PetscValidPointer(islist,4); 1247 *islist = 0; 1248 } 1249 if (dmlist) { 1250 PetscValidPointer(dmlist,5); 1251 *dmlist = 0; 1252 } 1253 /* 1254 Is it a good idea to apply the following check across all impls? 1255 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1256 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1257 */ 1258 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1259 if (!dm->ops->createfielddecomposition) { 1260 PetscSection section; 1261 PetscInt numFields, f; 1262 1263 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1264 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1265 if (section && numFields && dm->ops->createsubdm) { 1266 *len = numFields; 1267 ierr = PetscMalloc3(numFields,namelist,numFields,islist,numFields,dmlist);CHKERRQ(ierr); 1268 for (f = 0; f < numFields; ++f) { 1269 const char *fieldName; 1270 1271 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1272 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1273 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1274 } 1275 } else { 1276 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1277 /* By default there are no DMs associated with subproblems. */ 1278 if (dmlist) *dmlist = NULL; 1279 } 1280 } else { 1281 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1282 } 1283 PetscFunctionReturn(0); 1284 } 1285 1286 #undef __FUNCT__ 1287 #define __FUNCT__ "DMCreateSubDM" 1288 /*@C 1289 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1290 The fields are defined by DMCreateFieldIS(). 1291 1292 Not collective 1293 1294 Input Parameters: 1295 + dm - the DM object 1296 . numFields - number of fields in this subproblem 1297 - len - The number of subproblems in the decomposition (or NULL if not requested) 1298 1299 Output Parameters: 1300 . 
is - The global indices for the subproblem 1301 - dm - The DM for the subproblem 1302 1303 Level: intermediate 1304 1305 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1306 @*/ 1307 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1308 { 1309 PetscErrorCode ierr; 1310 1311 PetscFunctionBegin; 1312 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1313 PetscValidPointer(fields,3); 1314 if (is) PetscValidPointer(is,4); 1315 if (subdm) PetscValidPointer(subdm,5); 1316 if (dm->ops->createsubdm) { 1317 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr); 1318 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1319 PetscFunctionReturn(0); 1320 } 1321 1322 1323 #undef __FUNCT__ 1324 #define __FUNCT__ "DMCreateDomainDecomposition" 1325 /*@C 1326 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1327 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1328 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1329 define a nonoverlapping covering, while outer subdomains can overlap. 1330 The optional list of DMs define the DM for each subproblem. 1331 1332 Not collective 1333 1334 Input Parameter: 1335 . dm - the DM object 1336 1337 Output Parameters: 1338 + len - The number of subproblems in the domain decomposition (or NULL if not requested) 1339 . namelist - The name for each subdomain (or NULL if not requested) 1340 . innerislist - The global indices for each inner subdomain (or NULL, if not requested) 1341 . 
outerislist - The global indices for each outer subdomain (or NULL, if not requested) 1342 - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1343 1344 Level: intermediate 1345 1346 Notes: 1347 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1348 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1349 and all of the arrays should be freed with PetscFree(). 1350 1351 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1352 @*/ 1353 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1354 { 1355 PetscErrorCode ierr; 1356 DMSubDomainHookLink link; 1357 PetscInt i,l; 1358 1359 PetscFunctionBegin; 1360 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1361 if (len) {PetscValidPointer(len,2); *len = 0;} 1362 if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;} 1363 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;} 1364 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;} 1365 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;} 1366 /* 1367 Is it a good idea to apply the following check across all impls? 1368 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1369 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1370 */ 1371 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1372 if (dm->ops->createdomaindecomposition) { 1373 ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr); 1374 /* copy subdomain hooks and context over to the subdomain DMs */ 1375 if (dmlist) { 1376 for (i = 0; i < l; i++) { 1377 for (link=dm->subdomainhook; link; link=link->next) { 1378 if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);} 1379 } 1380 (*dmlist)[i]->ctx = dm->ctx; 1381 } 1382 } 1383 if (len) *len = l; 1384 } 1385 PetscFunctionReturn(0); 1386 } 1387 1388 1389 #undef __FUNCT__ 1390 #define __FUNCT__ "DMCreateDomainDecompositionScatters" 1391 /*@C 1392 DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector 1393 1394 Not collective 1395 1396 Input Parameters: 1397 + dm - the DM object 1398 . n - the number of subdomain scatters 1399 - subdms - the local subdomains 1400 1401 Output Parameters: 1402 + n - the number of scatters returned 1403 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain 1404 . oscat - scatter from global vector to overlapping global vector entries on subdomain 1405 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts) 1406 1407 Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution 1408 of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets 1409 of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of 1410 solution and residual data. 
1411 1412 Level: developer 1413 1414 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1415 @*/ 1416 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1417 { 1418 PetscErrorCode ierr; 1419 1420 PetscFunctionBegin; 1421 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1422 PetscValidPointer(subdms,3); 1423 if (dm->ops->createddscatters) { 1424 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1425 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1426 PetscFunctionReturn(0); 1427 } 1428 1429 #undef __FUNCT__ 1430 #define __FUNCT__ "DMRefine" 1431 /*@ 1432 DMRefine - Refines a DM object 1433 1434 Collective on DM 1435 1436 Input Parameter: 1437 + dm - the DM object 1438 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1439 1440 Output Parameter: 1441 . 
dmf - the refined DM, or NULL 1442 1443 Note: If no refinement was done, the return value is NULL 1444 1445 Level: developer 1446 1447 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1448 @*/ 1449 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1450 { 1451 PetscErrorCode ierr; 1452 DMRefineHookLink link; 1453 1454 PetscFunctionBegin; 1455 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1456 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1457 if (*dmf) { 1458 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1459 1460 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1461 1462 (*dmf)->ctx = dm->ctx; 1463 (*dmf)->leveldown = dm->leveldown; 1464 (*dmf)->levelup = dm->levelup + 1; 1465 1466 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1467 for (link=dm->refinehook; link; link=link->next) { 1468 if (link->refinehook) { 1469 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1470 } 1471 } 1472 } 1473 PetscFunctionReturn(0); 1474 } 1475 1476 #undef __FUNCT__ 1477 #define __FUNCT__ "DMRefineHookAdd" 1478 /*@C 1479 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1480 1481 Logically Collective 1482 1483 Input Arguments: 1484 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1485 . refinehook - function to run when setting up a coarser level 1486 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1487 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1488 1489 Calling sequence of refinehook: 1490 $ refinehook(DM coarse,DM fine,void *ctx); 1491 1492 + coarse - coarse level DM 1493 . 
fine - fine level DM to interpolate problem to 1494 - ctx - optional user-defined function context 1495 1496 Calling sequence for interphook: 1497 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1498 1499 + coarse - coarse level DM 1500 . interp - matrix interpolating a coarse-level solution to the finer grid 1501 . fine - fine level DM to update 1502 - ctx - optional user-defined function context 1503 1504 Level: advanced 1505 1506 Notes: 1507 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1508 1509 If this function is called multiple times, the hooks will be run in the order they are added. 1510 1511 This function is currently not available from Fortran. 1512 1513 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1514 @*/ 1515 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1516 { 1517 PetscErrorCode ierr; 1518 DMRefineHookLink link,*p; 1519 1520 PetscFunctionBegin; 1521 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1522 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1523 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1524 link->refinehook = refinehook; 1525 link->interphook = interphook; 1526 link->ctx = ctx; 1527 link->next = NULL; 1528 *p = link; 1529 PetscFunctionReturn(0); 1530 } 1531 1532 #undef __FUNCT__ 1533 #define __FUNCT__ "DMInterpolate" 1534 /*@ 1535 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1536 1537 Collective if any hooks are 1538 1539 Input Arguments: 1540 + coarse - coarser DM to use as a base 1541 . 
restrct - interpolation matrix, apply using MatInterpolate() 1542 - fine - finer DM to update 1543 1544 Level: developer 1545 1546 .seealso: DMRefineHookAdd(), MatInterpolate() 1547 @*/ 1548 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1549 { 1550 PetscErrorCode ierr; 1551 DMRefineHookLink link; 1552 1553 PetscFunctionBegin; 1554 for (link=fine->refinehook; link; link=link->next) { 1555 if (link->interphook) { 1556 ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr); 1557 } 1558 } 1559 PetscFunctionReturn(0); 1560 } 1561 1562 #undef __FUNCT__ 1563 #define __FUNCT__ "DMGetRefineLevel" 1564 /*@ 1565 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1566 1567 Not Collective 1568 1569 Input Parameter: 1570 . dm - the DM object 1571 1572 Output Parameter: 1573 . level - number of refinements 1574 1575 Level: developer 1576 1577 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1578 1579 @*/ 1580 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1581 { 1582 PetscFunctionBegin; 1583 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1584 *level = dm->levelup; 1585 PetscFunctionReturn(0); 1586 } 1587 1588 #undef __FUNCT__ 1589 #define __FUNCT__ "DMGlobalToLocalHookAdd" 1590 /*@C 1591 DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called 1592 1593 Logically Collective 1594 1595 Input Arguments: 1596 + dm - the DM 1597 . beginhook - function to run at the beginning of DMGlobalToLocalBegin() 1598 . endhook - function to run after DMGlobalToLocalEnd() has completed 1599 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1600 1601 Calling sequence for beginhook: 1602 $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1603 1604 + dm - global DM 1605 . g - global vector 1606 . mode - mode 1607 . 
l - local vector 1608 - ctx - optional user-defined function context 1609 1610 1611 Calling sequence for endhook: 1612 $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1613 1614 + global - global DM 1615 - ctx - optional user-defined function context 1616 1617 Level: advanced 1618 1619 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1620 @*/ 1621 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx) 1622 { 1623 PetscErrorCode ierr; 1624 DMGlobalToLocalHookLink link,*p; 1625 1626 PetscFunctionBegin; 1627 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1628 for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1629 ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr); 1630 link->beginhook = beginhook; 1631 link->endhook = endhook; 1632 link->ctx = ctx; 1633 link->next = NULL; 1634 *p = link; 1635 PetscFunctionReturn(0); 1636 } 1637 1638 #undef __FUNCT__ 1639 #define __FUNCT__ "DMGlobalToLocalBegin" 1640 /*@ 1641 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1642 1643 Neighbor-wise Collective on DM 1644 1645 Input Parameters: 1646 + dm - the DM object 1647 . g - the global vector 1648 . 
mode - INSERT_VALUES or ADD_VALUES 1649 - l - the local vector 1650 1651 1652 Level: beginner 1653 1654 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1655 1656 @*/ 1657 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1658 { 1659 PetscSF sf; 1660 PetscErrorCode ierr; 1661 DMGlobalToLocalHookLink link; 1662 1663 PetscFunctionBegin; 1664 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1665 for (link=dm->gtolhook; link; link=link->next) { 1666 if (link->beginhook) { 1667 ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr); 1668 } 1669 } 1670 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1671 if (sf) { 1672 PetscScalar *lArray, *gArray; 1673 1674 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1675 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1676 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1677 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1678 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1679 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1680 } else { 1681 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1682 } 1683 PetscFunctionReturn(0); 1684 } 1685 1686 #undef __FUNCT__ 1687 #define __FUNCT__ "DMGlobalToLocalEnd" 1688 /*@ 1689 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1690 1691 Neighbor-wise Collective on DM 1692 1693 Input Parameters: 1694 + dm - the DM object 1695 . g - the global vector 1696 . 
mode - INSERT_VALUES or ADD_VALUES 1697 - l - the local vector 1698 1699 1700 Level: beginner 1701 1702 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1703 1704 @*/ 1705 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1706 { 1707 PetscSF sf; 1708 PetscErrorCode ierr; 1709 PetscScalar *lArray, *gArray; 1710 DMGlobalToLocalHookLink link; 1711 1712 PetscFunctionBegin; 1713 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1714 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1715 if (sf) { 1716 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1717 1718 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1719 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1720 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1721 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1722 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1723 } else { 1724 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1725 } 1726 for (link=dm->gtolhook; link; link=link->next) { 1727 if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);} 1728 } 1729 PetscFunctionReturn(0); 1730 } 1731 1732 #undef __FUNCT__ 1733 #define __FUNCT__ "DMLocalToGlobalBegin" 1734 /*@ 1735 DMLocalToGlobalBegin - updates global vectors from local vectors 1736 1737 Neighbor-wise Collective on DM 1738 1739 Input Parameters: 1740 + dm - the DM object 1741 . l - the local vector 1742 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1743 base point. 1744 - - the global vector 1745 1746 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1747 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1748 global array to the final global array with VecAXPY(). 1749 1750 Level: beginner 1751 1752 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1753 1754 @*/ 1755 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1756 { 1757 PetscSF sf; 1758 PetscErrorCode ierr; 1759 1760 PetscFunctionBegin; 1761 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1762 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1763 if (sf) { 1764 MPI_Op op; 1765 PetscScalar *lArray, *gArray; 1766 1767 switch (mode) { 1768 case INSERT_VALUES: 1769 case INSERT_ALL_VALUES: 1770 op = MPIU_REPLACE; break; 1771 case ADD_VALUES: 1772 case ADD_ALL_VALUES: 1773 op = MPI_SUM; break; 1774 default: 1775 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1776 } 1777 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1778 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1779 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1780 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1781 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1782 } else { 1783 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1784 } 1785 PetscFunctionReturn(0); 1786 } 1787 1788 #undef __FUNCT__ 1789 #define __FUNCT__ "DMLocalToGlobalEnd" 1790 /*@ 1791 DMLocalToGlobalEnd - updates global vectors from local vectors 1792 1793 Neighbor-wise Collective on DM 1794 1795 Input Parameters: 1796 + dm - the DM object 1797 . l - the local vector 1798 . 
mode - INSERT_VALUES or ADD_VALUES 1799 - g - the global vector 1800 1801 1802 Level: beginner 1803 1804 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1805 1806 @*/ 1807 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1808 { 1809 PetscSF sf; 1810 PetscErrorCode ierr; 1811 1812 PetscFunctionBegin; 1813 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1814 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1815 if (sf) { 1816 MPI_Op op; 1817 PetscScalar *lArray, *gArray; 1818 1819 switch (mode) { 1820 case INSERT_VALUES: 1821 case INSERT_ALL_VALUES: 1822 op = MPIU_REPLACE; break; 1823 case ADD_VALUES: 1824 case ADD_ALL_VALUES: 1825 op = MPI_SUM; break; 1826 default: 1827 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1828 } 1829 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1830 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1831 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1832 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1833 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1834 } else { 1835 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1836 } 1837 PetscFunctionReturn(0); 1838 } 1839 1840 #undef __FUNCT__ 1841 #define __FUNCT__ "DMLocalToLocalBegin" 1842 /*@ 1843 DMLocalToLocalBegin - Maps from a local vector (including ghost points 1844 that contain irrelevant values) to another local vector where the ghost 1845 points in the second are set correctly. Must be followed by DMLocalToLocalEnd(). 1846 1847 Neighbor-wise Collective on DM and Vec 1848 1849 Input Parameters: 1850 + dm - the DM object 1851 . g - the original local vector 1852 - mode - one of INSERT_VALUES or ADD_VALUES 1853 1854 Output Parameter: 1855 . 
l - the local vector with correct ghost values 1856 1857 Level: intermediate 1858 1859 Notes: 1860 The local vectors used here need not be the same as those 1861 obtained from DMCreateLocalVector(), BUT they 1862 must have the same parallel data layout; they could, for example, be 1863 obtained with VecDuplicate() from the DM originating vectors. 1864 1865 .keywords: DM, local-to-local, begin 1866 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1867 1868 @*/ 1869 PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1870 { 1871 PetscErrorCode ierr; 1872 1873 PetscFunctionBegin; 1874 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1875 ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1876 PetscFunctionReturn(0); 1877 } 1878 1879 #undef __FUNCT__ 1880 #define __FUNCT__ "DMLocalToLocalEnd" 1881 /*@ 1882 DMLocalToLocalEnd - Maps from a local vector (including ghost points 1883 that contain irrelevant values) to another local vector where the ghost 1884 points in the second are set correctly. Must be preceded by DMLocalToLocalBegin(). 1885 1886 Neighbor-wise Collective on DM and Vec 1887 1888 Input Parameters: 1889 + da - the DM object 1890 . g - the original local vector 1891 - mode - one of INSERT_VALUES or ADD_VALUES 1892 1893 Output Parameter: 1894 . l - the local vector with correct ghost values 1895 1896 Level: intermediate 1897 1898 Notes: 1899 The local vectors used here need not be the same as those 1900 obtained from DMCreateLocalVector(), BUT they 1901 must have the same parallel data layout; they could, for example, be 1902 obtained with VecDuplicate() from the DM originating vectors. 
1903 1904 .keywords: DM, local-to-local, end 1905 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1906 1907 @*/ 1908 PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1909 { 1910 PetscErrorCode ierr; 1911 1912 PetscFunctionBegin; 1913 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1914 ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1915 PetscFunctionReturn(0); 1916 } 1917 1918 1919 #undef __FUNCT__ 1920 #define __FUNCT__ "DMCoarsen" 1921 /*@ 1922 DMCoarsen - Coarsens a DM object 1923 1924 Collective on DM 1925 1926 Input Parameter: 1927 + dm - the DM object 1928 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1929 1930 Output Parameter: 1931 . dmc - the coarsened DM 1932 1933 Level: developer 1934 1935 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1936 1937 @*/ 1938 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1939 { 1940 PetscErrorCode ierr; 1941 DMCoarsenHookLink link; 1942 1943 PetscFunctionBegin; 1944 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1945 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1946 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1947 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1948 (*dmc)->ctx = dm->ctx; 1949 (*dmc)->levelup = dm->levelup; 1950 (*dmc)->leveldown = dm->leveldown + 1; 1951 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1952 for (link=dm->coarsenhook; link; link=link->next) { 1953 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1954 } 1955 PetscFunctionReturn(0); 1956 } 1957 1958 #undef __FUNCT__ 1959 #define __FUNCT__ "DMCoarsenHookAdd" 1960 /*@C 1961 DMCoarsenHookAdd - adds a callback to be 
run when restricting a nonlinear problem to the coarse grid

   Logically Collective

   Input Arguments:
+  fine - DM on which to run a hook when restricting to a coarser level
.  coarsenhook - function to run when setting up a coarser level
.  restricthook - function to run to update data on coarser levels (once per SNESSolve())
-  ctx - [optional] user-defined context providing data for the hooks (may be NULL)

   Calling sequence of coarsenhook:
$    coarsenhook(DM fine,DM coarse,void *ctx);

+  fine - fine level DM
.  coarse - coarse level DM to restrict problem to
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

+  fine - fine level DM
.  mrestrict - matrix restricting a fine-level solution to the coarse grid
.  rscale - scaling vector for restriction
.  inject - matrix restricting by injection
.  coarse - coarse level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fine,DM_CLASSID,1);
  for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
  link->coarsenhook  = coarsenhook;
  link->restricthook = restricthook;
  link->ctx          = ctx;
  link->next         = NULL;
  /* Append at the tail so hooks run in registration order */
  *p                 = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestrict"
/*@
   DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

   Collective if any hooks are

   Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.  rscale - scaling vector for restriction
.
inject - injection matrix, also applied using MatRestrict()
-  coarse - coarser DM to update

   Level: developer

.seealso: DMCoarsenHookAdd(), MatRestrict()
@*/
PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  /* Run every restricthook registered on the fine DM, in registration order */
  for (link=fine->coarsenhook; link; link=link->next) {
    if (link->restricthook) {
      ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHookAdd"
/*@C
   DMSubDomainHookAdd - adds a callback to be run when restricting a problem to subdomain (block) DMs

   Logically Collective

   Input Arguments:
+  global - global DM
.  ddhook - function to run to pass data to the decomposition DM upon its creation
.  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context providing data for the hooks (may be NULL)


   Calling sequence for ddhook:
$    ddhook(DM global,DM block,void *ctx)

+  global - global DM
.  block - block DM
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

+  global - global DM
.  out - scatter to the outer (with ghost and overlap points) block vector
.  in - scatter to block vector values only owned locally
.  block - block DM
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on subdomain DMs.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the global information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook       = ddhook;
  link->ctx          = ctx;
  link->next         = NULL;
  /* Append at the tail so hooks run in registration order */
  *p                 = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainRestrict"
/*@
   DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

   Collective if any hooks are

   Input Arguments:
+  global - global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.
gscatter - scatter from domain global vector filling subdomain local vector with ghosts
-  subdm - subdomain DM to update

   Level: developer

.seealso: DMCoarsenHookAdd(), DMSubDomainHookAdd(), MatRestrict()
@*/
PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;

  PetscFunctionBegin;
  /* Run every restricthook registered on the global DM, in registration order */
  for (link=global->subdomainhook; link; link=link->next) {
    if (link->restricthook) {
      ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoarsenLevel"
/*@
   DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.  level - number of coarsenings

   Level: developer

.seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *level = dm->leveldown;
  PetscFunctionReturn(0);
}



#undef __FUNCT__
#define __FUNCT__ "DMRefineHierarchy"
/*@C
   DMRefineHierarchy - Refines a DM object, all levels at once

   Collective on DM

   Input Parameters:
+  dm - the DM object
-  nlevels - the number of levels of refinement

   Output Parameter:
.
dmf - the refined DM hierarchy 2183 2184 Level: developer 2185 2186 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2187 2188 @*/ 2189 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2190 { 2191 PetscErrorCode ierr; 2192 2193 PetscFunctionBegin; 2194 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2195 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2196 if (nlevels == 0) PetscFunctionReturn(0); 2197 if (dm->ops->refinehierarchy) { 2198 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2199 } else if (dm->ops->refine) { 2200 PetscInt i; 2201 2202 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2203 for (i=1; i<nlevels; i++) { 2204 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2205 } 2206 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2207 PetscFunctionReturn(0); 2208 } 2209 2210 #undef __FUNCT__ 2211 #define __FUNCT__ "DMCoarsenHierarchy" 2212 /*@C 2213 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2214 2215 Collective on DM 2216 2217 Input Parameter: 2218 + dm - the DM object 2219 - nlevels - the number of levels of coarsening 2220 2221 Output Parameter: 2222 . 
dmc - the coarsened DM hierarchy

   Level: developer

.seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dmc,3);
  if (dm->ops->coarsenhierarchy) {
    ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
  } else if (dm->ops->coarsen) {
    PetscInt i;

    /* Fall back to coarsening one level at a time */
    ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
    }
  } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateAggregates"
/*@
   DMCreateAggregates - Gets the aggregates that map between
   grids associated with two DMs.

   Collective on DM

   Input Parameters:
+  dmc - the coarse grid DM
-  dmf - the fine grid DM

   Output Parameters:
.
rest - the restriction matrix (transpose of the projection matrix)

   Level: intermediate

.keywords: interpolation, restriction, multigrid

.seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
@*/
PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
  PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
  /* NOTE(review): assumes the coarse DM provides getaggregates -- a NULL op here would be dereferenced; confirm all callers use DM types that implement it */
  ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
   DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

   Not Collective

   Input Parameters:
+  dm - the DM object
-  destroy - the destroy function

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
   DMSetApplicationContext - Set a user context into a DM object

   Not Collective

   Input Parameters:
+  dm - the DM object
-  ctx - the user context

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM stores only the pointer; ownership stays with the caller unless a destroy
     function is registered with DMSetApplicationContextDestroy() */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}

2331 #undef __FUNCT__ 2332 #define __FUNCT__ "DMGetApplicationContext" 2333 /*@ 2334 DMGetApplicationContext - Gets a user context from a DM object 2335 2336 Not Collective 2337 2338 Input Parameter: 2339 . dm - the DM object 2340 2341 Output Parameter: 2342 . ctx - the user context 2343 2344 Level: intermediate 2345 2346 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2347 2348 @*/ 2349 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2350 { 2351 PetscFunctionBegin; 2352 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2353 *(void**)ctx = dm->ctx; 2354 PetscFunctionReturn(0); 2355 } 2356 2357 #undef __FUNCT__ 2358 #define __FUNCT__ "DMSetVariableBounds" 2359 /*@C 2360 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2361 2362 Logically Collective on DM 2363 2364 Input Parameter: 2365 + dm - the DM object 2366 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2367 2368 Level: intermediate 2369 2370 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2371 DMSetJacobian() 2372 2373 @*/ 2374 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2375 { 2376 PetscFunctionBegin; 2377 dm->ops->computevariablebounds = f; 2378 PetscFunctionReturn(0); 2379 } 2380 2381 #undef __FUNCT__ 2382 #define __FUNCT__ "DMHasVariableBounds" 2383 /*@ 2384 DMHasVariableBounds - does the DM object have a variable bounds function? 2385 2386 Not Collective 2387 2388 Input Parameter: 2389 . dm - the DM object to destroy 2390 2391 Output Parameter: 2392 . 
flg - PETSC_TRUE if the variable bounds function exists 2393 2394 Level: developer 2395 2396 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2397 2398 @*/ 2399 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2400 { 2401 PetscFunctionBegin; 2402 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2403 PetscFunctionReturn(0); 2404 } 2405 2406 #undef __FUNCT__ 2407 #define __FUNCT__ "DMComputeVariableBounds" 2408 /*@C 2409 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2410 2411 Logically Collective on DM 2412 2413 Input Parameters: 2414 + dm - the DM object to destroy 2415 - x - current solution at which the bounds are computed 2416 2417 Output parameters: 2418 + xl - lower bound 2419 - xu - upper bound 2420 2421 Level: intermediate 2422 2423 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2424 2425 @*/ 2426 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2427 { 2428 PetscErrorCode ierr; 2429 2430 PetscFunctionBegin; 2431 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2432 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2433 if (dm->ops->computevariablebounds) { 2434 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2435 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2436 PetscFunctionReturn(0); 2437 } 2438 2439 #undef __FUNCT__ 2440 #define __FUNCT__ "DMHasColoring" 2441 /*@ 2442 DMHasColoring - does the DM object have a method of providing a coloring? 2443 2444 Not Collective 2445 2446 Input Parameter: 2447 . dm - the DM object 2448 2449 Output Parameter: 2450 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2451 2452 Level: developer 2453 2454 .seealso DMHasFunction(), DMCreateColoring() 2455 2456 @*/ 2457 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2458 { 2459 PetscFunctionBegin; 2460 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2461 PetscFunctionReturn(0); 2462 } 2463 2464 #undef __FUNCT__ 2465 #define __FUNCT__ "DMSetVec" 2466 /*@C 2467 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2468 2469 Collective on DM 2470 2471 Input Parameter: 2472 + dm - the DM object 2473 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2474 2475 Level: developer 2476 2477 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2478 2479 @*/ 2480 PetscErrorCode DMSetVec(DM dm,Vec x) 2481 { 2482 PetscErrorCode ierr; 2483 2484 PetscFunctionBegin; 2485 if (x) { 2486 if (!dm->x) { 2487 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2488 } 2489 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2490 } else if (dm->x) { 2491 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2492 } 2493 PetscFunctionReturn(0); 2494 } 2495 2496 PetscFunctionList DMList = NULL; 2497 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2498 2499 #undef __FUNCT__ 2500 #define __FUNCT__ "DMSetType" 2501 /*@C 2502 DMSetType - Builds a DM, for a particular DM implementation. 2503 2504 Collective on DM 2505 2506 Input Parameters: 2507 + dm - The DM object 2508 - method - The name of the DM type 2509 2510 Options Database Key: 2511 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2512 2513 Notes: 2514 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0); /* already this type; nothing to do */

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Destroy the old implementation's data before installing the new one */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  /* Registering all types guarantees the returned name is a registered type name */
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll();CHKERRQ(ierr);
  }
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed but never consulted below -- a same-type
     shortcut appears to have been intended but never wired up; confirm before relying on it */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2) See if a specialized converter is known to the desired DM class. */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): with the #if 0 block disabled, B created in step 2 is not destroyed on this error path -- confirm */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*--------------------------------------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - Adds a new DM component implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  DMRegister() may be called multiple times to add several user-defined DMs


  Sample usage:
.vb
    DMRegister("my_da", MyDMCreate);
.ve

  Then, your DM type can be chosen with the procedural interface via
.vb
    DMCreate(MPI_Comm, DM *);
    DMSetType(DM,"my_da");
.ve
   or at runtime via the option
.vb
    -dm_type my_da
.ve

  Level: advanced

.keywords: DM, register
.seealso: DMRegisterAll(), DMRegisterDestroy()

@*/
PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen()
           (only binary viewers are currently accepted; the check below rejects all others)

  Level: intermediate

  Notes:
  The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
  has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File header: a classid sentinel followed by the type name as a fixed 256-char field */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Let the implementation read its type-specific data, if it supports loading */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/******************************** FEM Support
**********************************/ 2773 2774 #undef __FUNCT__ 2775 #define __FUNCT__ "DMPrintCellVector" 2776 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) 2777 { 2778 PetscInt f; 2779 PetscErrorCode ierr; 2780 2781 PetscFunctionBegin; 2782 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2783 for (f = 0; f < len; ++f) { 2784 ierr = PetscPrintf(PETSC_COMM_SELF, " | %g |\n", (double)PetscRealPart(x[f]));CHKERRQ(ierr); 2785 } 2786 PetscFunctionReturn(0); 2787 } 2788 2789 #undef __FUNCT__ 2790 #define __FUNCT__ "DMPrintCellMatrix" 2791 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) 2792 { 2793 PetscInt f, g; 2794 PetscErrorCode ierr; 2795 2796 PetscFunctionBegin; 2797 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2798 for (f = 0; f < rows; ++f) { 2799 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2800 for (g = 0; g < cols; ++g) { 2801 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2802 } 2803 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2804 } 2805 PetscFunctionReturn(0); 2806 } 2807 2808 #undef __FUNCT__ 2809 #define __FUNCT__ "DMPrintLocalVec" 2810 PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X) 2811 { 2812 PetscMPIInt rank, numProcs; 2813 PetscInt p; 2814 PetscErrorCode ierr; 2815 2816 PetscFunctionBegin; 2817 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 2818 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr); 2819 ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr); 2820 for (p = 0; p < numProcs; ++p) { 2821 if (p == rank) { 2822 Vec x; 2823 2824 ierr = VecDuplicate(X, &x);CHKERRQ(ierr); 2825 ierr = VecCopy(X, x);CHKERRQ(ierr); 2826 ierr = VecChop(x, tol);CHKERRQ(ierr); 2827 ierr 
= VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr); 2828 ierr = VecDestroy(&x);CHKERRQ(ierr); 2829 ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr); 2830 } 2831 ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr); 2832 } 2833 PetscFunctionReturn(0); 2834 } 2835 2836 #undef __FUNCT__ 2837 #define __FUNCT__ "DMGetDefaultSection" 2838 /*@ 2839 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 2840 2841 Input Parameter: 2842 . dm - The DM 2843 2844 Output Parameter: 2845 . section - The PetscSection 2846 2847 Level: intermediate 2848 2849 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 2850 2851 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2852 @*/ 2853 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) 2854 { 2855 PetscFunctionBegin; 2856 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2857 PetscValidPointer(section, 2); 2858 *section = dm->defaultSection; 2859 PetscFunctionReturn(0); 2860 } 2861 2862 #undef __FUNCT__ 2863 #define __FUNCT__ "DMSetDefaultSection" 2864 /*@ 2865 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Take the reference before destroying the old section so setting the same section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Mirror the section's field structure onto the DM's field objects */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Lazily build the global section from the local section and the point SF */
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Rebuild the value layout to match the freshly created global section */
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2944 2945 Input Parameters: 2946 + dm - The DM 2947 - section - The PetscSection, or NULL 2948 2949 Level: intermediate 2950 2951 Note: Any existing Section will be destroyed 2952 2953 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2954 @*/ 2955 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2956 { 2957 PetscErrorCode ierr; 2958 2959 PetscFunctionBegin; 2960 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2961 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2962 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2963 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2964 dm->defaultGlobalSection = section; 2965 PetscFunctionReturn(0); 2966 } 2967 2968 #undef __FUNCT__ 2969 #define __FUNCT__ "DMGetDefaultSF" 2970 /*@ 2971 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2972 it is created from the default PetscSection layouts in the DM. 2973 2974 Input Parameter: 2975 . dm - The DM 2976 2977 Output Parameter: 2978 . sf - The PetscSF 2979 2980 Level: intermediate 2981 2982 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
.seealso: DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
{
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* nroots < 0 indicates the SF graph has never been set, so build it lazily */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* Without a default section there is nothing to build the SF from */
      *sf = NULL;
      PetscFunctionReturn(0);
    }
  }
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Note: Any previous SF is destroyed

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  /* NOTE(review): unlike DMSetPointSF(), no reference is taken here, so the DM
     steals ownership of sf from the caller -- confirm this asymmetry is intended */
  ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
.
localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves = 0, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of the owned (constrained) dofs to translate global offsets to owner ranks */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative gdof encodes a remotely owned point as -(gdof+1) */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc1(nleaves, &local);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves, &remote);CHKERRQ(ierr);
  /* Second pass: fill local indices (skipping constrained dofs) and remote (rank, index) pairs */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned: decode the offset and binary-search the layout for the owner rank */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size+1,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2);
        if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff);
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned: the root lives on this rank */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of the local and remote arrays (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetPointSF"
/*@
  DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  *sf = dm->sf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetPointSF"
/*@
  DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3159 3160 Input Parameters: 3161 + dm - The DM 3162 - sf - The PetscSF 3163 3164 Level: intermediate 3165 3166 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3167 @*/ 3168 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3169 { 3170 PetscErrorCode ierr; 3171 3172 PetscFunctionBegin; 3173 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3174 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3175 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3176 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3177 dm->sf = sf; 3178 PetscFunctionReturn(0); 3179 } 3180 3181 #undef __FUNCT__ 3182 #define __FUNCT__ "DMGetNumFields" 3183 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3184 { 3185 PetscFunctionBegin; 3186 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3187 PetscValidPointer(numFields, 2); 3188 *numFields = dm->numFields; 3189 PetscFunctionReturn(0); 3190 } 3191 3192 #undef __FUNCT__ 3193 #define __FUNCT__ "DMSetNumFields" 3194 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields) 3195 { 3196 PetscInt f; 3197 PetscErrorCode ierr; 3198 3199 PetscFunctionBegin; 3200 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3201 for (f = 0; f < dm->numFields; ++f) { 3202 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3203 } 3204 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3205 dm->numFields = numFields; 3206 ierr = PetscMalloc1(dm->numFields, &dm->fields);CHKERRQ(ierr); 3207 for (f = 0; f < dm->numFields; ++f) { 3208 ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr); 3209 } 3210 PetscFunctionReturn(0); 3211 } 3212 3213 #undef __FUNCT__ 3214 #define __FUNCT__ "DMGetField" 3215 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3216 { 3217 PetscFunctionBegin; 3218 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3219 PetscValidPointer(field, 2); 3220 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been 
setup in this DM. Call DMSetNumFields()"); 3221 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3222 *field = dm->fields[f]; 3223 PetscFunctionReturn(0); 3224 } 3225 3226 #undef __FUNCT__ 3227 #define __FUNCT__ "DMRestrictHook_Coordinates" 3228 PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx) 3229 { 3230 DM dm_coord,dmc_coord; 3231 PetscErrorCode ierr; 3232 Vec coords,ccoords; 3233 VecScatter scat; 3234 PetscFunctionBegin; 3235 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3236 ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr); 3237 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3238 ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr); 3239 if (coords && !ccoords) { 3240 ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr); 3241 ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr); 3242 ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3243 ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3244 ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr); 3245 ierr = VecScatterDestroy(&scat);CHKERRQ(ierr); 3246 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3247 } 3248 PetscFunctionReturn(0); 3249 } 3250 3251 #undef __FUNCT__ 3252 #define __FUNCT__ "DMSubDomainHook_Coordinates" 3253 static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx) 3254 { 3255 DM dm_coord,subdm_coord; 3256 PetscErrorCode ierr; 3257 Vec coords,ccoords,clcoords; 3258 VecScatter *scat_i,*scat_g; 3259 PetscFunctionBegin; 3260 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3261 ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr); 3262 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3263 ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr); 3264 if (coords && !ccoords) { 3265 ierr = 
DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr); 3266 ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr); 3267 ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr); 3268 ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3269 ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3270 ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3271 ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3272 ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr); 3273 ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr); 3274 ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr); 3275 ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr); 3276 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3277 ierr = VecDestroy(&clcoords);CHKERRQ(ierr); 3278 ierr = PetscFree(scat_i);CHKERRQ(ierr); 3279 ierr = PetscFree(scat_g);CHKERRQ(ierr); 3280 } 3281 PetscFunctionReturn(0); 3282 } 3283 3284 #undef __FUNCT__ 3285 #define __FUNCT__ "DMSetCoordinates" 3286 /*@ 3287 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3288 3289 Collective on DM 3290 3291 Input Parameters: 3292 + dm - the DM 3293 - c - coordinate vector 3294 3295 Note: 3296 The coordinates do include those for ghost points, which are in the local vector 3297 3298 Level: intermediate 3299 3300 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3301 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3302 @*/ 3303 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3304 { 3305 PetscErrorCode ierr; 3306 3307 PetscFunctionBegin; 3308 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3309 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3310 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3311 ierr = 
VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3312 dm->coordinates = c; 3313 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3314 ierr = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3315 ierr = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3316 PetscFunctionReturn(0); 3317 } 3318 3319 #undef __FUNCT__ 3320 #define __FUNCT__ "DMSetCoordinatesLocal" 3321 /*@ 3322 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3323 3324 Collective on DM 3325 3326 Input Parameters: 3327 + dm - the DM 3328 - c - coordinate vector 3329 3330 Note: 3331 The coordinates of ghost points can be set using DMSetCoordinates() 3332 followed by DMGetCoordinatesLocal(). This is intended to enable the 3333 setting of ghost coordinates outside of the domain. 3334 3335 Level: intermediate 3336 3337 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3338 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3339 @*/ 3340 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3341 { 3342 PetscErrorCode ierr; 3343 3344 PetscFunctionBegin; 3345 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3346 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3347 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3348 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3349 3350 dm->coordinatesLocal = c; 3351 3352 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3353 PetscFunctionReturn(0); 3354 } 3355 3356 #undef __FUNCT__ 3357 #define __FUNCT__ "DMGetCoordinates" 3358 /*@ 3359 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3360 3361 Not Collective 3362 3363 Input Parameter: 3364 . dm - the DM 3365 3366 Output Parameter: 3367 . 
c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the global vector from the cached local one if only the local form exists */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily scatter the ghosted local form from the cached global vector when needed */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateDM"
/*@
  DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
.
cdm - coordinate DM 3460 3461 Level: intermediate 3462 3463 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3464 .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3465 @*/ 3466 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3467 { 3468 PetscErrorCode ierr; 3469 3470 PetscFunctionBegin; 3471 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3472 PetscValidPointer(cdm,2); 3473 if (!dm->coordinateDM) { 3474 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3475 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3476 } 3477 *cdm = dm->coordinateDM; 3478 PetscFunctionReturn(0); 3479 } 3480 3481 #undef __FUNCT__ 3482 #define __FUNCT__ "DMSetCoordinateDM" 3483 /*@ 3484 DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates 3485 3486 Logically Collective on DM 3487 3488 Input Parameters: 3489 + dm - the DM 3490 - cdm - coordinate DM 3491 3492 Level: intermediate 3493 3494 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3495 .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3496 @*/ 3497 PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm) 3498 { 3499 PetscErrorCode ierr; 3500 3501 PetscFunctionBegin; 3502 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3503 PetscValidHeaderSpecific(cdm,DM_CLASSID,2); 3504 ierr = DMDestroy(&dm->coordinateDM);CHKERRQ(ierr); 3505 dm->coordinateDM = cdm; 3506 ierr = PetscObjectReference((PetscObject) dm->coordinateDM);CHKERRQ(ierr); 3507 PetscFunctionReturn(0); 3508 } 3509 3510 #undef __FUNCT__ 3511 #define __FUNCT__ "DMGetCoordinateSection" 3512 /*@ 3513 DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh. 3514 3515 Not Collective 3516 3517 Input Parameter: 3518 . 
dm - The DM object 3519 3520 Output Parameter: 3521 . section - The PetscSection object 3522 3523 Level: intermediate 3524 3525 .keywords: mesh, coordinates 3526 .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection() 3527 @*/ 3528 PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section) 3529 { 3530 DM cdm; 3531 PetscErrorCode ierr; 3532 3533 PetscFunctionBegin; 3534 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3535 PetscValidPointer(section, 2); 3536 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3537 ierr = DMGetDefaultSection(cdm, section);CHKERRQ(ierr); 3538 PetscFunctionReturn(0); 3539 } 3540 3541 #undef __FUNCT__ 3542 #define __FUNCT__ "DMSetCoordinateSection" 3543 /*@ 3544 DMSetCoordinateSection - Set the layout of coordinate values over the mesh. 3545 3546 Not Collective 3547 3548 Input Parameters: 3549 + dm - The DM object 3550 - section - The PetscSection object 3551 3552 Level: intermediate 3553 3554 .keywords: mesh, coordinates 3555 .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection() 3556 @*/ 3557 PetscErrorCode DMSetCoordinateSection(DM dm, PetscSection section) 3558 { 3559 DM cdm; 3560 PetscErrorCode ierr; 3561 3562 PetscFunctionBegin; 3563 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3564 PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 3565 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3566 ierr = DMSetDefaultSection(cdm, section);CHKERRQ(ierr); 3567 PetscFunctionReturn(0); 3568 } 3569 3570 #undef __FUNCT__ 3571 #define __FUNCT__ "DMLocatePoints" 3572 /*@ 3573 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3574 3575 Not collective 3576 3577 Input Parameters: 3578 + dm - The DM 3579 - v - The Vec of points 3580 3581 Output Parameter: 3582 . 
cells - The local cell numbers for cells which contain the points

  Level: developer

.keywords: point location, mesh
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(v,VEC_CLASSID,2);
  PetscValidPointer(cells,3);
  /* Point location is delegated entirely to the DM implementation */
  if (!dm->ops->locatepoints) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
  ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}