#include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/
#include <petscsf.h>

PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;

#undef __FUNCT__
#define __FUNCT__ "DMViewFromOptions"
/*
  DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed.

  Collective on DM

  Input Parameters:
+ dm         - the DM
. prefix     - prefix to use for viewing, or NULL to use prefix of 'dm'
- optionname - option to activate viewing

  Level: intermediate

.keywords: DM, view, options, database
.seealso: VecViewFromOptions(), MatViewFromOptions()
*/
PetscErrorCode DMViewFromOptions(DM dm,const char prefix[],const char optionname[])
{
  PetscErrorCode    ierr;
  PetscBool         flg;
  PetscViewer       viewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  /* Look the viewer up under the explicit prefix if one was given, otherwise under the DM's own options prefix */
  if (prefix) {
    ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
  } else {
    ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
  }
  if (flg) {
    /* The option was present: view with the requested format, then release the viewer obtained above */
    ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
    ierr = DMView(dm,viewer);CHKERRQ(ierr);
    ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = NULL;
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  /* With static libraries the packages must be initialized explicitly before any class is created */
  ierr = VecInitializePackage();CHKERRQ(ierr);
  ierr = MatInitializePackage();CHKERRQ(ierr);
  ierr = DMInitializePackage();CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = NULL;
  v->ltogmapb     = NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = NULL;
  v->defaultGlobalSection = NULL;
  {
    PetscInt i;
    /* NOTE(review): 10 is presumably the declared size of nullspaceConstructors[] in dmimpl.h
       (DMSetNullSpaceConstructor() uses the same literal) — confirm against the struct definition */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = NULL;
    }
  }
  v->numFields = 0;
  v->fields    = NULL;
  /* Default backing implementations; overridable via DMSetVecType()/DMSetMatType() or the options database */
  ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr);
  ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr);
  *dm = v;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMClone"
/*@
  DMClone - Creates a DM object with the same topology as the original.

  Collective on MPI_Comm

  Input Parameter:
. dm - The original DM object

  Output Parameter:
. 
newdm - The new DM object 119 120 Level: beginner 121 122 .keywords: DM, topology, create 123 @*/ 124 PetscErrorCode DMClone(DM dm, DM *newdm) 125 { 126 PetscSF sf; 127 Vec coords; 128 void *ctx; 129 PetscErrorCode ierr; 130 131 PetscFunctionBegin; 132 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 133 PetscValidPointer(newdm,2); 134 ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr); 135 if (dm->ops->clone) { 136 ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr); 137 } 138 ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr); 139 ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr); 140 ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr); 141 ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr); 142 ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr); 143 if (coords) { 144 ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr); 145 } else { 146 ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr); 147 if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);} 148 } 149 PetscFunctionReturn(0); 150 } 151 152 #undef __FUNCT__ 153 #define __FUNCT__ "DMSetVecType" 154 /*@C 155 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 156 157 Logically Collective on DMDA 158 159 Input Parameter: 160 + da - initial distributed array 161 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 162 163 Options Database: 164 . 
-dm_vec_type ctype 165 166 Level: intermediate 167 168 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType() 169 @*/ 170 PetscErrorCode DMSetVecType(DM da,VecType ctype) 171 { 172 PetscErrorCode ierr; 173 174 PetscFunctionBegin; 175 PetscValidHeaderSpecific(da,DM_CLASSID,1); 176 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 177 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 178 PetscFunctionReturn(0); 179 } 180 181 #undef __FUNCT__ 182 #define __FUNCT__ "DMGetVecType" 183 /*@C 184 DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 185 186 Logically Collective on DMDA 187 188 Input Parameter: 189 . da - initial distributed array 190 191 Output Parameter: 192 . ctype - the vector type 193 194 Level: intermediate 195 196 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 197 @*/ 198 PetscErrorCode DMGetVecType(DM da,VecType *ctype) 199 { 200 PetscFunctionBegin; 201 PetscValidHeaderSpecific(da,DM_CLASSID,1); 202 *ctype = da->vectype; 203 PetscFunctionReturn(0); 204 } 205 206 #undef __FUNCT__ 207 #define __FUNCT__ "VecGetDM" 208 /*@ 209 VecGetDM - Gets the DM defining the data layout of the vector 210 211 Not collective 212 213 Input Parameter: 214 . v - The Vec 215 216 Output Parameter: 217 . 
dm - The DM 218 219 Level: intermediate 220 221 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 222 @*/ 223 PetscErrorCode VecGetDM(Vec v, DM *dm) 224 { 225 PetscErrorCode ierr; 226 227 PetscFunctionBegin; 228 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 229 PetscValidPointer(dm,2); 230 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 231 PetscFunctionReturn(0); 232 } 233 234 #undef __FUNCT__ 235 #define __FUNCT__ "VecSetDM" 236 /*@ 237 VecSetDM - Sets the DM defining the data layout of the vector 238 239 Not collective 240 241 Input Parameters: 242 + v - The Vec 243 - dm - The DM 244 245 Level: intermediate 246 247 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 248 @*/ 249 PetscErrorCode VecSetDM(Vec v, DM dm) 250 { 251 PetscErrorCode ierr; 252 253 PetscFunctionBegin; 254 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 255 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 256 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 257 PetscFunctionReturn(0); 258 } 259 260 #undef __FUNCT__ 261 #define __FUNCT__ "DMSetMatType" 262 /*@C 263 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 264 265 Logically Collective on DM 266 267 Input Parameter: 268 + dm - the DM context 269 . ctype - the matrix type 270 271 Options Database: 272 . 
-dm_mat_type ctype 273 274 Level: intermediate 275 276 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType() 277 @*/ 278 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 279 { 280 PetscErrorCode ierr; 281 282 PetscFunctionBegin; 283 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 284 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 285 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 286 PetscFunctionReturn(0); 287 } 288 289 #undef __FUNCT__ 290 #define __FUNCT__ "DMGetMatType" 291 /*@C 292 DMGetMatType - Gets the type of matrix created with DMCreateMatrix() 293 294 Logically Collective on DM 295 296 Input Parameter: 297 . dm - the DM context 298 299 Output Parameter: 300 . ctype - the matrix type 301 302 Options Database: 303 . -dm_mat_type ctype 304 305 Level: intermediate 306 307 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType() 308 @*/ 309 PetscErrorCode DMGetMatType(DM dm,MatType *ctype) 310 { 311 PetscFunctionBegin; 312 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 313 *ctype = dm->mattype; 314 PetscFunctionReturn(0); 315 } 316 317 #undef __FUNCT__ 318 #define __FUNCT__ "MatGetDM" 319 /*@ 320 MatGetDM - Gets the DM defining the data layout of the matrix 321 322 Not collective 323 324 Input Parameter: 325 . A - The Mat 326 327 Output Parameter: 328 . 
dm - The DM 329 330 Level: intermediate 331 332 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 333 @*/ 334 PetscErrorCode MatGetDM(Mat A, DM *dm) 335 { 336 PetscErrorCode ierr; 337 338 PetscFunctionBegin; 339 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 340 PetscValidPointer(dm,2); 341 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 342 PetscFunctionReturn(0); 343 } 344 345 #undef __FUNCT__ 346 #define __FUNCT__ "MatSetDM" 347 /*@ 348 MatSetDM - Sets the DM defining the data layout of the matrix 349 350 Not collective 351 352 Input Parameters: 353 + A - The Mat 354 - dm - The DM 355 356 Level: intermediate 357 358 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 359 @*/ 360 PetscErrorCode MatSetDM(Mat A, DM dm) 361 { 362 PetscErrorCode ierr; 363 364 PetscFunctionBegin; 365 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 366 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 367 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 368 PetscFunctionReturn(0); 369 } 370 371 #undef __FUNCT__ 372 #define __FUNCT__ "DMSetOptionsPrefix" 373 /*@C 374 DMSetOptionsPrefix - Sets the prefix used for searching for all 375 DMDA options in the database. 376 377 Logically Collective on DMDA 378 379 Input Parameter: 380 + da - the DMDA context 381 - prefix - the prefix to prepend to all option names 382 383 Notes: 384 A hyphen (-) must NOT be given at the beginning of the prefix name. 385 The first character of all runtime options is AUTOMATICALLY the hyphen. 
386 387 Level: advanced 388 389 .keywords: DMDA, set, options, prefix, database 390 391 .seealso: DMSetFromOptions() 392 @*/ 393 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 394 { 395 PetscErrorCode ierr; 396 397 PetscFunctionBegin; 398 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 399 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 400 PetscFunctionReturn(0); 401 } 402 403 #undef __FUNCT__ 404 #define __FUNCT__ "DMDestroy" 405 /*@ 406 DMDestroy - Destroys a vector packer or DMDA. 407 408 Collective on DM 409 410 Input Parameter: 411 . dm - the DM object to destroy 412 413 Level: developer 414 415 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 416 417 @*/ 418 PetscErrorCode DMDestroy(DM *dm) 419 { 420 PetscInt i, cnt = 0, f; 421 DMNamedVecLink nlink,nnext; 422 PetscErrorCode ierr; 423 424 PetscFunctionBegin; 425 if (!*dm) PetscFunctionReturn(0); 426 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 427 428 /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */ 429 for (f = 0; f < (*dm)->numFields; ++f) { 430 ierr = PetscObjectCompose((*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr); 431 ierr = PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr); 432 ierr = PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr); 433 } 434 /* count all the circular references of DM and its contained Vecs */ 435 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 436 if ((*dm)->localin[i]) cnt++; 437 if ((*dm)->globalin[i]) cnt++; 438 } 439 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 440 if ((*dm)->x) { 441 DM obj; 442 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 443 if (obj == *dm) cnt++; 444 } 445 for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++; 446 if ((*dm)->x) { 447 DM obj; 448 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 449 if (obj == *dm) cnt++; 450 } 451 452 
if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 453 /* 454 Need this test because the dm references the vectors that 455 reference the dm, so destroying the dm calls destroy on the 456 vectors that cause another destroy on the dm 457 */ 458 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 459 ((PetscObject) (*dm))->refct = 0; 460 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 461 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 462 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 463 } 464 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 465 nnext = nlink->next; 466 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 467 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 468 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 469 ierr = PetscFree(nlink);CHKERRQ(ierr); 470 } 471 (*dm)->namedglobal = NULL; 472 473 for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */ 474 nnext = nlink->next; 475 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 476 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 477 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 478 ierr = PetscFree(nlink);CHKERRQ(ierr); 479 } 480 (*dm)->namedlocal = NULL; 481 482 /* Destroy the list of hooks */ 483 { 484 DMCoarsenHookLink link,next; 485 for (link=(*dm)->coarsenhook; link; link=next) { 486 next = link->next; 487 ierr = PetscFree(link);CHKERRQ(ierr); 488 } 489 (*dm)->coarsenhook = NULL; 490 } 491 { 492 DMRefineHookLink link,next; 493 for (link=(*dm)->refinehook; link; link=next) { 494 next = link->next; 495 ierr = PetscFree(link);CHKERRQ(ierr); 496 } 497 (*dm)->refinehook = NULL; 498 } 499 { 500 
DMSubDomainHookLink link,next; 501 for (link=(*dm)->subdomainhook; link; link=next) { 502 next = link->next; 503 ierr = PetscFree(link);CHKERRQ(ierr); 504 } 505 (*dm)->subdomainhook = NULL; 506 } 507 { 508 DMGlobalToLocalHookLink link,next; 509 for (link=(*dm)->gtolhook; link; link=next) { 510 next = link->next; 511 ierr = PetscFree(link);CHKERRQ(ierr); 512 } 513 (*dm)->gtolhook = NULL; 514 } 515 /* Destroy the work arrays */ 516 { 517 DMWorkLink link,next; 518 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 519 for (link=(*dm)->workin; link; link=next) { 520 next = link->next; 521 ierr = PetscFree(link->mem);CHKERRQ(ierr); 522 ierr = PetscFree(link);CHKERRQ(ierr); 523 } 524 (*dm)->workin = NULL; 525 } 526 527 ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr); 528 ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr); 529 ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr); 530 531 if ((*dm)->ctx && (*dm)->ctxdestroy) { 532 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 533 } 534 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 535 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 536 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 537 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 538 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 539 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 540 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 541 542 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 543 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 544 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 545 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 546 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 547 548 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 549 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 550 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 551 552 for (f = 0; f 
< (*dm)->numFields; ++f) { 553 ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 554 } 555 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 556 /* if memory was published with SAWs then destroy it */ 557 ierr = PetscObjectSAWsViewOff((PetscObject)*dm);CHKERRQ(ierr); 558 559 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 560 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 561 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 562 PetscFunctionReturn(0); 563 } 564 565 #undef __FUNCT__ 566 #define __FUNCT__ "DMSetUp" 567 /*@ 568 DMSetUp - sets up the data structures inside a DM object 569 570 Collective on DM 571 572 Input Parameter: 573 . dm - the DM object to setup 574 575 Level: developer 576 577 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 578 579 @*/ 580 PetscErrorCode DMSetUp(DM dm) 581 { 582 PetscErrorCode ierr; 583 584 PetscFunctionBegin; 585 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 586 if (dm->setupcalled) PetscFunctionReturn(0); 587 if (dm->ops->setup) { 588 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 589 } 590 dm->setupcalled = PETSC_TRUE; 591 PetscFunctionReturn(0); 592 } 593 594 #undef __FUNCT__ 595 #define __FUNCT__ "DMSetFromOptions" 596 /*@ 597 DMSetFromOptions - sets parameters in a DM from the options database 598 599 Collective on DM 600 601 Input Parameter: 602 . dm - the DM object to set options for 603 604 Options Database: 605 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 606 . -dm_vec_type <type> type of vector to create inside DM 607 . 
-dm_mat_type <type> type of matrix to create inside DM 608 - -dm_coloring_type <global or ghosted> 609 610 Level: developer 611 612 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 613 614 @*/ 615 PetscErrorCode DMSetFromOptions(DM dm) 616 { 617 char typeName[256]; 618 PetscBool flg; 619 PetscErrorCode ierr; 620 621 PetscFunctionBegin; 622 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 623 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 624 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 625 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 626 if (flg) { 627 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 628 } 629 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 630 if (flg) { 631 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 632 } 633 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 634 if (dm->ops->setfromoptions) { 635 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 636 } 637 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 638 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 639 ierr = PetscOptionsEnd();CHKERRQ(ierr); 640 ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr); 641 PetscFunctionReturn(0); 642 } 643 644 #undef __FUNCT__ 645 #define __FUNCT__ "DMView" 646 /*@C 647 DMView - Views a vector packer or DMDA. 
648 649 Collective on DM 650 651 Input Parameter: 652 + dm - the DM object to view 653 - v - the viewer 654 655 Level: developer 656 657 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 658 659 @*/ 660 PetscErrorCode DMView(DM dm,PetscViewer v) 661 { 662 PetscErrorCode ierr; 663 PetscBool isbinary; 664 665 PetscFunctionBegin; 666 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 667 if (!v) { 668 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr); 669 } 670 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 671 if (isbinary) { 672 PetscInt classid = DM_FILE_CLASSID; 673 char type[256]; 674 675 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 676 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 677 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 678 } 679 if (dm->ops->view) { 680 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 681 } 682 PetscFunctionReturn(0); 683 } 684 685 #undef __FUNCT__ 686 #define __FUNCT__ "DMCreateGlobalVector" 687 /*@ 688 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 689 690 Collective on DM 691 692 Input Parameter: 693 . dm - the DM object 694 695 Output Parameter: 696 . vec - the global vector 697 698 Level: beginner 699 700 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 701 702 @*/ 703 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 704 { 705 PetscErrorCode ierr; 706 707 PetscFunctionBegin; 708 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 709 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 710 PetscFunctionReturn(0); 711 } 712 713 #undef __FUNCT__ 714 #define __FUNCT__ "DMCreateLocalVector" 715 /*@ 716 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 717 718 Not Collective 719 720 Input Parameter: 721 . 
dm - the DM object

    Output Parameter:
.   vec - the local vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* The mapping is built lazily on first request and cached in dm->ltogmap */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Section-based DM: build the local->global index array from the
         local and global default sections */
      PetscInt *ltog;  /* NOTE(review): shadows the ltog parameter — confusing but harmless; the parameter is only written at the end */
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        /* Dofs of a point are contiguous: global index = global offset + component */
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array allocated above */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
    } else {
      /* No default section: defer to the DM implementation */
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  da - the distributed array that provides the mapping

   Output Parameter:
.  ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* Built lazily and cached in dm->ltogmapb */
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);  /* NOTE(review): this errors if dm->bs < 1, so block-less DMs cannot use this routine — confirm intended */
    if (bs > 1) {
      if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* bs == 1: the blocked mapping is identical to the pointwise one; share it
         (extra reference taken because both dm->ltogmap and dm->ltogmapb are destroyed) */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* dm->bs defaults to 1 at DMCreate(); a value < 1 means an implementation reset it and never provided one */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameters:
+   mat - the interpolation
-   vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
894 895 896 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 897 898 @*/ 899 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 900 { 901 PetscErrorCode ierr; 902 903 PetscFunctionBegin; 904 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 905 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 906 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 907 PetscFunctionReturn(0); 908 } 909 910 #undef __FUNCT__ 911 #define __FUNCT__ "DMCreateInjection" 912 /*@ 913 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 914 915 Collective on DM 916 917 Input Parameter: 918 + dm1 - the DM object 919 - dm2 - the second, finer DM object 920 921 Output Parameter: 922 . ctx - the injection 923 924 Level: developer 925 926 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 927 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 928 929 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 930 931 @*/ 932 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 933 { 934 PetscErrorCode ierr; 935 936 PetscFunctionBegin; 937 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 938 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 939 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 940 PetscFunctionReturn(0); 941 } 942 943 #undef __FUNCT__ 944 #define __FUNCT__ "DMCreateColoring" 945 /*@ 946 DMCreateColoring - Gets coloring for a DM 947 948 Collective on DM 949 950 Input Parameter: 951 + dm - the DM object 952 - ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 953 954 Output Parameter: 955 . 
coloring - the coloring 956 957 Level: developer 958 959 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType() 960 961 @*/ 962 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring) 963 { 964 PetscErrorCode ierr; 965 966 PetscFunctionBegin; 967 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 968 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 969 ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr); 970 PetscFunctionReturn(0); 971 } 972 973 #undef __FUNCT__ 974 #define __FUNCT__ "DMCreateMatrix" 975 /*@ 976 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 977 978 Collective on DM 979 980 Input Parameter: 981 . dm - the DM object 982 983 Output Parameter: 984 . mat - the empty Jacobian 985 986 Level: beginner 987 988 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 989 do not need to do it yourself. 990 991 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 992 the nonzero pattern call DMDASetMatPreallocateOnly() 993 994 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 995 internally by PETSc. 996 997 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 998 the indices for the global numbering for DMDAs which is complicated. 
999 1000 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType() 1001 1002 @*/ 1003 PetscErrorCode DMCreateMatrix(DM dm,Mat *mat) 1004 { 1005 PetscErrorCode ierr; 1006 1007 PetscFunctionBegin; 1008 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1009 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 1010 ierr = MatInitializePackage();CHKERRQ(ierr); 1011 #endif 1012 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1013 PetscValidPointer(mat,3); 1014 ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr); 1015 PetscFunctionReturn(0); 1016 } 1017 1018 #undef __FUNCT__ 1019 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 1020 /*@ 1021 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 1022 preallocated but the nonzero structure and zero values will not be set. 1023 1024 Logically Collective on DMDA 1025 1026 Input Parameter: 1027 + dm - the DM 1028 - only - PETSC_TRUE if only want preallocation 1029 1030 Level: developer 1031 .seealso DMCreateMatrix() 1032 @*/ 1033 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 1034 { 1035 PetscFunctionBegin; 1036 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1037 dm->prealloc_only = only; 1038 PetscFunctionReturn(0); 1039 } 1040 1041 #undef __FUNCT__ 1042 #define __FUNCT__ "DMGetWorkArray" 1043 /*@C 1044 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1045 1046 Not Collective 1047 1048 Input Parameters: 1049 + dm - the DM object 1050 . count - The minium size 1051 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1052 1053 Output Parameter: 1054 . 
  array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse a link from the checked-in free list when possible; otherwise allocate a new one */
  if (dm->workin) {
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the cached buffer only when the request exceeds its current capacity */
  if (size*count > link->bytes) {
    ierr        = PetscFree(link->mem);CHKERRQ(ierr);
    ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Move the link onto the checked-out list so DMRestoreWorkArray() can find it by address */
  link->next   = dm->workout;
  dm->workout  = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
.
  array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Locate the link holding this pointer on the checked-out list and move it back to the checked-in list;
     the caller's pointer is nulled to prevent use after return */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = NULL;
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetNullSpaceConstructor"
/* Registers a per-field callback used to build the null space for that field.
   The constructor table is a fixed array of 10 slots (see DMCreate()). */
PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
  dm->nullspaceConstructors[field] = nullsp;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays.
In particular, every entry of names should be freed with 1156 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1157 PetscFree(). 1158 1159 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1160 @*/ 1161 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1162 { 1163 PetscSection section, sectionGlobal; 1164 PetscErrorCode ierr; 1165 1166 PetscFunctionBegin; 1167 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1168 if (numFields) { 1169 PetscValidPointer(numFields,2); 1170 *numFields = 0; 1171 } 1172 if (fieldNames) { 1173 PetscValidPointer(fieldNames,3); 1174 *fieldNames = NULL; 1175 } 1176 if (fields) { 1177 PetscValidPointer(fields,4); 1178 *fields = NULL; 1179 } 1180 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1181 if (section) { 1182 PetscInt *fieldSizes, **fieldIndices; 1183 PetscInt nF, f, pStart, pEnd, p; 1184 1185 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1186 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1187 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr); 1188 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1189 for (f = 0; f < nF; ++f) { 1190 fieldSizes[f] = 0; 1191 } 1192 for (p = pStart; p < pEnd; ++p) { 1193 PetscInt gdof; 1194 1195 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1196 if (gdof > 0) { 1197 for (f = 0; f < nF; ++f) { 1198 PetscInt fdof, fcdof; 1199 1200 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1201 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1202 fieldSizes[f] += fdof-fcdof; 1203 } 1204 } 1205 } 1206 for (f = 0; f < nF; ++f) { 1207 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1208 fieldSizes[f] = 0; 1209 } 1210 for (p = pStart; p < pEnd; ++p) { 1211 PetscInt gdof, goff; 
1212 1213 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1214 if (gdof > 0) { 1215 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1216 for (f = 0; f < nF; ++f) { 1217 PetscInt fdof, fcdof, fc; 1218 1219 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1220 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1221 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1222 fieldIndices[f][fieldSizes[f]] = goff++; 1223 } 1224 } 1225 } 1226 } 1227 if (numFields) *numFields = nF; 1228 if (fieldNames) { 1229 ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr); 1230 for (f = 0; f < nF; ++f) { 1231 const char *fieldName; 1232 1233 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1234 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1235 } 1236 } 1237 if (fields) { 1238 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1239 for (f = 0; f < nF; ++f) { 1240 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1241 } 1242 } 1243 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1244 } else if (dm->ops->createfieldis) { 1245 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1246 } 1247 PetscFunctionReturn(0); 1248 } 1249 1250 1251 #undef __FUNCT__ 1252 #define __FUNCT__ "DMCreateFieldDecomposition" 1253 /*@C 1254 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1255 corresponding to different fields: each IS contains the global indices of the dofs of the 1256 corresponding field. The optional list of DMs define the DM for each subproblem. 1257 Generalizes DMCreateFieldIS(). 1258 1259 Not collective 1260 1261 Input Parameter: 1262 . 
dm - the DM object 1263 1264 Output Parameters: 1265 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1266 . namelist - The name for each field (or NULL if not requested) 1267 . islist - The global indices for each field (or NULL if not requested) 1268 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1269 1270 Level: intermediate 1271 1272 Notes: 1273 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1274 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1275 and all of the arrays should be freed with PetscFree(). 1276 1277 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1278 @*/ 1279 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1280 { 1281 PetscErrorCode ierr; 1282 1283 PetscFunctionBegin; 1284 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1285 if (len) { 1286 PetscValidPointer(len,2); 1287 *len = 0; 1288 } 1289 if (namelist) { 1290 PetscValidPointer(namelist,3); 1291 *namelist = 0; 1292 } 1293 if (islist) { 1294 PetscValidPointer(islist,4); 1295 *islist = 0; 1296 } 1297 if (dmlist) { 1298 PetscValidPointer(dmlist,5); 1299 *dmlist = 0; 1300 } 1301 /* 1302 Is it a good idea to apply the following check across all impls? 1303 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1304 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1305 */ 1306 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1307 if (!dm->ops->createfielddecomposition) { 1308 PetscSection section; 1309 PetscInt numFields, f; 1310 1311 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1312 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1313 if (section && numFields && dm->ops->createsubdm) { 1314 *len = numFields; 1315 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1316 for (f = 0; f < numFields; ++f) { 1317 const char *fieldName; 1318 1319 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1320 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1321 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1322 } 1323 } else { 1324 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1325 /* By default there are no DMs associated with subproblems. */ 1326 if (dmlist) *dmlist = NULL; 1327 } 1328 } else { 1329 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1330 } 1331 PetscFunctionReturn(0); 1332 } 1333 1334 #undef __FUNCT__ 1335 #define __FUNCT__ "DMCreateSubDM" 1336 /*@C 1337 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1338 The fields are defined by DMCreateFieldIS(). 1339 1340 Not collective 1341 1342 Input Parameters: 1343 + dm - the DM object 1344 . numFields - number of fields in this subproblem 1345 - len - The number of subproblems in the decomposition (or NULL if not requested) 1346 1347 Output Parameters: 1348 . 
  is - The global indices for the subproblem (or NULL if not requested)
- subdm - The DM for the subproblem (or NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  /* is and subdm are both optional outputs; validate only when requested */
  if (is) PetscValidPointer(is,4);
  if (subdm) PetscValidPointer(subdm,5);
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len         - The number of subproblems in the domain decomposition (or NULL if not requested)
. namelist    - The name for each subdomain (or NULL if not requested)
. innerislist - The global indices for each inner subdomain (or NULL, if not requested)
.
  outerislist - The global indices for each outer subdomain (or NULL, if not requested)
- dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Every output is optional; initialize only those that were requested */
  if (len)         {PetscValidPointer(len,2);         *len         = 0;}
  if (namelist)    {PetscValidPointer(namelist,3);    *namelist    = NULL;}
  if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;}
  if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;}
  if (dmlist)      {PetscValidPointer(dmlist,6);      *dmlist      = NULL;}
  /*
    Is it a good idea to apply the following check across all impls?
    Perhaps some impls can have a well-defined decomposition before DMSetUp?
    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
  */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  if (dm->ops->createdomaindecomposition) {
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        (*dmlist)[i]->ctx = dm->ctx; /* subdomain DMs share the parent's user context */
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecompositionScatters"
/*@C
  DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector

  Not collective

  Input Parameters:
+ dm - the DM object
. n  - the number of subdomain scatters
- subdms - the local subdomains

  Output Parameters:
+ n     - the number of scatters returned
. iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
. oscat - scatter from global vector to overlapping global vector entries on subdomain
- gscat - scatter from global vector to local vector on subdomain (fills in ghosts)

  Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
  of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
  of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
  solution and residual data.

  Level: developer

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(subdms,3);
  /* Scatter construction is entirely implementation-defined; there is no generic fallback */
  if (dm->ops->createddscatters) {
    ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRefine"
/*@
    DMRefine - Refines a DM object

    Collective on DM

    Input Parameter:
+   dm - the DM object
-   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

    Output Parameter:
.
   dmf - the refined DM, or NULL

    Note: If no refinement was done, the return value is NULL

    Level: developer

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
@*/
PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode  ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* NOTE(review): dm->ops->refine is called without a NULL check, unlike e.g. DMCreateColoring() -- confirm all types set it */
  ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
  if (*dmf) {
    /* The fine DM inherits the matrix-creation routine, user context, and level counters of the coarse DM */
    (*dmf)->ops->creatematrix = dm->ops->creatematrix;

    ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);

    (*dmf)->ctx       = dm->ctx;
    (*dmf)->leveldown = dm->leveldown;
    (*dmf)->levelup   = dm->levelup + 1;

    ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
    /* Run every registered refine hook so user data can be propagated to the new level */
    for (link=dm->refinehook; link; link=link->next) {
      if (link->refinehook) {
        ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRefineHookAdd"
/*@C
   DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

   Logically Collective

   Input Arguments:
+  coarse - coarse DM on which to run the hooks when refining to a finer level
.  refinehook - function to run when setting up a coarser level
.  interphook - function to run to update data on finer levels (once per SNESSolve())
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence of refinehook:
$    refinehook(DM coarse,DM fine,void *ctx);

+  coarse - coarse level DM
.
  fine - fine level DM to interpolate problem to
-  ctx - optional user-defined function context

   Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+  coarse - coarse level DM
.  interp - matrix interpolating a coarse-level solution to the finer grid
.  fine - fine level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

   If this function is called multiple times, the hooks will be run in the order they are added.

   This function is currently not available from Fortran.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode   ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order */
  ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx        = ctx;
  link->next       = NULL;
  *p               = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMInterpolate"
/*@
   DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()

   Collective if any hooks are

   Input Arguments:
+  coarse - coarser DM to use as a base
.
  interp - interpolation matrix, apply using MatInterpolate()
-  fine - finer DM to update

   Level: developer

.seealso: DMRefineHookAdd(), MatInterpolate()
@*/
PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
{
  PetscErrorCode   ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  /* Runs the interphook of every hook registered on the fine DM, in registration order */
  for (link=fine->refinehook; link; link=link->next) {
    if (link->interphook) {
      ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetRefineLevel"
/*@
    DMGetRefineLevel - Gets the number of refinements that have generated this DM.

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   level - number of refinements

    Level: developer

.seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *level = dm->levelup;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@C
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.
  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
-  ctx - optional user-defined function context

   Level: advanced

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = NULL;
  *p              = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.
   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user hooks before starting the communication */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF broadcast path only supports insertion */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    /* Arrays are restored before the Bcast completes; DMGlobalToLocalEnd() re-acquires them */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* *_ALL_VALUES modes are mapped to their plain counterparts for the implementation */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.
   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The SF broadcast path only supports insertion */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run user hooks after the communication has completed */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation.
           If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the InsertMode onto the MPI reduction used by the SF reduce */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    /* Arrays are restored before the Reduce completes; DMLocalToGlobalEnd() re-acquires them */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.
   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()

@*/
PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Must match the op chosen in DMLocalToGlobalBegin() for the same mode */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToLocalBegin"
/*@
   DMLocalToLocalBegin - Maps from a local vector (including ghost points
   that contain irrelevant values) to another local vector where the ghost
   points in the second are set correctly. Must be followed by DMLocalToLocalEnd().

   Neighbor-wise Collective on DM and Vec

   Input Parameters:
+  dm - the DM object
.  g - the original local vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.
  l  - the local vector with correct ghost values

   Level: intermediate

   Notes:
   The local vectors used here need not be the same as those
   obtained from DMCreateLocalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DM originating vectors.

.keywords: DM, local-to-local, begin
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* *_ALL_VALUES modes are mapped to their plain counterparts for the implementation */
  ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToLocalEnd"
/*@
   DMLocalToLocalEnd - Maps from a local vector (including ghost points
   that contain irrelevant values) to another local vector where the ghost
   points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().

   Neighbor-wise Collective on DM and Vec

   Input Parameters:
+  dm - the DM object
.  g - the original local vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l  - the local vector with correct ghost values

   Level: intermediate

   Notes:
   The local vectors used here need not be the same as those
   obtained from DMCreateLocalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DM originating vectors.
.keywords: DM, local-to-local, end
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The *_ALL_VALUES modes are translated to the plain modes understood by the type-specific implementation */
  ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCoarsen"
/*@
   DMCoarsen - Coarsens a DM object

   Collective on DM

   Input Parameters:
+  dm - the DM object
-  comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

   Output Parameter:
.  dmc - the coarsened DM

   Level: developer

.seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
  /* The coarse DM inherits the matrix constructor, Fortran function pointers,
     user context, matrix type, and level counters of the fine DM */
  (*dmc)->ops->creatematrix = dm->ops->creatematrix;
  ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
  (*dmc)->ctx       = dm->ctx;
  (*dmc)->levelup   = dm->levelup;
  (*dmc)->leveldown = dm->leveldown + 1;
  ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
  /* Run any user hooks registered with DMCoarsenHookAdd(), in registration order */
  for (link=dm->coarsenhook; link; link=link->next) {
    if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsenHookAdd"
/*@C
   DMCoarsenHookAdd - adds a callback to be
run when restricting a nonlinear problem to the coarse grid

   Logically Collective

   Input Arguments:
+  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
.  coarsenhook - function to run when setting up a coarser level
.  restricthook - function to run to update data on coarser levels (once per SNESSolve())
-  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

   Calling sequence of coarsenhook:
$    coarsenhook(DM fine,DM coarse,void *ctx);

+  fine - fine level DM
.  coarse - coarse level DM to restrict problem to
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

+  fine - fine level DM
.  mrestrict - matrix restricting a fine-level solution to the coarse grid
.  rscale - scaling vector for restriction
.  inject - matrix restricting by injection
.  coarse - coarse level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fine,DM_CLASSID,1);
  for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
  link->coarsenhook  = coarsenhook;
  link->restricthook = restricthook;
  link->ctx          = ctx;
  link->next         = NULL;
  /* Append at the tail so hooks run in registration order */
  *p                 = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestrict"
/*@
   DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

   Collective if any hooks are

   Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.
inject - injection matrix, also use MatRestrict() 2078 - coarse - coarer DM to update 2079 2080 Level: developer 2081 2082 .seealso: DMCoarsenHookAdd(), MatRestrict() 2083 @*/ 2084 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 2085 { 2086 PetscErrorCode ierr; 2087 DMCoarsenHookLink link; 2088 2089 PetscFunctionBegin; 2090 for (link=fine->coarsenhook; link; link=link->next) { 2091 if (link->restricthook) { 2092 ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr); 2093 } 2094 } 2095 PetscFunctionReturn(0); 2096 } 2097 2098 #undef __FUNCT__ 2099 #define __FUNCT__ "DMSubDomainHookAdd" 2100 /*@C 2101 DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid 2102 2103 Logically Collective 2104 2105 Input Arguments: 2106 + global - global DM 2107 . ddhook - function to run to pass data to the decomposition DM upon its creation 2108 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 2109 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 2110 2111 2112 Calling sequence for ddhook: 2113 $ ddhook(DM global,DM block,void *ctx) 2114 2115 + global - global DM 2116 . block - block DM 2117 - ctx - optional user-defined function context 2118 2119 Calling sequence for restricthook: 2120 $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx) 2121 2122 + global - global DM 2123 . out - scatter to the outer (with ghost and overlap points) block vector 2124 . in - scatter to block vector values only owned locally 2125 . block - block DM 2126 - ctx - optional user-defined function context 2127 2128 Level: advanced 2129 2130 Notes: 2131 This function is only needed if auxiliary data needs to be set up on subdomain DMs. 2132 2133 If this function is called multiple times, the hooks will be run in the order they are added. 
   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the global information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook       = ddhook;
  link->ctx          = ctx;
  link->next         = NULL;
  /* Append at the tail so hooks run in registration order */
  *p                 = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainRestrict"
/*@
   DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

   Collective if any hooks are

   Input Arguments:
+  global - global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 2170 - coarse - coarer DM to update 2171 2172 Level: developer 2173 2174 .seealso: DMCoarsenHookAdd(), MatRestrict() 2175 @*/ 2176 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 2177 { 2178 PetscErrorCode ierr; 2179 DMSubDomainHookLink link; 2180 2181 PetscFunctionBegin; 2182 for (link=global->subdomainhook; link; link=link->next) { 2183 if (link->restricthook) { 2184 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2185 } 2186 } 2187 PetscFunctionReturn(0); 2188 } 2189 2190 #undef __FUNCT__ 2191 #define __FUNCT__ "DMGetCoarsenLevel" 2192 /*@ 2193 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2194 2195 Not Collective 2196 2197 Input Parameter: 2198 . dm - the DM object 2199 2200 Output Parameter: 2201 . level - number of coarsenings 2202 2203 Level: developer 2204 2205 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2206 2207 @*/ 2208 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2209 { 2210 PetscFunctionBegin; 2211 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2212 *level = dm->leveldown; 2213 PetscFunctionReturn(0); 2214 } 2215 2216 2217 2218 #undef __FUNCT__ 2219 #define __FUNCT__ "DMRefineHierarchy" 2220 /*@C 2221 DMRefineHierarchy - Refines a DM object, all levels at once 2222 2223 Collective on DM 2224 2225 Input Parameter: 2226 + dm - the DM object 2227 - nlevels - the number of levels of refinement 2228 2229 Output Parameter: 2230 . 
dmf - the refined DM hierarchy 2231 2232 Level: developer 2233 2234 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2235 2236 @*/ 2237 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2238 { 2239 PetscErrorCode ierr; 2240 2241 PetscFunctionBegin; 2242 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2243 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2244 if (nlevels == 0) PetscFunctionReturn(0); 2245 if (dm->ops->refinehierarchy) { 2246 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2247 } else if (dm->ops->refine) { 2248 PetscInt i; 2249 2250 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2251 for (i=1; i<nlevels; i++) { 2252 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2253 } 2254 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2255 PetscFunctionReturn(0); 2256 } 2257 2258 #undef __FUNCT__ 2259 #define __FUNCT__ "DMCoarsenHierarchy" 2260 /*@C 2261 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2262 2263 Collective on DM 2264 2265 Input Parameter: 2266 + dm - the DM object 2267 - nlevels - the number of levels of coarsening 2268 2269 Output Parameter: 2270 . 
dmc - the coarsened DM hierarchy

   Level: developer

.seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dmc,3);
  if (dm->ops->coarsenhierarchy) {
    ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
  } else if (dm->ops->coarsen) {
    PetscInt i;

    /* No all-at-once implementation: fall back to repeated single-level coarsening */
    ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
    }
  } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateAggregates"
/*@
   DMCreateAggregates - Gets the aggregates that map between
   grids associated with two DMs.

   Collective on DM

   Input Parameters:
+  dmc - the coarse grid DM
-  dmf - the fine grid DM

   Output Parameters:
.
rest - the restriction matrix (transpose of the projection matrix)

   Level: intermediate

.keywords: interpolation, restriction, multigrid

.seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
@*/
PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
  PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
  /* NOTE(review): assumes the coarse DM's type implements getaggregates; a type
     without it would dereference a NULL function pointer -- confirm callers */
  ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
   DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

   Not Collective

   Input Parameters:
+  dm - the DM object
-  destroy - the destroy function

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
   DMSetApplicationContext - Set a user context into a DM object

   Not Collective

   Input Parameters:
+  dm - the DM object
-  ctx - the user context

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM does not take ownership of ctx; the caller (or ctxdestroy, if set) is responsible for it */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}

2379 #undef __FUNCT__ 2380 #define __FUNCT__ "DMGetApplicationContext" 2381 /*@ 2382 DMGetApplicationContext - Gets a user context from a DM object 2383 2384 Not Collective 2385 2386 Input Parameter: 2387 . dm - the DM object 2388 2389 Output Parameter: 2390 . ctx - the user context 2391 2392 Level: intermediate 2393 2394 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2395 2396 @*/ 2397 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2398 { 2399 PetscFunctionBegin; 2400 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2401 *(void**)ctx = dm->ctx; 2402 PetscFunctionReturn(0); 2403 } 2404 2405 #undef __FUNCT__ 2406 #define __FUNCT__ "DMSetVariableBounds" 2407 /*@C 2408 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2409 2410 Logically Collective on DM 2411 2412 Input Parameter: 2413 + dm - the DM object 2414 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2415 2416 Level: intermediate 2417 2418 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2419 DMSetJacobian() 2420 2421 @*/ 2422 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2423 { 2424 PetscFunctionBegin; 2425 dm->ops->computevariablebounds = f; 2426 PetscFunctionReturn(0); 2427 } 2428 2429 #undef __FUNCT__ 2430 #define __FUNCT__ "DMHasVariableBounds" 2431 /*@ 2432 DMHasVariableBounds - does the DM object have a variable bounds function? 2433 2434 Not Collective 2435 2436 Input Parameter: 2437 . dm - the DM object to destroy 2438 2439 Output Parameter: 2440 . 
flg - PETSC_TRUE if the variable bounds function exists 2441 2442 Level: developer 2443 2444 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2445 2446 @*/ 2447 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2448 { 2449 PetscFunctionBegin; 2450 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2451 PetscFunctionReturn(0); 2452 } 2453 2454 #undef __FUNCT__ 2455 #define __FUNCT__ "DMComputeVariableBounds" 2456 /*@C 2457 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2458 2459 Logically Collective on DM 2460 2461 Input Parameters: 2462 + dm - the DM object to destroy 2463 - x - current solution at which the bounds are computed 2464 2465 Output parameters: 2466 + xl - lower bound 2467 - xu - upper bound 2468 2469 Level: intermediate 2470 2471 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2472 2473 @*/ 2474 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2475 { 2476 PetscErrorCode ierr; 2477 2478 PetscFunctionBegin; 2479 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2480 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2481 if (dm->ops->computevariablebounds) { 2482 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2483 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2484 PetscFunctionReturn(0); 2485 } 2486 2487 #undef __FUNCT__ 2488 #define __FUNCT__ "DMHasColoring" 2489 /*@ 2490 DMHasColoring - does the DM object have a method of providing a coloring? 2491 2492 Not Collective 2493 2494 Input Parameter: 2495 . dm - the DM object 2496 2497 Output Parameter: 2498 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2499 2500 Level: developer 2501 2502 .seealso DMHasFunction(), DMCreateColoring() 2503 2504 @*/ 2505 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2506 { 2507 PetscFunctionBegin; 2508 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2509 PetscFunctionReturn(0); 2510 } 2511 2512 #undef __FUNCT__ 2513 #define __FUNCT__ "DMSetVec" 2514 /*@C 2515 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2516 2517 Collective on DM 2518 2519 Input Parameter: 2520 + dm - the DM object 2521 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2522 2523 Level: developer 2524 2525 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2526 2527 @*/ 2528 PetscErrorCode DMSetVec(DM dm,Vec x) 2529 { 2530 PetscErrorCode ierr; 2531 2532 PetscFunctionBegin; 2533 if (x) { 2534 if (!dm->x) { 2535 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2536 } 2537 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2538 } else if (dm->x) { 2539 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2540 } 2541 PetscFunctionReturn(0); 2542 } 2543 2544 PetscFunctionList DMList = NULL; 2545 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2546 2547 #undef __FUNCT__ 2548 #define __FUNCT__ "DMSetType" 2549 /*@C 2550 DMSetType - Builds a DM, for a particular DM implementation. 2551 2552 Collective on DM 2553 2554 Input Parameters: 2555 + dm - The DM object 2556 - method - The name of the DM type 2557 2558 Options Database Key: 2559 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2560 2561 Notes: 2562 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 
  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0); /* already of the requested type; nothing to do */

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down the implementation-specific data of the old type before building the new one */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll();CHKERRQ(ierr);
  }
  /* Borrowed pointer into the object header; the caller must not free it */
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.
2629 2630 Collective on DM 2631 2632 Input Parameters: 2633 + dm - the DM 2634 - newtype - new DM type (use "same" for the same type) 2635 2636 Output Parameter: 2637 . M - pointer to new DM 2638 2639 Notes: 2640 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2641 the MPI communicator of the generated DM is always the same as the communicator 2642 of the input DM. 2643 2644 Level: intermediate 2645 2646 .seealso: DMCreate() 2647 @*/ 2648 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2649 { 2650 DM B; 2651 char convname[256]; 2652 PetscBool sametype, issame; 2653 PetscErrorCode ierr; 2654 2655 PetscFunctionBegin; 2656 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2657 PetscValidType(dm,1); 2658 PetscValidPointer(M,3); 2659 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2660 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2661 { 2662 PetscErrorCode (*conv)(DM, DMType, DM*) = NULL; 2663 2664 /* 2665 Order of precedence: 2666 1) See if a specialized converter is known to the current DM. 2667 2) See if a specialized converter is known to the desired DM class. 2668 3) See if a good general converter is registered for the desired class 2669 4) See if a good general converter is known for the current matrix. 2670 5) Use a really basic converter. 2671 */ 2672 2673 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2674 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2675 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2676 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2677 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2678 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2679 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr); 2680 if (conv) goto foundconv; 2681 2682 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2683 ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr); 2684 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2685 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2686 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2687 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2688 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2689 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2690 ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr); 2691 if (conv) { 2692 ierr = DMDestroy(&B);CHKERRQ(ierr); 2693 goto foundconv; 2694 } 2695 2696 #if 0 2697 /* 3) See if a good general converter is registered for the desired class */ 2698 conv = B->ops->convertfrom; 2699 ierr = DMDestroy(&B);CHKERRQ(ierr); 2700 if (conv) goto foundconv; 2701 2702 /* 4) See if a good general converter is known for the current matrix */ 2703 if (dm->ops->convert) { 2704 conv = dm->ops->convert; 2705 } 2706 if (conv) goto foundconv; 2707 #endif 2708 2709 /* 5) Use a really basic converter. 
*/ 2710 SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2711 2712 foundconv: 2713 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2714 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2715 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2716 } 2717 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2718 PetscFunctionReturn(0); 2719 } 2720 2721 /*--------------------------------------------------------------------------------------------------------------------*/ 2722 2723 #undef __FUNCT__ 2724 #define __FUNCT__ "DMRegister" 2725 /*@C 2726 DMRegister - Adds a new DM component implementation 2727 2728 Not Collective 2729 2730 Input Parameters: 2731 + name - The name of a new user-defined creation routine 2732 - create_func - The creation routine itself 2733 2734 Notes: 2735 DMRegister() may be called multiple times to add several user-defined DMs 2736 2737 2738 Sample usage: 2739 .vb 2740 DMRegister("my_da", MyDMCreate); 2741 .ve 2742 2743 Then, your DM type can be chosen with the procedural interface via 2744 .vb 2745 DMCreate(MPI_Comm, DM *); 2746 DMSetType(DM,"my_da"); 2747 .ve 2748 or at runtime via the option 2749 .vb 2750 -da_type my_da 2751 .ve 2752 2753 Level: advanced 2754 2755 .keywords: DM, register 2756 .seealso: DMRegisterAll(), DMRegisterDestroy() 2757 2758 @*/ 2759 PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM)) 2760 { 2761 PetscErrorCode ierr; 2762 2763 PetscFunctionBegin; 2764 ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr); 2765 PetscFunctionReturn(0); 2766 } 2767 2768 #undef __FUNCT__ 2769 #define __FUNCT__ "DMLoad" 2770 /*@C 2771 DMLoad - Loads a DM that has been stored in binary with DMView(). 
  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

  Level: intermediate

  Notes:
  The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The file begins with a classid sentinel followed by the type name; dispatch to the type's loader */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/******************************** FEM Support **********************************/

#undef __FUNCT__
#define __FUNCT__ "DMPrintCellVector"
/* Debugging helper: print an element (cell-local) vector of length len on this process */
PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
{
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
  for (f = 0; f < len; ++f) {
    /* Only the real part is printed */
    ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPrintCellMatrix"
/* Debugging helper: print a rows x cols element (cell-local) matrix stored in row-major order */
PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
{
  PetscInt       f, g;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
  for (f = 0; f < rows; ++f) {
    ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr);
    for (g = 0; g < cols; ++g) {
      ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
    }
    ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* May be NULL if no section has been set on this DM */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so passing the currently-set section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Mirror the section's field structure (and field names) in the DM's field objects */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection().
*/
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Lazily build the global section from the local section and the point SF */
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Refresh the cached value layout to match the new global section */
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection, or NULL

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
@*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so passing the currently-set section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSF"
/*@
  DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
  it is created from the default PetscSection layouts in the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
{
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* A graph with nroots < 0 means the SF has never been set up */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* Without a local section there is nothing to build the SF from */
      *sf = NULL;
      PetscFunctionReturn(0);
    }
  }
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Note: Any previous SF is destroyed

  NOTE(review): unlike DMSetPointSF(), this takes ownership of sf without taking a
  reference -- confirm that callers do not also destroy the SF they pass in.

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
.
localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves = 0, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a parallel layout of the locally-owned dofs so that a global offset can be
     converted into an owning (rank, local index) pair below */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative global dof, encoded as -(gdof+1), marks a
     point owned by another process; constrained dofs are excluded in either case. */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill in the leaf (local dof offset) and root (owner rank/index) arrays */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record the unconstrained local dofs, skipping constrained indices */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned: decode the true global offset and locate the owner rank */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2);
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned: the root lives on this rank */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of the local and remote arrays */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local,
PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3145 PetscFunctionReturn(0); 3146 } 3147 3148 #undef __FUNCT__ 3149 #define __FUNCT__ "DMGetPointSF" 3150 /*@ 3151 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3152 3153 Input Parameter: 3154 . dm - The DM 3155 3156 Output Parameter: 3157 . sf - The PetscSF 3158 3159 Level: intermediate 3160 3161 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3162 3163 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3164 @*/ 3165 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 3166 { 3167 PetscFunctionBegin; 3168 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3169 PetscValidPointer(sf, 2); 3170 *sf = dm->sf; 3171 PetscFunctionReturn(0); 3172 } 3173 3174 #undef __FUNCT__ 3175 #define __FUNCT__ "DMSetPointSF" 3176 /*@ 3177 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 3178 3179 Input Parameters: 3180 + dm - The DM 3181 - sf - The PetscSF 3182 3183 Level: intermediate 3184 3185 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3186 @*/ 3187 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3188 { 3189 PetscErrorCode ierr; 3190 3191 PetscFunctionBegin; 3192 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3193 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3194 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3195 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3196 dm->sf = sf; 3197 PetscFunctionReturn(0); 3198 } 3199 3200 #undef __FUNCT__ 3201 #define __FUNCT__ "DMGetNumFields" 3202 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3203 { 3204 PetscFunctionBegin; 3205 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3206 PetscValidPointer(numFields, 2); 3207 *numFields = dm->numFields; 3208 PetscFunctionReturn(0); 3209 } 3210 3211 #undef __FUNCT__ 3212 #define __FUNCT__ "DMSetNumFields" 3213 PetscErrorCode DMSetNumFields(DM dm, 
PetscInt numFields)
{
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Destroy any previously attached field objects before resizing the array */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree(dm->fields);CHKERRQ(ierr);
  dm->numFields = numFields;
  ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
  /* Fill the slots with placeholder PetscContainer objects; they can be renamed or
     replaced later (see DMSetDefaultSection()) */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetField"
/* Return a borrowed reference to field f; errors if DMSetNumFields() has not been called
   or f is out of range */
PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestrictHook_Coordinates"
/* Coarsen hook: when the fine DM has coordinates and the coarse DM does not,
   inject the fine coordinates into a freshly created coarse coordinate vector */
PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
{
  DM             dm_coord,dmc_coord;
  PetscErrorCode ierr;
  Vec            coords,ccoords;
  VecScatter     scat;

  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr);
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* DMSetCoordinates() takes its own reference to ccoords, so release ours below */
    ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHook_Coordinates"
/* Subdomain hook: when the parent DM has coordinates and the subdomain DM does not,
   scatter both global and local coordinate vectors onto the subdomain */
static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
{
  DM             dm_coord,subdm_coord;
  PetscErrorCode ierr;
  Vec            coords,ccoords,clcoords;
  VecScatter     *scat_i,*scat_g;

  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr);
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr);
    ierr =
DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr);
    ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr);
    /* scat_i[0] fills the subdomain global vector, scat_g[0] the subdomain local vector
       -- NOTE(review): the i/g naming suggests the opposite; confirm against
       DMCreateDomainDecompositionScatters() */
    ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* The subdomain DM takes its own references; release ours and the scatters */
    ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
    ierr = VecDestroy(&clcoords);CHKERRQ(ierr);
    ierr = PetscFree(scat_i);CHKERRQ(ierr);
    ierr = PetscFree(scat_g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so passing the currently-set vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local form; DMGetCoordinatesLocal() rebuilds it on demand */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  /* Ensure coordinates are propagated to coarsened and subdomain DMs */
  ierr = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  ierr = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so passing the currently-set vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Invalidate the cached global form; DMGetCoordinates() rebuilds it on demand */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).
  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  if (!dm->coordinates && dm->coordinatesLocal) {
    /* Lazily assemble the global vector from the cached local form */
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  if (!dm->coordinatesLocal && dm->coordinates) {
    /* Lazily assemble the local (ghosted) vector from the cached global form */
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateDM"
/*@
  DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. cdm - coordinate DM

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  if (!dm->coordinateDM) {
    /* Lazily create the coordinate DM via the type-specific implementation */
    if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocatePoints"
/*@
  DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells

  Not collective

  Input Parameters:
+ dm - The DM
- v - The Vec of points

  Output Parameter:
. cells - The local cell numbers for cells which contain the points

  Level: developer

.keywords: point location, mesh
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(v,VEC_CLASSID,2);
  PetscValidPointer(cells,3);
  if (dm->ops->locatepoints) {
    ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
  PetscFunctionReturn(0);
}