#include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
#include <petscsf.h>

PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;

#undef __FUNCT__
#define __FUNCT__ "DMViewFromOptions"
/*
  DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed.

  Collective on Vec

  Input Parameters:
+ dm - the DM
. prefix - prefix to use for viewing, or NULL to use prefix of 'dm'
- optionname - option to activate viewing

  Level: intermediate

.keywords: DM, view, options, database
.seealso: VecViewFromOptions(), MatViewFromOptions()
*/
PetscErrorCode DMViewFromOptions(DM dm,const char prefix[],const char optionname[])
{
  PetscErrorCode    ierr;
  PetscBool         flg;       /* set by PetscOptionsGetViewer(): was the option present? */
  PetscViewer       viewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  /* Fall back to the DM's own options prefix when the caller does not supply one */
  if (prefix) {
    ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
  } else {
    ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
  }
  if (flg) {
    /* The viewer was created for us by PetscOptionsGetViewer(), so destroy it after viewing */
    ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
    ierr = DMView(dm,viewer);CHKERRQ(ierr);
    ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

    If you never call DMSetType() it will generate an
    error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
.
 dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = NULL;
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  /* Register the packages this object depends on (no-op if already registered) */
  ierr = VecInitializePackage();CHKERRQ(ierr);
  ierr = MatInitializePackage();CHKERRQ(ierr);
  ierr = DMInitializePackage();CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = NULL;
  v->ltogmapb     = NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = NULL;
  v->defaultGlobalSection = NULL;
  {
    PetscInt i;
    /* NOTE(review): the constant 10 must match the bound checked in DMSetNullSpaceConstructor() */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = NULL;
    }
  }
  v->numFields = 0;
  v->fields    = NULL;

  *dm = v;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameter:
+  da - initial distributed array
.  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.
 -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Release any previously stored type string, then keep our own copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
   VecGetDM - Gets the DM defining the data layout of the vector

   Not collective

   Input Parameter:
.  v - The Vec

   Output Parameter:
.  dm - The DM (NULL if no DM was attached with VecSetDM())

   Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored on the vector as a composed object under a fixed key */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetDM"
/*@
   VecSetDM - Sets the DM defining the data layout of the vector

   Not collective

   Input Parameters:
+  v - The Vec
-  dm - The DM (may be NULL to detach)

   Level: intermediate

.seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecSetDM(Vec v, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on
 DM

   Input Parameter:
+  dm - the DM context
.  ctype - the matrix type

   Options Database:
.  -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Release any previously stored type string, then keep our own copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
   MatGetDM - Gets the DM defining the data layout of the matrix

   Not collective

   Input Parameter:
.  A - The Mat

   Output Parameter:
.  dm - The DM (NULL if no DM was attached with MatSetDM())

   Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored on the matrix as a composed object under a fixed key */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
   MatSetDM - Sets the DM defining the data layout of the matrix

   Not collective

   Input Parameters:
+  A - The Mat
-  dm - The DM (may be NULL to detach)

   Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix -
 Sets the prefix used for searching for all
   DMDA options in the database.

   Logically Collective on DMDA

   Input Parameter:
+  da - the DMDA context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DMDA, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  cnt++;
    if ((*dm)->globalin[i]) cnt++;
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  for (nlink=(*dm)->namedlocal;  nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* Only proceed with destruction when the remaining references are exactly the circular ones counted above */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  nnext=(*dm)->namedglobal;
  (*dm)->namedglobal = NULL;
  for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  nnext=(*dm)->namedlocal;
  (*dm)->namedlocal = NULL;
  for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named local vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = NULL;
  }

  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr);

  /* Type-specific destruction runs last, before the header itself is freed */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetUp"
/*@
    DMSetUp - sets up the data structures inside a DM object

    Collective on DM

    Input Parameter:
.   dm - the DM object to setup

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMSetUp(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* idempotent: a second call is a no-op */
  if (dm->setupcalled) PetscFunctionReturn(0);
  if (dm->ops->setup) {
    ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
  }
  dm->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.
-dm_mat_type <type> type of matrix to create inside DM 499 - -dm_coloring_type <global or ghosted> 500 501 Level: developer 502 503 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 504 505 @*/ 506 PetscErrorCode DMSetFromOptions(DM dm) 507 { 508 char typeName[256] = MATAIJ; 509 PetscBool flg; 510 PetscErrorCode ierr; 511 512 PetscFunctionBegin; 513 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 514 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 515 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 516 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 517 if (flg) { 518 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 519 } 520 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 521 if (flg) { 522 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 523 } 524 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 525 if (dm->ops->setfromoptions) { 526 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 527 } 528 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 529 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 530 ierr = PetscOptionsEnd();CHKERRQ(ierr); 531 ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr); 532 PetscFunctionReturn(0); 533 } 534 535 #undef __FUNCT__ 536 #define __FUNCT__ "DMView" 537 /*@C 538 DMView - Views a vector packer or DMDA. 

    Collective on DM

    Input Parameter:
+   dm - the DM object to view
-   v - the viewer

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* NULL viewer means the communicator's standard-out ASCII viewer */
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary header: classid followed by a fixed-width (256-byte) type name */
    PetscInt classid = DM_FILE_CLASSID;
    char     type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector"
/*@
    DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

    Collective on DM

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the global vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector"
/*@
    DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object

    Not Collective

    Input Parameter:
.
dm - the DM object 613 614 Output Parameter: 615 . vec - the local vector 616 617 Level: beginner 618 619 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 620 621 @*/ 622 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 623 { 624 PetscErrorCode ierr; 625 626 PetscFunctionBegin; 627 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 628 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 629 PetscFunctionReturn(0); 630 } 631 632 #undef __FUNCT__ 633 #define __FUNCT__ "DMGetLocalToGlobalMapping" 634 /*@ 635 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 636 637 Collective on DM 638 639 Input Parameter: 640 . dm - the DM that provides the mapping 641 642 Output Parameter: 643 . ltog - the mapping 644 645 Level: intermediate 646 647 Notes: 648 This mapping can then be used by VecSetLocalToGlobalMapping() or 649 MatSetLocalToGlobalMapping(). 650 651 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 652 @*/ 653 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 654 { 655 PetscErrorCode ierr; 656 657 PetscFunctionBegin; 658 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 659 PetscValidPointer(ltog,2); 660 if (!dm->ltogmap) { 661 PetscSection section, sectionGlobal; 662 663 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 664 if (section) { 665 PetscInt *ltog; 666 PetscInt pStart, pEnd, size, p, l; 667 668 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 669 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 670 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 671 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 672 for (p = pStart, l = 0; p < pEnd; ++p) { 673 PetscInt dof, off, c; 674 675 /* Should probably use constrained dofs */ 676 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 677 ierr = PetscSectionGetOffset(sectionGlobal, p, 
&off);CHKERRQ(ierr); 678 for (c = 0; c < dof; ++c, ++l) { 679 ltog[l] = off+c; 680 } 681 } 682 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 683 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 684 } else { 685 if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 686 ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr); 687 } 688 } 689 *ltog = dm->ltogmap; 690 PetscFunctionReturn(0); 691 } 692 693 #undef __FUNCT__ 694 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 695 /*@ 696 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 697 698 Collective on DM 699 700 Input Parameter: 701 . da - the distributed array that provides the mapping 702 703 Output Parameter: 704 . ltog - the block mapping 705 706 Level: intermediate 707 708 Notes: 709 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 710 MatSetLocalToGlobalMappingBlock(). 

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* Build the blocked mapping lazily on first request, then cache it on the DM */
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* Block size 1: the blocked mapping is just the point mapping; share it (extra reference) */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.
 bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs is initialized to 1 in DMCreate(); a value < 1 means it was never validly set */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameter:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

@*/
PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Dispatch to the coarse DM's implementation */
  ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameter:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Dispatch to the coarse DM's implementation */
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameter:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.
 coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix"
/*@C
    DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite

    Collective on DM

    Input Parameter:
+   dm - the DM object
-   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
            any type which inherits from one of these (such as MATAIJ)

    Output Parameter:
.   mat - the empty Jacobian

    Level: beginner

    Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
       do not need to do it yourself.

       By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
       the nonzero pattern call DMDASetMatPreallocateOnly()

       For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
       internally by PETSc.

       For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
       the indices for the global numbering for DMDAs which is complicated.
893 894 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 895 896 @*/ 897 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 898 { 899 PetscErrorCode ierr; 900 901 PetscFunctionBegin; 902 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 903 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 904 ierr = MatInitializePackage();CHKERRQ(ierr); 905 #endif 906 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 907 PetscValidPointer(mat,3); 908 if (dm->mattype) { 909 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 910 } else { 911 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 912 } 913 PetscFunctionReturn(0); 914 } 915 916 #undef __FUNCT__ 917 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 918 /*@ 919 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 920 preallocated but the nonzero structure and zero values will not be set. 921 922 Logically Collective on DMDA 923 924 Input Parameter: 925 + dm - the DM 926 - only - PETSC_TRUE if only want preallocation 927 928 Level: developer 929 .seealso DMCreateMatrix() 930 @*/ 931 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 932 { 933 PetscFunctionBegin; 934 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 935 dm->prealloc_only = only; 936 PetscFunctionReturn(0); 937 } 938 939 #undef __FUNCT__ 940 #define __FUNCT__ "DMGetWorkArray" 941 /*@C 942 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 943 944 Not Collective 945 946 Input Parameters: 947 + dm - the DM object 948 . count - The minium size 949 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 950 951 Output Parameter: 952 . 
 array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse a link from the free list ("workin") if available, otherwise allocate one */
  if (dm->workin) {
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the buffer only if the request exceeds the cached capacity */
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Move the link onto the checked-out list ("workout") */
  link->next  = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minium size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
.
 array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the checked-out link whose buffer matches *mem and move it back to the free list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = NULL;   /* caller's pointer is invalidated */
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetNullSpaceConstructor"
/* Registers a callback that builds the null space for the given field.
   NOTE(review): the bound 10 must match the size of nullspaceConstructors initialized in DMCreate(). */
PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
  dm->nullspaceConstructors[field] = nullsp;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays.
In particular, every entry of names should be freed with 1054 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1055 PetscFree(). 1056 1057 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1058 @*/ 1059 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1060 { 1061 PetscSection section, sectionGlobal; 1062 PetscErrorCode ierr; 1063 1064 PetscFunctionBegin; 1065 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1066 if (numFields) { 1067 PetscValidPointer(numFields,2); 1068 *numFields = 0; 1069 } 1070 if (fieldNames) { 1071 PetscValidPointer(fieldNames,3); 1072 *fieldNames = NULL; 1073 } 1074 if (fields) { 1075 PetscValidPointer(fields,4); 1076 *fields = NULL; 1077 } 1078 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1079 if (section) { 1080 PetscInt *fieldSizes, **fieldIndices; 1081 PetscInt nF, f, pStart, pEnd, p; 1082 1083 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1084 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1085 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr); 1086 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1087 for (f = 0; f < nF; ++f) { 1088 fieldSizes[f] = 0; 1089 } 1090 for (p = pStart; p < pEnd; ++p) { 1091 PetscInt gdof; 1092 1093 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1094 if (gdof > 0) { 1095 for (f = 0; f < nF; ++f) { 1096 PetscInt fdof, fcdof; 1097 1098 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1099 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1100 fieldSizes[f] += fdof-fcdof; 1101 } 1102 } 1103 } 1104 for (f = 0; f < nF; ++f) { 1105 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1106 fieldSizes[f] = 0; 1107 } 1108 for (p = pStart; p < pEnd; ++p) { 1109 PetscInt gdof, goff; 
1110 1111 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1112 if (gdof > 0) { 1113 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1114 for (f = 0; f < nF; ++f) { 1115 PetscInt fdof, fcdof, fc; 1116 1117 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1118 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1119 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1120 fieldIndices[f][fieldSizes[f]] = goff++; 1121 } 1122 } 1123 } 1124 } 1125 if (numFields) *numFields = nF; 1126 if (fieldNames) { 1127 ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr); 1128 for (f = 0; f < nF; ++f) { 1129 const char *fieldName; 1130 1131 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1132 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1133 } 1134 } 1135 if (fields) { 1136 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1137 for (f = 0; f < nF; ++f) { 1138 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1139 } 1140 } 1141 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1142 } else if (dm->ops->createfieldis) { 1143 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1144 } 1145 PetscFunctionReturn(0); 1146 } 1147 1148 1149 #undef __FUNCT__ 1150 #define __FUNCT__ "DMCreateFieldDecomposition" 1151 /*@C 1152 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1153 corresponding to different fields: each IS contains the global indices of the dofs of the 1154 corresponding field. The optional list of DMs define the DM for each subproblem. 1155 Generalizes DMCreateFieldIS(). 1156 1157 Not collective 1158 1159 Input Parameter: 1160 . 
dm - the DM object 1161 1162 Output Parameters: 1163 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1164 . namelist - The name for each field (or NULL if not requested) 1165 . islist - The global indices for each field (or NULL if not requested) 1166 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1167 1168 Level: intermediate 1169 1170 Notes: 1171 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1172 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1173 and all of the arrays should be freed with PetscFree(). 1174 1175 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1176 @*/ 1177 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1178 { 1179 PetscErrorCode ierr; 1180 1181 PetscFunctionBegin; 1182 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1183 if (len) { 1184 PetscValidPointer(len,2); 1185 *len = 0; 1186 } 1187 if (namelist) { 1188 PetscValidPointer(namelist,3); 1189 *namelist = 0; 1190 } 1191 if (islist) { 1192 PetscValidPointer(islist,4); 1193 *islist = 0; 1194 } 1195 if (dmlist) { 1196 PetscValidPointer(dmlist,5); 1197 *dmlist = 0; 1198 } 1199 /* 1200 Is it a good idea to apply the following check across all impls? 1201 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1202 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1203 */ 1204 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1205 if (!dm->ops->createfielddecomposition) { 1206 PetscSection section; 1207 PetscInt numFields, f; 1208 1209 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1210 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1211 if (section && numFields && dm->ops->createsubdm) { 1212 *len = numFields; 1213 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1214 for (f = 0; f < numFields; ++f) { 1215 const char *fieldName; 1216 1217 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1218 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1219 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1220 } 1221 } else { 1222 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1223 /* By default there are no DMs associated with subproblems. */ 1224 if (dmlist) *dmlist = NULL; 1225 } 1226 } else { 1227 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1228 } 1229 PetscFunctionReturn(0); 1230 } 1231 1232 #undef __FUNCT__ 1233 #define __FUNCT__ "DMCreateSubDM" 1234 /*@C 1235 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1236 The fields are defined by DMCreateFieldIS(). 1237 1238 Not collective 1239 1240 Input Parameters: 1241 + dm - the DM object 1242 . numFields - number of fields in this subproblem 1243 - len - The number of subproblems in the decomposition (or NULL if not requested) 1244 1245 Output Parameters: 1246 . 
is - The global indices for the subproblem 1247 - dm - The DM for the subproblem 1248 1249 Level: intermediate 1250 1251 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1252 @*/ 1253 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1254 { 1255 PetscErrorCode ierr; 1256 1257 PetscFunctionBegin; 1258 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1259 PetscValidPointer(fields,3); 1260 if (is) PetscValidPointer(is,4); 1261 if (subdm) PetscValidPointer(subdm,5); 1262 if (dm->ops->createsubdm) { 1263 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr); 1264 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1265 PetscFunctionReturn(0); 1266 } 1267 1268 1269 #undef __FUNCT__ 1270 #define __FUNCT__ "DMCreateDomainDecomposition" 1271 /*@C 1272 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1273 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1274 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1275 define a nonoverlapping covering, while outer subdomains can overlap. 1276 The optional list of DMs define the DM for each subproblem. 1277 1278 Not collective 1279 1280 Input Parameter: 1281 . dm - the DM object 1282 1283 Output Parameters: 1284 + len - The number of subproblems in the domain decomposition (or NULL if not requested) 1285 . namelist - The name for each subdomain (or NULL if not requested) 1286 . innerislist - The global indices for each inner subdomain (or NULL, if not requested) 1287 . 
outerislist - The global indices for each outer subdomain (or NULL, if not requested) 1288 - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1289 1290 Level: intermediate 1291 1292 Notes: 1293 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1294 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1295 and all of the arrays should be freed with PetscFree(). 1296 1297 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1298 @*/ 1299 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1300 { 1301 PetscErrorCode ierr; 1302 DMSubDomainHookLink link; 1303 PetscInt i,l; 1304 1305 PetscFunctionBegin; 1306 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1307 if (len) {PetscValidPointer(len,2); *len = 0;} 1308 if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;} 1309 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;} 1310 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;} 1311 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;} 1312 /* 1313 Is it a good idea to apply the following check across all impls? 1314 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1315 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1316 */ 1317 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1318 if (dm->ops->createdomaindecomposition) { 1319 ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr); 1320 /* copy subdomain hooks and context over to the subdomain DMs */ 1321 if (dmlist) { 1322 for (i = 0; i < l; i++) { 1323 for (link=dm->subdomainhook; link; link=link->next) { 1324 if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);} 1325 } 1326 (*dmlist)[i]->ctx = dm->ctx; 1327 } 1328 } 1329 if (len) *len = l; 1330 } 1331 PetscFunctionReturn(0); 1332 } 1333 1334 1335 #undef __FUNCT__ 1336 #define __FUNCT__ "DMCreateDomainDecompositionScatters" 1337 /*@C 1338 DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector 1339 1340 Not collective 1341 1342 Input Parameters: 1343 + dm - the DM object 1344 . n - the number of subdomain scatters 1345 - subdms - the local subdomains 1346 1347 Output Parameters: 1348 + n - the number of scatters returned 1349 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain 1350 . oscat - scatter from global vector to overlapping global vector entries on subdomain 1351 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts) 1352 1353 Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution 1354 of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets 1355 of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of 1356 solution and residual data. 
1357 1358 Level: developer 1359 1360 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1361 @*/ 1362 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1363 { 1364 PetscErrorCode ierr; 1365 1366 PetscFunctionBegin; 1367 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1368 PetscValidPointer(subdms,3); 1369 if (dm->ops->createddscatters) { 1370 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1371 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1372 PetscFunctionReturn(0); 1373 } 1374 1375 #undef __FUNCT__ 1376 #define __FUNCT__ "DMRefine" 1377 /*@ 1378 DMRefine - Refines a DM object 1379 1380 Collective on DM 1381 1382 Input Parameter: 1383 + dm - the DM object 1384 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1385 1386 Output Parameter: 1387 . 
dmf - the refined DM, or NULL 1388 1389 Note: If no refinement was done, the return value is NULL 1390 1391 Level: developer 1392 1393 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1394 @*/ 1395 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1396 { 1397 PetscErrorCode ierr; 1398 DMRefineHookLink link; 1399 1400 PetscFunctionBegin; 1401 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1402 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1403 if (*dmf) { 1404 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1405 1406 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1407 1408 (*dmf)->ctx = dm->ctx; 1409 (*dmf)->leveldown = dm->leveldown; 1410 (*dmf)->levelup = dm->levelup + 1; 1411 1412 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1413 for (link=dm->refinehook; link; link=link->next) { 1414 if (link->refinehook) { 1415 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1416 } 1417 } 1418 } 1419 PetscFunctionReturn(0); 1420 } 1421 1422 #undef __FUNCT__ 1423 #define __FUNCT__ "DMRefineHookAdd" 1424 /*@C 1425 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1426 1427 Logically Collective 1428 1429 Input Arguments: 1430 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1431 . refinehook - function to run when setting up a coarser level 1432 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1433 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1434 1435 Calling sequence of refinehook: 1436 $ refinehook(DM coarse,DM fine,void *ctx); 1437 1438 + coarse - coarse level DM 1439 . 
fine - fine level DM to interpolate problem to 1440 - ctx - optional user-defined function context 1441 1442 Calling sequence for interphook: 1443 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1444 1445 + coarse - coarse level DM 1446 . interp - matrix interpolating a coarse-level solution to the finer grid 1447 . fine - fine level DM to update 1448 - ctx - optional user-defined function context 1449 1450 Level: advanced 1451 1452 Notes: 1453 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1454 1455 If this function is called multiple times, the hooks will be run in the order they are added. 1456 1457 This function is currently not available from Fortran. 1458 1459 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1460 @*/ 1461 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1462 { 1463 PetscErrorCode ierr; 1464 DMRefineHookLink link,*p; 1465 1466 PetscFunctionBegin; 1467 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1468 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1469 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1470 link->refinehook = refinehook; 1471 link->interphook = interphook; 1472 link->ctx = ctx; 1473 link->next = NULL; 1474 *p = link; 1475 PetscFunctionReturn(0); 1476 } 1477 1478 #undef __FUNCT__ 1479 #define __FUNCT__ "DMInterpolate" 1480 /*@ 1481 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1482 1483 Collective if any hooks are 1484 1485 Input Arguments: 1486 + coarse - coarser DM to use as a base 1487 . 
restrct - interpolation matrix, apply using MatInterpolate() 1488 - fine - finer DM to update 1489 1490 Level: developer 1491 1492 .seealso: DMRefineHookAdd(), MatInterpolate() 1493 @*/ 1494 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1495 { 1496 PetscErrorCode ierr; 1497 DMRefineHookLink link; 1498 1499 PetscFunctionBegin; 1500 for (link=fine->refinehook; link; link=link->next) { 1501 if (link->interphook) { 1502 ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr); 1503 } 1504 } 1505 PetscFunctionReturn(0); 1506 } 1507 1508 #undef __FUNCT__ 1509 #define __FUNCT__ "DMGetRefineLevel" 1510 /*@ 1511 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1512 1513 Not Collective 1514 1515 Input Parameter: 1516 . dm - the DM object 1517 1518 Output Parameter: 1519 . level - number of refinements 1520 1521 Level: developer 1522 1523 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1524 1525 @*/ 1526 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1527 { 1528 PetscFunctionBegin; 1529 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1530 *level = dm->levelup; 1531 PetscFunctionReturn(0); 1532 } 1533 1534 #undef __FUNCT__ 1535 #define __FUNCT__ "DMGlobalToLocalHookAdd" 1536 /*@C 1537 DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called 1538 1539 Logically Collective 1540 1541 Input Arguments: 1542 + dm - the DM 1543 . beginhook - function to run at the beginning of DMGlobalToLocalBegin() 1544 . endhook - function to run after DMGlobalToLocalEnd() has completed 1545 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1546 1547 Calling sequence for beginhook: 1548 $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1549 1550 + dm - global DM 1551 . g - global vector 1552 . mode - mode 1553 . 
l - local vector 1554 - ctx - optional user-defined function context 1555 1556 1557 Calling sequence for endhook: 1558 $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1559 1560 + global - global DM 1561 - ctx - optional user-defined function context 1562 1563 Level: advanced 1564 1565 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1566 @*/ 1567 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx) 1568 { 1569 PetscErrorCode ierr; 1570 DMGlobalToLocalHookLink link,*p; 1571 1572 PetscFunctionBegin; 1573 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1574 for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1575 ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr); 1576 link->beginhook = beginhook; 1577 link->endhook = endhook; 1578 link->ctx = ctx; 1579 link->next = NULL; 1580 *p = link; 1581 PetscFunctionReturn(0); 1582 } 1583 1584 #undef __FUNCT__ 1585 #define __FUNCT__ "DMGlobalToLocalBegin" 1586 /*@ 1587 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1588 1589 Neighbor-wise Collective on DM 1590 1591 Input Parameters: 1592 + dm - the DM object 1593 . g - the global vector 1594 . 
mode - INSERT_VALUES or ADD_VALUES 1595 - l - the local vector 1596 1597 1598 Level: beginner 1599 1600 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1601 1602 @*/ 1603 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1604 { 1605 PetscSF sf; 1606 PetscErrorCode ierr; 1607 DMGlobalToLocalHookLink link; 1608 1609 PetscFunctionBegin; 1610 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1611 for (link=dm->gtolhook; link; link=link->next) { 1612 if (link->beginhook) { 1613 ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr); 1614 } 1615 } 1616 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1617 if (sf) { 1618 PetscScalar *lArray, *gArray; 1619 1620 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1621 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1622 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1623 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1624 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1625 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1626 } else { 1627 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1628 } 1629 PetscFunctionReturn(0); 1630 } 1631 1632 #undef __FUNCT__ 1633 #define __FUNCT__ "DMGlobalToLocalEnd" 1634 /*@ 1635 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1636 1637 Neighbor-wise Collective on DM 1638 1639 Input Parameters: 1640 + dm - the DM object 1641 . g - the global vector 1642 . 
mode - INSERT_VALUES or ADD_VALUES 1643 - l - the local vector 1644 1645 1646 Level: beginner 1647 1648 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1649 1650 @*/ 1651 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1652 { 1653 PetscSF sf; 1654 PetscErrorCode ierr; 1655 PetscScalar *lArray, *gArray; 1656 DMGlobalToLocalHookLink link; 1657 1658 PetscFunctionBegin; 1659 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1660 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1661 if (sf) { 1662 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1663 1664 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1665 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1666 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1667 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1668 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1669 } else { 1670 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1671 } 1672 for (link=dm->gtolhook; link; link=link->next) { 1673 if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);} 1674 } 1675 PetscFunctionReturn(0); 1676 } 1677 1678 #undef __FUNCT__ 1679 #define __FUNCT__ "DMLocalToGlobalBegin" 1680 /*@ 1681 DMLocalToGlobalBegin - updates global vectors from local vectors 1682 1683 Neighbor-wise Collective on DM 1684 1685 Input Parameters: 1686 + dm - the DM object 1687 . l - the local vector 1688 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1689 base point. 1690 - - the global vector 1691 1692 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1693 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1694 global array to the final global array with VecAXPY(). 1695 1696 Level: beginner 1697 1698 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1699 1700 @*/ 1701 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1702 { 1703 PetscSF sf; 1704 PetscErrorCode ierr; 1705 1706 PetscFunctionBegin; 1707 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1708 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1709 if (sf) { 1710 MPI_Op op; 1711 PetscScalar *lArray, *gArray; 1712 1713 switch (mode) { 1714 case INSERT_VALUES: 1715 case INSERT_ALL_VALUES: 1716 op = MPIU_REPLACE; break; 1717 case ADD_VALUES: 1718 case ADD_ALL_VALUES: 1719 op = MPI_SUM; break; 1720 default: 1721 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1722 } 1723 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1724 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1725 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1726 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1727 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1728 } else { 1729 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1730 } 1731 PetscFunctionReturn(0); 1732 } 1733 1734 #undef __FUNCT__ 1735 #define __FUNCT__ "DMLocalToGlobalEnd" 1736 /*@ 1737 DMLocalToGlobalEnd - updates global vectors from local vectors 1738 1739 Neighbor-wise Collective on DM 1740 1741 Input Parameters: 1742 + dm - the DM object 1743 . l - the local vector 1744 . 
mode - INSERT_VALUES or ADD_VALUES 1745 - g - the global vector 1746 1747 1748 Level: beginner 1749 1750 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1751 1752 @*/ 1753 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1754 { 1755 PetscSF sf; 1756 PetscErrorCode ierr; 1757 1758 PetscFunctionBegin; 1759 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1760 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1761 if (sf) { 1762 MPI_Op op; 1763 PetscScalar *lArray, *gArray; 1764 1765 switch (mode) { 1766 case INSERT_VALUES: 1767 case INSERT_ALL_VALUES: 1768 op = MPIU_REPLACE; break; 1769 case ADD_VALUES: 1770 case ADD_ALL_VALUES: 1771 op = MPI_SUM; break; 1772 default: 1773 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1774 } 1775 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1776 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1777 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1778 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1779 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1780 } else { 1781 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1782 } 1783 PetscFunctionReturn(0); 1784 } 1785 1786 #undef __FUNCT__ 1787 #define __FUNCT__ "DMCoarsen" 1788 /*@ 1789 DMCoarsen - Coarsens a DM object 1790 1791 Collective on DM 1792 1793 Input Parameter: 1794 + dm - the DM object 1795 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1796 1797 Output Parameter: 1798 . 
dmc - the coarsened DM 1799 1800 Level: developer 1801 1802 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1803 1804 @*/ 1805 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1806 { 1807 PetscErrorCode ierr; 1808 DMCoarsenHookLink link; 1809 1810 PetscFunctionBegin; 1811 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1812 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1813 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1814 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1815 (*dmc)->ctx = dm->ctx; 1816 (*dmc)->levelup = dm->levelup; 1817 (*dmc)->leveldown = dm->leveldown + 1; 1818 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1819 for (link=dm->coarsenhook; link; link=link->next) { 1820 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1821 } 1822 PetscFunctionReturn(0); 1823 } 1824 1825 #undef __FUNCT__ 1826 #define __FUNCT__ "DMCoarsenHookAdd" 1827 /*@C 1828 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1829 1830 Logically Collective 1831 1832 Input Arguments: 1833 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1834 . coarsenhook - function to run when setting up a coarser level 1835 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1836 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1837 1838 Calling sequence of coarsenhook: 1839 $ coarsenhook(DM fine,DM coarse,void *ctx); 1840 1841 + fine - fine level DM 1842 . coarse - coarse level DM to restrict problem to 1843 - ctx - optional user-defined function context 1844 1845 Calling sequence for restricthook: 1846 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1847 1848 + fine - fine level DM 1849 . 
mrestrict - matrix restricting a fine-level solution to the coarse grid 1850 . rscale - scaling vector for restriction 1851 . inject - matrix restricting by injection 1852 . coarse - coarse level DM to update 1853 - ctx - optional user-defined function context 1854 1855 Level: advanced 1856 1857 Notes: 1858 This function is only needed if auxiliary data needs to be set up on coarse grids. 1859 1860 If this function is called multiple times, the hooks will be run in the order they are added. 1861 1862 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1863 extract the finest level information from its context (instead of from the SNES). 1864 1865 This function is currently not available from Fortran. 1866 1867 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1868 @*/ 1869 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 1870 { 1871 PetscErrorCode ierr; 1872 DMCoarsenHookLink link,*p; 1873 1874 PetscFunctionBegin; 1875 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 1876 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1877 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 1878 link->coarsenhook = coarsenhook; 1879 link->restricthook = restricthook; 1880 link->ctx = ctx; 1881 link->next = NULL; 1882 *p = link; 1883 PetscFunctionReturn(0); 1884 } 1885 1886 #undef __FUNCT__ 1887 #define __FUNCT__ "DMRestrict" 1888 /*@ 1889 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 1890 1891 Collective if any hooks are 1892 1893 Input Arguments: 1894 + fine - finer DM to use as a base 1895 . restrct - restriction matrix, apply using MatRestrict() 1896 . 
inject - injection matrix, also use MatRestrict() 1897 - coarse - coarer DM to update 1898 1899 Level: developer 1900 1901 .seealso: DMCoarsenHookAdd(), MatRestrict() 1902 @*/ 1903 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 1904 { 1905 PetscErrorCode ierr; 1906 DMCoarsenHookLink link; 1907 1908 PetscFunctionBegin; 1909 for (link=fine->coarsenhook; link; link=link->next) { 1910 if (link->restricthook) { 1911 ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr); 1912 } 1913 } 1914 PetscFunctionReturn(0); 1915 } 1916 1917 #undef __FUNCT__ 1918 #define __FUNCT__ "DMSubDomainHookAdd" 1919 /*@C 1920 DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid 1921 1922 Logically Collective 1923 1924 Input Arguments: 1925 + global - global DM 1926 . ddhook - function to run to pass data to the decomposition DM upon its creation 1927 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 1928 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1929 1930 1931 Calling sequence for ddhook: 1932 $ ddhook(DM global,DM block,void *ctx) 1933 1934 + global - global DM 1935 . block - block DM 1936 - ctx - optional user-defined function context 1937 1938 Calling sequence for restricthook: 1939 $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx) 1940 1941 + global - global DM 1942 . out - scatter to the outer (with ghost and overlap points) block vector 1943 . in - scatter to block vector values only owned locally 1944 . block - block DM 1945 - ctx - optional user-defined function context 1946 1947 Level: advanced 1948 1949 Notes: 1950 This function is only needed if auxiliary data needs to be set up on subdomain DMs. 1951 1952 If this function is called multiple times, the hooks will be run in the order they are added. 
1953 1954 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1955 extract the global information from its context (instead of from the SNES). 1956 1957 This function is currently not available from Fortran. 1958 1959 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1960 @*/ 1961 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx) 1962 { 1963 PetscErrorCode ierr; 1964 DMSubDomainHookLink link,*p; 1965 1966 PetscFunctionBegin; 1967 PetscValidHeaderSpecific(global,DM_CLASSID,1); 1968 for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1969 ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr); 1970 link->restricthook = restricthook; 1971 link->ddhook = ddhook; 1972 link->ctx = ctx; 1973 link->next = NULL; 1974 *p = link; 1975 PetscFunctionReturn(0); 1976 } 1977 1978 #undef __FUNCT__ 1979 #define __FUNCT__ "DMSubDomainRestrict" 1980 /*@ 1981 DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd() 1982 1983 Collective if any hooks are 1984 1985 Input Arguments: 1986 + fine - finer DM to use as a base 1987 . oscatter - scatter from domain global vector filling subdomain global vector with overlap 1988 . 
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 1989 - coarse - coarer DM to update 1990 1991 Level: developer 1992 1993 .seealso: DMCoarsenHookAdd(), MatRestrict() 1994 @*/ 1995 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 1996 { 1997 PetscErrorCode ierr; 1998 DMSubDomainHookLink link; 1999 2000 PetscFunctionBegin; 2001 for (link=global->subdomainhook; link; link=link->next) { 2002 if (link->restricthook) { 2003 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2004 } 2005 } 2006 PetscFunctionReturn(0); 2007 } 2008 2009 #undef __FUNCT__ 2010 #define __FUNCT__ "DMGetCoarsenLevel" 2011 /*@ 2012 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2013 2014 Not Collective 2015 2016 Input Parameter: 2017 . dm - the DM object 2018 2019 Output Parameter: 2020 . level - number of coarsenings 2021 2022 Level: developer 2023 2024 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2025 2026 @*/ 2027 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2028 { 2029 PetscFunctionBegin; 2030 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2031 *level = dm->leveldown; 2032 PetscFunctionReturn(0); 2033 } 2034 2035 2036 2037 #undef __FUNCT__ 2038 #define __FUNCT__ "DMRefineHierarchy" 2039 /*@C 2040 DMRefineHierarchy - Refines a DM object, all levels at once 2041 2042 Collective on DM 2043 2044 Input Parameter: 2045 + dm - the DM object 2046 - nlevels - the number of levels of refinement 2047 2048 Output Parameter: 2049 . 
dmf - the refined DM hierarchy 2050 2051 Level: developer 2052 2053 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2054 2055 @*/ 2056 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2057 { 2058 PetscErrorCode ierr; 2059 2060 PetscFunctionBegin; 2061 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2062 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2063 if (nlevels == 0) PetscFunctionReturn(0); 2064 if (dm->ops->refinehierarchy) { 2065 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2066 } else if (dm->ops->refine) { 2067 PetscInt i; 2068 2069 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2070 for (i=1; i<nlevels; i++) { 2071 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2072 } 2073 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2074 PetscFunctionReturn(0); 2075 } 2076 2077 #undef __FUNCT__ 2078 #define __FUNCT__ "DMCoarsenHierarchy" 2079 /*@C 2080 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2081 2082 Collective on DM 2083 2084 Input Parameter: 2085 + dm - the DM object 2086 - nlevels - the number of levels of coarsening 2087 2088 Output Parameter: 2089 . 
dmc - the coarsened DM hierarchy 2090 2091 Level: developer 2092 2093 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2094 2095 @*/ 2096 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 2097 { 2098 PetscErrorCode ierr; 2099 2100 PetscFunctionBegin; 2101 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2102 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2103 if (nlevels == 0) PetscFunctionReturn(0); 2104 PetscValidPointer(dmc,3); 2105 if (dm->ops->coarsenhierarchy) { 2106 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 2107 } else if (dm->ops->coarsen) { 2108 PetscInt i; 2109 2110 ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr); 2111 for (i=1; i<nlevels; i++) { 2112 ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr); 2113 } 2114 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 2115 PetscFunctionReturn(0); 2116 } 2117 2118 #undef __FUNCT__ 2119 #define __FUNCT__ "DMCreateAggregates" 2120 /*@ 2121 DMCreateAggregates - Gets the aggregates that map between 2122 grids associated with two DMs. 2123 2124 Collective on DM 2125 2126 Input Parameters: 2127 + dmc - the coarse grid DM 2128 - dmf - the fine grid DM 2129 2130 Output Parameters: 2131 . 
rest - the restriction matrix (transpose of the projection matrix) 2132 2133 Level: intermediate 2134 2135 .keywords: interpolation, restriction, multigrid 2136 2137 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2138 @*/ 2139 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2140 { 2141 PetscErrorCode ierr; 2142 2143 PetscFunctionBegin; 2144 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2145 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2146 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2147 PetscFunctionReturn(0); 2148 } 2149 2150 #undef __FUNCT__ 2151 #define __FUNCT__ "DMSetApplicationContextDestroy" 2152 /*@C 2153 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2154 2155 Not Collective 2156 2157 Input Parameters: 2158 + dm - the DM object 2159 - destroy - the destroy function 2160 2161 Level: intermediate 2162 2163 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2164 2165 @*/ 2166 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2167 { 2168 PetscFunctionBegin; 2169 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2170 dm->ctxdestroy = destroy; 2171 PetscFunctionReturn(0); 2172 } 2173 2174 #undef __FUNCT__ 2175 #define __FUNCT__ "DMSetApplicationContext" 2176 /*@ 2177 DMSetApplicationContext - Set a user context into a DM object 2178 2179 Not Collective 2180 2181 Input Parameters: 2182 + dm - the DM object 2183 - ctx - the user context 2184 2185 Level: intermediate 2186 2187 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2188 2189 @*/ 2190 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2191 { 2192 PetscFunctionBegin; 2193 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2194 dm->ctx = ctx; 2195 PetscFunctionReturn(0); 2196 } 2197 
2198 #undef __FUNCT__ 2199 #define __FUNCT__ "DMGetApplicationContext" 2200 /*@ 2201 DMGetApplicationContext - Gets a user context from a DM object 2202 2203 Not Collective 2204 2205 Input Parameter: 2206 . dm - the DM object 2207 2208 Output Parameter: 2209 . ctx - the user context 2210 2211 Level: intermediate 2212 2213 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2214 2215 @*/ 2216 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2217 { 2218 PetscFunctionBegin; 2219 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2220 *(void**)ctx = dm->ctx; 2221 PetscFunctionReturn(0); 2222 } 2223 2224 #undef __FUNCT__ 2225 #define __FUNCT__ "DMSetVariableBounds" 2226 /*@C 2227 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2228 2229 Logically Collective on DM 2230 2231 Input Parameter: 2232 + dm - the DM object 2233 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2234 2235 Level: intermediate 2236 2237 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2238 DMSetJacobian() 2239 2240 @*/ 2241 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2242 { 2243 PetscFunctionBegin; 2244 dm->ops->computevariablebounds = f; 2245 PetscFunctionReturn(0); 2246 } 2247 2248 #undef __FUNCT__ 2249 #define __FUNCT__ "DMHasVariableBounds" 2250 /*@ 2251 DMHasVariableBounds - does the DM object have a variable bounds function? 2252 2253 Not Collective 2254 2255 Input Parameter: 2256 . dm - the DM object to destroy 2257 2258 Output Parameter: 2259 . 
flg - PETSC_TRUE if the variable bounds function exists 2260 2261 Level: developer 2262 2263 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2264 2265 @*/ 2266 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2267 { 2268 PetscFunctionBegin; 2269 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2270 PetscFunctionReturn(0); 2271 } 2272 2273 #undef __FUNCT__ 2274 #define __FUNCT__ "DMComputeVariableBounds" 2275 /*@C 2276 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2277 2278 Logically Collective on DM 2279 2280 Input Parameters: 2281 + dm - the DM object to destroy 2282 - x - current solution at which the bounds are computed 2283 2284 Output parameters: 2285 + xl - lower bound 2286 - xu - upper bound 2287 2288 Level: intermediate 2289 2290 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2291 2292 @*/ 2293 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2294 { 2295 PetscErrorCode ierr; 2296 2297 PetscFunctionBegin; 2298 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2299 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2300 if (dm->ops->computevariablebounds) { 2301 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2302 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2303 PetscFunctionReturn(0); 2304 } 2305 2306 #undef __FUNCT__ 2307 #define __FUNCT__ "DMHasColoring" 2308 /*@ 2309 DMHasColoring - does the DM object have a method of providing a coloring? 2310 2311 Not Collective 2312 2313 Input Parameter: 2314 . dm - the DM object 2315 2316 Output Parameter: 2317 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2318 2319 Level: developer 2320 2321 .seealso DMHasFunction(), DMCreateColoring() 2322 2323 @*/ 2324 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2325 { 2326 PetscFunctionBegin; 2327 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2328 PetscFunctionReturn(0); 2329 } 2330 2331 #undef __FUNCT__ 2332 #define __FUNCT__ "DMSetVec" 2333 /*@C 2334 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2335 2336 Collective on DM 2337 2338 Input Parameter: 2339 + dm - the DM object 2340 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2341 2342 Level: developer 2343 2344 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2345 2346 @*/ 2347 PetscErrorCode DMSetVec(DM dm,Vec x) 2348 { 2349 PetscErrorCode ierr; 2350 2351 PetscFunctionBegin; 2352 if (x) { 2353 if (!dm->x) { 2354 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2355 } 2356 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2357 } else if (dm->x) { 2358 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2359 } 2360 PetscFunctionReturn(0); 2361 } 2362 2363 PetscFunctionList DMList = NULL; 2364 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2365 2366 #undef __FUNCT__ 2367 #define __FUNCT__ "DMSetType" 2368 /*@C 2369 DMSetType - Builds a DM, for a particular DM implementation. 2370 2371 Collective on DM 2372 2373 Input Parameters: 2374 + dm - The DM object 2375 - method - The name of the DM type 2376 2377 Options Database Key: 2378 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2379 2380 Notes: 2381 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 
2382 2383 Level: intermediate 2384 2385 .keywords: DM, set, type 2386 .seealso: DMGetType(), DMCreate() 2387 @*/ 2388 PetscErrorCode DMSetType(DM dm, DMType method) 2389 { 2390 PetscErrorCode (*r)(DM); 2391 PetscBool match; 2392 PetscErrorCode ierr; 2393 2394 PetscFunctionBegin; 2395 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2396 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2397 if (match) PetscFunctionReturn(0); 2398 2399 if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);} 2400 ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr); 2401 if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2402 2403 if (dm->ops->destroy) { 2404 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2405 dm->ops->destroy = NULL; 2406 } 2407 ierr = (*r)(dm);CHKERRQ(ierr); 2408 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2409 PetscFunctionReturn(0); 2410 } 2411 2412 #undef __FUNCT__ 2413 #define __FUNCT__ "DMGetType" 2414 /*@C 2415 DMGetType - Gets the DM type name (as a string) from the DM. 2416 2417 Not Collective 2418 2419 Input Parameter: 2420 . dm - The DM 2421 2422 Output Parameter: 2423 . type - The DM type name 2424 2425 Level: intermediate 2426 2427 .keywords: DM, get, type, name 2428 .seealso: DMSetType(), DMCreate() 2429 @*/ 2430 PetscErrorCode DMGetType(DM dm, DMType *type) 2431 { 2432 PetscErrorCode ierr; 2433 2434 PetscFunctionBegin; 2435 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2436 PetscValidCharPointer(type,2); 2437 if (!DMRegisterAllCalled) { 2438 ierr = DMRegisterAll();CHKERRQ(ierr); 2439 } 2440 *type = ((PetscObject)dm)->type_name; 2441 PetscFunctionReturn(0); 2442 } 2443 2444 #undef __FUNCT__ 2445 #define __FUNCT__ "DMConvert" 2446 /*@C 2447 DMConvert - Converts a DM to another DM, either of the same or different type. 
2448 2449 Collective on DM 2450 2451 Input Parameters: 2452 + dm - the DM 2453 - newtype - new DM type (use "same" for the same type) 2454 2455 Output Parameter: 2456 . M - pointer to new DM 2457 2458 Notes: 2459 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2460 the MPI communicator of the generated DM is always the same as the communicator 2461 of the input DM. 2462 2463 Level: intermediate 2464 2465 .seealso: DMCreate() 2466 @*/ 2467 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2468 { 2469 DM B; 2470 char convname[256]; 2471 PetscBool sametype, issame; 2472 PetscErrorCode ierr; 2473 2474 PetscFunctionBegin; 2475 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2476 PetscValidType(dm,1); 2477 PetscValidPointer(M,3); 2478 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2479 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2480 { 2481 PetscErrorCode (*conv)(DM, DMType, DM*) = NULL; 2482 2483 /* 2484 Order of precedence: 2485 1) See if a specialized converter is known to the current DM. 2486 2) See if a specialized converter is known to the desired DM class. 2487 3) See if a good general converter is registered for the desired class 2488 4) See if a good general converter is known for the current matrix. 2489 5) Use a really basic converter. 2490 */ 2491 2492 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2493 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2494 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2495 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2496 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2497 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2498 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr); 2499 if (conv) goto foundconv; 2500 2501 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2502 ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr); 2503 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2504 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2505 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2506 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2507 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2508 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2509 ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr); 2510 if (conv) { 2511 ierr = DMDestroy(&B);CHKERRQ(ierr); 2512 goto foundconv; 2513 } 2514 2515 #if 0 2516 /* 3) See if a good general converter is registered for the desired class */ 2517 conv = B->ops->convertfrom; 2518 ierr = DMDestroy(&B);CHKERRQ(ierr); 2519 if (conv) goto foundconv; 2520 2521 /* 4) See if a good general converter is known for the current matrix */ 2522 if (dm->ops->convert) { 2523 conv = dm->ops->convert; 2524 } 2525 if (conv) goto foundconv; 2526 #endif 2527 2528 /* 5) Use a really basic converter. 
*/ 2529 SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2530 2531 foundconv: 2532 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2533 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2534 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2535 } 2536 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2537 PetscFunctionReturn(0); 2538 } 2539 2540 /*--------------------------------------------------------------------------------------------------------------------*/ 2541 2542 #undef __FUNCT__ 2543 #define __FUNCT__ "DMRegister" 2544 /*@C 2545 DMRegister - Adds a new DM component implementation 2546 2547 Not Collective 2548 2549 Input Parameters: 2550 + name - The name of a new user-defined creation routine 2551 - create_func - The creation routine itself 2552 2553 Notes: 2554 DMRegister() may be called multiple times to add several user-defined DMs 2555 2556 2557 Sample usage: 2558 .vb 2559 DMRegister("my_da", MyDMCreate); 2560 .ve 2561 2562 Then, your DM type can be chosen with the procedural interface via 2563 .vb 2564 DMCreate(MPI_Comm, DM *); 2565 DMSetType(DM,"my_da"); 2566 .ve 2567 or at runtime via the option 2568 .vb 2569 -da_type my_da 2570 .ve 2571 2572 Level: advanced 2573 2574 .keywords: DM, register 2575 .seealso: DMRegisterAll(), DMRegisterDestroy() 2576 2577 @*/ 2578 PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM)) 2579 { 2580 PetscErrorCode ierr; 2581 2582 PetscFunctionBegin; 2583 ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr); 2584 PetscFunctionReturn(0); 2585 } 2586 2587 #undef __FUNCT__ 2588 #define __FUNCT__ "DMLoad" 2589 /*@C 2590 DMLoad - Loads a DM that has been stored in binary with DMView(). 
2591 2592 Collective on PetscViewer 2593 2594 Input Parameters: 2595 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 2596 some related function before a call to DMLoad(). 2597 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 2598 HDF5 file viewer, obtained from PetscViewerHDF5Open() 2599 2600 Level: intermediate 2601 2602 Notes: 2603 The type is determined by the data in the file, any type set into the DM before this call is ignored. 2604 2605 Notes for advanced users: 2606 Most users should not need to know the details of the binary storage 2607 format, since DMLoad() and DMView() completely hide these details. 2608 But for anyone who's interested, the standard binary matrix storage 2609 format is 2610 .vb 2611 has not yet been determined 2612 .ve 2613 2614 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 2615 @*/ 2616 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 2617 { 2618 PetscErrorCode ierr; 2619 PetscBool isbinary; 2620 PetscInt classid; 2621 char type[256]; 2622 2623 PetscFunctionBegin; 2624 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 2625 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 2626 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 2627 if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()"); 2628 2629 ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr); 2630 if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file"); 2631 ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr); 2632 ierr = DMSetType(newdm, type);CHKERRQ(ierr); 2633 if (newdm->ops->load) { 2634 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 2635 } 2636 PetscFunctionReturn(0); 2637 } 2638 2639 /******************************** FEM Support **********************************/ 2640 2641 
#undef __FUNCT__ 2642 #define __FUNCT__ "DMPrintCellVector" 2643 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) 2644 { 2645 PetscInt f; 2646 PetscErrorCode ierr; 2647 2648 PetscFunctionBegin; 2649 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2650 for (f = 0; f < len; ++f) { 2651 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 2652 } 2653 PetscFunctionReturn(0); 2654 } 2655 2656 #undef __FUNCT__ 2657 #define __FUNCT__ "DMPrintCellMatrix" 2658 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) 2659 { 2660 PetscInt f, g; 2661 PetscErrorCode ierr; 2662 2663 PetscFunctionBegin; 2664 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2665 for (f = 0; f < rows; ++f) { 2666 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2667 for (g = 0; g < cols; ++g) { 2668 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2669 } 2670 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2671 } 2672 PetscFunctionReturn(0); 2673 } 2674 2675 #undef __FUNCT__ 2676 #define __FUNCT__ "DMGetDefaultSection" 2677 /*@ 2678 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 2679 2680 Input Parameter: 2681 . dm - The DM 2682 2683 Output Parameter: 2684 . section - The PetscSection 2685 2686 Level: intermediate 2687 2688 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 
2689 2690 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2691 @*/ 2692 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) 2693 { 2694 PetscFunctionBegin; 2695 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2696 PetscValidPointer(section, 2); 2697 *section = dm->defaultSection; 2698 PetscFunctionReturn(0); 2699 } 2700 2701 #undef __FUNCT__ 2702 #define __FUNCT__ "DMSetDefaultSection" 2703 /*@ 2704 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 2705 2706 Input Parameters: 2707 + dm - The DM 2708 - section - The PetscSection 2709 2710 Level: intermediate 2711 2712 Note: Any existing Section will be destroyed 2713 2714 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2715 @*/ 2716 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) 2717 { 2718 PetscInt numFields; 2719 PetscInt f; 2720 PetscErrorCode ierr; 2721 2722 PetscFunctionBegin; 2723 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2724 PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2725 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2726 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 2727 dm->defaultSection = section; 2728 ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr); 2729 if (numFields) { 2730 ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr); 2731 for (f = 0; f < numFields; ++f) { 2732 const char *name; 2733 2734 ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr); 2735 ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr); 2736 } 2737 } 2738 /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). 
*/ 2739 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2740 PetscFunctionReturn(0); 2741 } 2742 2743 #undef __FUNCT__ 2744 #define __FUNCT__ "DMGetDefaultGlobalSection" 2745 /*@ 2746 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 2747 2748 Collective on DM 2749 2750 Input Parameter: 2751 . dm - The DM 2752 2753 Output Parameter: 2754 . section - The PetscSection 2755 2756 Level: intermediate 2757 2758 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 2759 2760 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 2761 @*/ 2762 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) 2763 { 2764 PetscErrorCode ierr; 2765 2766 PetscFunctionBegin; 2767 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2768 PetscValidPointer(section, 2); 2769 if (!dm->defaultGlobalSection) { 2770 if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection"); 2771 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 2772 ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr); 2773 ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr); 2774 } 2775 *section = dm->defaultGlobalSection; 2776 PetscFunctionReturn(0); 2777 } 2778 2779 #undef __FUNCT__ 2780 #define __FUNCT__ "DMSetDefaultGlobalSection" 2781 /*@ 2782 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 
2783 2784 Input Parameters: 2785 + dm - The DM 2786 - section - The PetscSection, or NULL 2787 2788 Level: intermediate 2789 2790 Note: Any existing Section will be destroyed 2791 2792 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2793 @*/ 2794 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2795 { 2796 PetscErrorCode ierr; 2797 2798 PetscFunctionBegin; 2799 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2800 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2801 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2802 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2803 dm->defaultGlobalSection = section; 2804 PetscFunctionReturn(0); 2805 } 2806 2807 #undef __FUNCT__ 2808 #define __FUNCT__ "DMGetDefaultSF" 2809 /*@ 2810 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2811 it is created from the default PetscSection layouts in the DM. 2812 2813 Input Parameter: 2814 . dm - The DM 2815 2816 Output Parameter: 2817 . sf - The PetscSF 2818 2819 Level: intermediate 2820 2821 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
2822 2823 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 2824 @*/ 2825 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) 2826 { 2827 PetscInt nroots; 2828 PetscErrorCode ierr; 2829 2830 PetscFunctionBegin; 2831 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2832 PetscValidPointer(sf, 2); 2833 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 2834 if (nroots < 0) { 2835 PetscSection section, gSection; 2836 2837 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 2838 if (section) { 2839 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 2840 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 2841 } else { 2842 *sf = NULL; 2843 PetscFunctionReturn(0); 2844 } 2845 } 2846 *sf = dm->defaultSF; 2847 PetscFunctionReturn(0); 2848 } 2849 2850 #undef __FUNCT__ 2851 #define __FUNCT__ "DMSetDefaultSF" 2852 /*@ 2853 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 2854 2855 Input Parameters: 2856 + dm - The DM 2857 - sf - The PetscSF 2858 2859 Level: intermediate 2860 2861 Note: Any previous SF is destroyed 2862 2863 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 2864 @*/ 2865 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) 2866 { 2867 PetscErrorCode ierr; 2868 2869 PetscFunctionBegin; 2870 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2871 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 2872 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 2873 dm->defaultSF = sf; 2874 PetscFunctionReturn(0); 2875 } 2876 2877 #undef __FUNCT__ 2878 #define __FUNCT__ "DMCreateDefaultSF" 2879 /*@C 2880 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 2881 describing the data layout. 2882 2883 Input Parameters: 2884 + dm - The DM 2885 . 
localSection - PetscSection describing the local data layout 2886 - globalSection - PetscSection describing the global data layout 2887 2888 Level: intermediate 2889 2890 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 2891 @*/ 2892 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 2893 { 2894 MPI_Comm comm; 2895 PetscLayout layout; 2896 const PetscInt *ranges; 2897 PetscInt *local; 2898 PetscSFNode *remote; 2899 PetscInt pStart, pEnd, p, nroots, nleaves, l; 2900 PetscMPIInt size, rank; 2901 PetscErrorCode ierr; 2902 2903 PetscFunctionBegin; 2904 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 2905 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2906 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 2907 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 2908 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 2909 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 2910 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 2911 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 2912 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 2913 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 2914 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 2915 for (p = pStart, nleaves = 0; p < pEnd; ++p) { 2916 PetscInt gdof, gcdof; 2917 2918 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 2919 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 2920 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 2921 } 2922 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 2923 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 2924 for (p = pStart, l = 0; p < pEnd; ++p) { 2925 const PetscInt *cind; 2926 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 2927 2928 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 2929 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 2930 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 2931 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 2932 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 2933 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 2934 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 2935 if (!gdof) continue; /* Censored point */ 2936 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 2937 if (gsize != dof-cdof) { 2938 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 2939 cdof = 0; /* Ignore constraints */ 2940 } 2941 for (d = 0, c = 0; d < dof; ++d) { 2942 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 2943 local[l+d-c] = off+d; 2944 } 2945 if (gdof < 0) { 2946 for (d = 0; d < gsize; ++d, ++l) { 2947 PetscInt offset = -(goff+1) + d, r; 2948 2949 ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr); 2950 if (r < 0) r = -(r+2); 2951 remote[l].rank = r; 2952 remote[l].index = offset - ranges[r]; 2953 } 2954 } else { 2955 for (d = 0; d < gsize; ++d, ++l) { 2956 remote[l].rank = rank; 2957 remote[l].index = goff+d - ranges[rank]; 2958 } 2959 } 2960 } 2961 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 2962 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 2963 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, 
PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 2964 PetscFunctionReturn(0); 2965 } 2966 2967 #undef __FUNCT__ 2968 #define __FUNCT__ "DMGetPointSF" 2969 /*@ 2970 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 2971 2972 Input Parameter: 2973 . dm - The DM 2974 2975 Output Parameter: 2976 . sf - The PetscSF 2977 2978 Level: intermediate 2979 2980 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 2981 2982 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 2983 @*/ 2984 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 2985 { 2986 PetscFunctionBegin; 2987 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2988 PetscValidPointer(sf, 2); 2989 *sf = dm->sf; 2990 PetscFunctionReturn(0); 2991 } 2992 2993 #undef __FUNCT__ 2994 #define __FUNCT__ "DMSetPointSF" 2995 /*@ 2996 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 2997 2998 Input Parameters: 2999 + dm - The DM 3000 - sf - The PetscSF 3001 3002 Level: intermediate 3003 3004 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3005 @*/ 3006 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3007 { 3008 PetscErrorCode ierr; 3009 3010 PetscFunctionBegin; 3011 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3012 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3013 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3014 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3015 dm->sf = sf; 3016 PetscFunctionReturn(0); 3017 } 3018 3019 #undef __FUNCT__ 3020 #define __FUNCT__ "DMGetNumFields" 3021 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3022 { 3023 PetscFunctionBegin; 3024 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3025 PetscValidPointer(numFields, 2); 3026 *numFields = dm->numFields; 3027 PetscFunctionReturn(0); 3028 } 3029 3030 #undef __FUNCT__ 3031 #define __FUNCT__ "DMSetNumFields" 3032 PetscErrorCode DMSetNumFields(DM dm, 
PetscInt numFields) 3033 { 3034 PetscInt f; 3035 PetscErrorCode ierr; 3036 3037 PetscFunctionBegin; 3038 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3039 for (f = 0; f < dm->numFields; ++f) { 3040 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3041 } 3042 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3043 dm->numFields = numFields; 3044 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3045 for (f = 0; f < dm->numFields; ++f) { 3046 ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr); 3047 } 3048 PetscFunctionReturn(0); 3049 } 3050 3051 #undef __FUNCT__ 3052 #define __FUNCT__ "DMGetField" 3053 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3054 { 3055 PetscFunctionBegin; 3056 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3057 PetscValidPointer(field, 2); 3058 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. 
Call DMSetNumFields()"); 3059 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3060 *field = dm->fields[f]; 3061 PetscFunctionReturn(0); 3062 } 3063 3064 #undef __FUNCT__ 3065 #define __FUNCT__ "DMSetCoordinates" 3066 /*@ 3067 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3068 3069 Collective on DM 3070 3071 Input Parameters: 3072 + dm - the DM 3073 - c - coordinate vector 3074 3075 Note: 3076 The coordinates do include those for ghost points, which are in the local vector 3077 3078 Level: intermediate 3079 3080 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3081 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3082 @*/ 3083 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3084 { 3085 PetscErrorCode ierr; 3086 3087 PetscFunctionBegin; 3088 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3089 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3090 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3091 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3092 dm->coordinates = c; 3093 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3094 PetscFunctionReturn(0); 3095 } 3096 3097 #undef __FUNCT__ 3098 #define __FUNCT__ "DMSetCoordinatesLocal" 3099 /*@ 3100 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3101 3102 Collective on DM 3103 3104 Input Parameters: 3105 + dm - the DM 3106 - c - coordinate vector 3107 3108 Note: 3109 The coordinates of ghost points can be set using DMSetCoordinates() 3110 followed by DMGetCoordinatesLocal(). This is intended to enable the 3111 setting of ghost coordinates outside of the domain. 
3112 3113 Level: intermediate 3114 3115 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3116 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3117 @*/ 3118 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3119 { 3120 PetscErrorCode ierr; 3121 3122 PetscFunctionBegin; 3123 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3124 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3125 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3126 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3127 3128 dm->coordinatesLocal = c; 3129 3130 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3131 PetscFunctionReturn(0); 3132 } 3133 3134 #undef __FUNCT__ 3135 #define __FUNCT__ "DMGetCoordinates" 3136 /*@ 3137 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3138 3139 Not Collective 3140 3141 Input Parameter: 3142 . dm - the DM 3143 3144 Output Parameter: 3145 . c - global coordinate vector 3146 3147 Note: 3148 This is a borrowed reference, so the user should NOT destroy this vector 3149 3150 Each process has only the local coordinates (does NOT have the ghost coordinates). 3151 3152 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3153 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3154 3155 Level: intermediate 3156 3157 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3158 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3159 @*/ 3160 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3161 { 3162 PetscErrorCode ierr; 3163 3164 PetscFunctionBegin; 3165 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3166 PetscValidPointer(c,2); 3167 if (!dm->coordinates && dm->coordinatesLocal) { 3168 DM cdm = NULL; 3169 3170 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3171 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3172 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3173 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3174 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3175 } 3176 *c = dm->coordinates; 3177 PetscFunctionReturn(0); 3178 } 3179 3180 #undef __FUNCT__ 3181 #define __FUNCT__ "DMGetCoordinatesLocal" 3182 /*@ 3183 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3184 3185 Collective on DM 3186 3187 Input Parameter: 3188 . dm - the DM 3189 3190 Output Parameter: 3191 . c - coordinate vector 3192 3193 Note: 3194 This is a borrowed reference, so the user should NOT destroy this vector 3195 3196 Each process has the local and ghost coordinates 3197 3198 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3199 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3200 3201 Level: intermediate 3202 3203 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3204 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3205 @*/ 3206 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3207 { 3208 PetscErrorCode ierr; 3209 3210 PetscFunctionBegin; 3211 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3212 PetscValidPointer(c,2); 3213 if (!dm->coordinatesLocal && dm->coordinates) { 3214 DM cdm = NULL; 3215 3216 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3217 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3218 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3219 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3220 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3221 } 3222 *c = dm->coordinatesLocal; 3223 PetscFunctionReturn(0); 3224 } 3225 3226 #undef __FUNCT__ 3227 #define __FUNCT__ "DMGetCoordinateDM" 3228 /*@ 3229 DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates 3230 3231 Collective on DM 3232 3233 Input Parameter: 3234 . dm - the DM 3235 3236 Output Parameter: 3237 . 
cdm - coordinate DM 3238 3239 Level: intermediate 3240 3241 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3242 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3243 @*/ 3244 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3245 { 3246 PetscErrorCode ierr; 3247 3248 PetscFunctionBegin; 3249 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3250 PetscValidPointer(cdm,2); 3251 if (!dm->coordinateDM) { 3252 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3253 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3254 } 3255 *cdm = dm->coordinateDM; 3256 PetscFunctionReturn(0); 3257 } 3258 3259 #undef __FUNCT__ 3260 #define __FUNCT__ "DMLocatePoints" 3261 /*@ 3262 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3263 3264 Not collective 3265 3266 Input Parameters: 3267 + dm - The DM 3268 - v - The Vec of points 3269 3270 Output Parameter: 3271 . cells - The local cell numbers for cells which contain the points 3272 3273 Level: developer 3274 3275 .keywords: point location, mesh 3276 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3277 @*/ 3278 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells) 3279 { 3280 PetscErrorCode ierr; 3281 3282 PetscFunctionBegin; 3283 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3284 PetscValidHeaderSpecific(v,VEC_CLASSID,2); 3285 PetscValidPointer(cells,3); 3286 if (dm->ops->locatepoints) { 3287 ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr); 3288 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM"); 3289 PetscFunctionReturn(0); 3290 } 3291